Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-16 16:30:15 +01:00)

⚡ feat: Gemini-1.5 Flash, gpt-4o imports, modelSpec greeting fix (#2729)

* fix: Gemini Flash stream fix
* fix: correct `sender` field for gpt-4o imports from ChatGPT
* add flash model examples and fix vertex streaming
* style: modelSpec greeting fix

parent 64bf0800a0
commit fc9368e0e7

4 changed files with 13 additions and 8 deletions
@@ -116,10 +116,10 @@ GOOGLE_KEY=user_provided
 # GOOGLE_REVERSE_PROXY=
 
 # Gemini API
-# GOOGLE_MODELS=gemini-1.0-pro,gemini-1.0-pro-001,gemini-1.0-pro-latest,gemini-1.0-pro-vision-latest,gemini-1.5-pro-latest,gemini-pro,gemini-pro-vision
+# GOOGLE_MODELS=gemini-1.5-flash-latest,gemini-1.0-pro,gemini-1.0-pro-001,gemini-1.0-pro-latest,gemini-1.0-pro-vision-latest,gemini-1.5-pro-latest,gemini-pro,gemini-pro-vision
 
 # Vertex AI
-# GOOGLE_MODELS=gemini-1.5-pro-preview-0409,gemini-1.0-pro-vision-001,gemini-pro,gemini-pro-vision,chat-bison,chat-bison-32k,codechat-bison,codechat-bison-32k,text-bison,text-bison-32k,text-unicorn,code-gecko,code-bison,code-bison-32k
+# GOOGLE_MODELS=gemini-1.5-flash-preview-0514,gemini-1.5-pro-preview-0409,gemini-1.0-pro-vision-001,gemini-pro,gemini-pro-vision,chat-bison,chat-bison-32k,codechat-bison,codechat-bison-32k,text-bison,text-bison-32k,text-unicorn,code-gecko,code-bison,code-bison-32k
 
 # Google Gemini Safety Settings
 # NOTE (Vertex AI): You do not have access to the BLOCK_NONE setting by default.
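For illustration only (not part of this commit): GOOGLE_MODELS is a comma-separated list, so adding the Flash models is just a matter of prepending them to the value. A minimal sketch of reading such a value, assuming it comes from process.env and is split on commas; LibreChat's actual config loader may differ:

// Sketch (assumption): turn a comma-separated GOOGLE_MODELS value into a model list.
const googleModels = (process.env.GOOGLE_MODELS ?? 'gemini-1.5-flash-latest,gemini-1.5-pro-latest')
  .split(',')
  .map((name) => name.trim())
  .filter(Boolean);

console.log(googleModels); // e.g. ['gemini-1.5-flash-latest', 'gemini-1.5-pro-latest']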
@@ -683,11 +683,12 @@ class GoogleClient extends BaseClient {
 const safetySettings = _payload.safetySettings;
 requestOptions.safetySettings = safetySettings;
 
+const delay = modelName.includes('flash') ? 8 : 14;
 const result = await client.generateContentStream(requestOptions);
 for await (const chunk of result.stream) {
 const chunkText = chunk.text();
-this.generateTextStream(chunkText, onProgress, {
-delay: 12,
+await this.generateTextStream(chunkText, onProgress, {
+delay,
 });
 reply += chunkText;
 }
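For illustration only (not part of this commit): the hunk above now awaits `this.generateTextStream` and tunes its `delay` per model (8 for Flash, 14 otherwise) instead of a fixed 12. The real helper is defined elsewhere in the client classes; a hypothetical sketch of what such a chunk re-streaming helper might look like (name, signature, and millisecond units are assumptions, not taken from the repo):

// Hypothetical sketch: re-emit a chunk's text through onProgress in small
// pieces, pausing `delay` ms between pieces so the UI streams smoothly.
async function generateTextStreamSketch(text, onProgress, { delay = 10 } = {}) {
  for (const piece of text.match(/.{1,4}/gs) ?? []) {
    onProgress(piece);
    await new Promise((resolve) => setTimeout(resolve, delay));
  }
}

Because the helper paces output with timers, awaiting it keeps chunks in order and lets the shorter Flash delay actually take effect, which is presumably why the diff adds `await` here.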
@@ -701,10 +702,14 @@ class GoogleClient extends BaseClient {
 safetySettings: safetySettings,
 });
 
+let delay = this.isGenerativeModel ? 12 : 8;
+if (modelName.includes('flash')) {
+delay = 5;
+}
 for await (const chunk of stream) {
 const chunkText = chunk?.content ?? chunk;
-this.generateTextStream(chunkText, onProgress, {
-delay: this.isGenerativeModel ? 12 : 8,
+await this.generateTextStream(chunkText, onProgress, {
+delay,
 });
 reply += chunkText;
 }
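For illustration only (not part of this commit): in this second (Vertex) streaming path the loop already normalizes chunks with `chunk?.content ?? chunk`, suggesting the stream may yield either objects carrying a `content` field or plain strings. A tiny sketch of that normalization, under that assumption:

// Illustrative: accept either a plain string chunk or an object with `content`.
function chunkToText(chunk) {
  return typeof chunk === 'string' ? chunk : chunk?.content ?? '';
}

console.log(chunkToText('hello'));              // 'hello'
console.log(chunkToText({ content: 'world' })); // 'world'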
@@ -202,7 +202,7 @@ function processConversation(conv, importBatchBuilder, requestUserId)
 const isCreatedByUser = role === 'user';
 let sender = isCreatedByUser ? 'user' : 'GPT-3.5';
 const model = mapping.message.metadata.model_slug || openAISettings.model.default;
-if (model === 'gpt-4') {
+if (model.includes('gpt-4')) {
 sender = 'GPT-4';
 }
 
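For illustration only (not part of this commit): switching from strict equality to a substring check is what makes imported gpt-4o (and other gpt-4 variant) conversations get the `GPT-4` sender instead of falling through to the `GPT-3.5` default. A quick check of the new condition:

// Illustrative: the substring match now covers gpt-4o and other gpt-4 variants.
['gpt-4', 'gpt-4o', 'gpt-4-turbo', 'gpt-3.5-turbo'].forEach((slug) => {
  const sender = slug.includes('gpt-4') ? 'GPT-4' : 'GPT-3.5';
  console.log(slug, '->', sender);
});
// gpt-4 -> GPT-4, gpt-4o -> GPT-4, gpt-4-turbo -> GPT-4, gpt-3.5-turbo -> GPT-3.5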
@@ -76,7 +76,7 @@ export default function Landing({ Header }: { Header?: ReactNode }) {
 </div> */}
 </div>
 ) : (
-<div className="mb-5 text-2xl font-medium dark:text-white">
+<div className="mb-5 max-w-[75vh] px-12 text-center text-lg font-medium dark:text-white md:px-0 md:text-2xl">
 {endpoint === EModelEndpoint.assistants
 ? conversation?.greeting ?? localize('com_nav_welcome_assistant')
 : conversation?.greeting ?? localize('com_nav_welcome_message')}