Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-17 08:50:15 +01:00)
feat: gpt-3.5-turbo-instruct support, refactor: try fetching models if OpenRouter is set (#981)
* refactor: try fetching models if OpenRouter API key is set
* feat: gpt-3.5-turbo-instruct support
* fix: use new assignment in getTokenizer
parent 1a77fb4fd5
commit d87754c43d

3 changed files with 18 additions and 10 deletions
@@ -153,6 +153,11 @@ class ChatGPTClient extends BaseClient {
     } else {
       modelOptions.prompt = input;
     }
+
+    if (this.useOpenRouter && modelOptions.prompt) {
+      delete modelOptions.stop;
+    }
+
     const { debug } = this.options;
     const url = this.completionsUrl;
     if (debug) {
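The new block strips the stop parameter whenever a prompt-style (non-chat) request goes out through OpenRouter. A minimal sketch of its effect, assuming a hypothetical modelOptions object shaped like the one ChatGPTClient builds (the field values here are illustrative, not taken from the patch):

// Hypothetical request options for an OpenRouter text-completion call.
const modelOptions = {
  model: 'gpt-3.5-turbo-instruct',
  prompt: 'Hello, world',
  stop: ['\n\n'],
};
const useOpenRouter = true;

if (useOpenRouter && modelOptions.prompt) {
  // Same mutation as the hunk above: the stop list is dropped before sending.
  delete modelOptions.stop;
}
// modelOptions is now { model: 'gpt-3.5-turbo-instruct', prompt: 'Hello, world' }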
@@ -73,21 +73,22 @@ class OpenAIClient extends BaseClient {
       this.useOpenRouter = true;
     }
 
+    const { model } = this.modelOptions;
+
     this.isChatCompletion =
       this.useOpenRouter ||
       this.options.reverseProxyUrl ||
       this.options.localAI ||
-      this.modelOptions.model.startsWith('gpt-');
+      model.includes('gpt-');
     this.isChatGptModel = this.isChatCompletion;
-    if (this.modelOptions.model === 'text-davinci-003') {
+    if (model.includes('text-davinci-003') || model.includes('instruct')) {
       this.isChatCompletion = false;
       this.isChatGptModel = false;
     }
     const { isChatGptModel } = this;
     this.isUnofficialChatGptModel =
-      this.modelOptions.model.startsWith('text-chat') ||
-      this.modelOptions.model.startsWith('text-davinci-002-render');
-    this.maxContextTokens = maxTokensMap[this.modelOptions.model] ?? 4095; // 1 less than maximum
+      model.startsWith('text-chat') || model.startsWith('text-davinci-002-render');
+    this.maxContextTokens = maxTokensMap[model] ?? 4095; // 1 less than maximum
     this.maxResponseTokens = this.modelOptions.max_tokens || 1024;
     this.maxPromptTokens =
       this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;
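This is the hunk that actually enables gpt-3.5-turbo-instruct: any model name containing 'instruct' (or 'text-davinci-003') is treated as a legacy completions model rather than a chat model. A small standalone sketch of the same checks; detectIsChatCompletion is a made-up helper for illustration, not a function in the patch:

// Mirrors the detection logic above; the helper name and option shape are assumptions.
function detectIsChatCompletion(model, { useOpenRouter = false, reverseProxyUrl = null, localAI = false } = {}) {
  let isChatCompletion = useOpenRouter || Boolean(reverseProxyUrl) || localAI || model.includes('gpt-');
  if (model.includes('text-davinci-003') || model.includes('instruct')) {
    isChatCompletion = false;
  }
  return isChatCompletion;
}

detectIsChatCompletion('gpt-4');                  // true  -> chat completions payload
detectIsChatCompletion('gpt-3.5-turbo-instruct'); // false -> legacy completions payload
detectIsChatCompletion('text-davinci-003');       // false -> legacy completions payload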
@@ -168,8 +169,9 @@ class OpenAIClient extends BaseClient {
       tokenizer = this.constructor.getTokenizer(this.encoding, true, extendSpecialTokens);
     } else {
       try {
-        this.encoding = this.modelOptions.model;
-        tokenizer = this.constructor.getTokenizer(this.modelOptions.model, true);
+        const { model } = this.modelOptions;
+        this.encoding = model.includes('instruct') ? 'text-davinci-003' : model;
+        tokenizer = this.constructor.getTokenizer(this.encoding, true);
       } catch {
         tokenizer = this.constructor.getTokenizer(this.encoding, true);
       }
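For token counting, instruct models fall back to the text-davinci-003 encoding instead of being passed to the tokenizer by name. A hedged sketch of just the selection step (getTokenizer stands in for this.constructor.getTokenizer and is treated as an opaque call here):

// Encoding fallback added above: 'instruct' model names reuse a known encoding.
const model = 'gpt-3.5-turbo-instruct';
const encoding = model.includes('instruct') ? 'text-davinci-003' : model;
// encoding === 'text-davinci-003', so the tokenizer lookup does not need to
// recognize the new instruct model name.
// tokenizer = getTokenizer(encoding, true);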
@@ -354,6 +356,8 @@ class OpenAIClient extends BaseClient {
       if (this.isChatCompletion) {
         token =
           progressMessage.choices?.[0]?.delta?.content ?? progressMessage.choices?.[0]?.text;
+      } else {
+        token = progressMessage.choices?.[0]?.text;
       }
 
       if (!token && this.useOpenRouter) {
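The added else branch lets streaming also work for completion (non-chat) responses, which put the text directly on choices[0].text instead of a chat delta. A sketch with two hypothetical progress chunks, showing which field each path reads:

// Hypothetical streamed chunks, shaped only as far as the fields read above.
const chatChunk = { choices: [{ delta: { content: 'Hel' } }] };
const completionChunk = { choices: [{ text: 'Hel' }] };

// Chat-completion path:
let token = chatChunk.choices?.[0]?.delta?.content ?? chatChunk.choices?.[0]?.text; // 'Hel'
// Legacy completion path (the new else branch):
token = completionChunk.choices?.[0]?.text; // 'Hel'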
@@ -88,12 +88,11 @@ const getOpenAIModels = async (opts = { azure: false, plugins: false }) => {
     return models;
   }
 
-  if (userProvidedOpenAI) {
+  if (userProvidedOpenAI && !OPENROUTER_API_KEY) {
     return models;
   }
 
-  models = await fetchOpenAIModels(opts, models);
-  return models;
+  return await fetchOpenAIModels(opts, models);
 };
 
 const getChatGPTBrowserModels = () => {
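With this change, a configured OpenRouter key overrides the "user provides their own OpenAI key" early return, so the model list is still fetched. A condensed sketch of the decision; fetchOpenAIModels is stubbed and the argument shape is simplified for illustration:

// Stub standing in for the real fetch of the model list.
const fetchOpenAIModels = async (models) => models;

async function getModels({ userProvidedOpenAI, OPENROUTER_API_KEY, models }) {
  if (userProvidedOpenAI && !OPENROUTER_API_KEY) {
    // Users supply their own OpenAI key and OpenRouter is not configured:
    // keep the default list instead of fetching.
    return models;
  }
  // OpenRouter key present (or server-side OpenAI key): try fetching the list.
  return await fetchOpenAIModels(models);
}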