Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-09-22 06:00:56 +02:00)
🤖 feat: Support OpenAI Web Search models (#6313)
* fix: reorder vision model entries for cheaper models first
* fix: add endpoint property to bedrock client initialization
* fix: exclude unsupported parameters for OpenAI Web Search models
* fix: enhance options to exclude unsupported parameters for Web Search models
Parent commit: cf03731cc8
This commit: efed1c461d
5 changed files with 61 additions and 18 deletions
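For context on the web-search fixes: both diffs that touch parameter handling key off the model name with the regular expression /gpt-4o.*search/. A minimal sketch of that check, with sample model IDs chosen for illustration (the helper name is not from the commit):

// Illustrative only: mirrors the regex used in the diffs below.
const isWebSearchModel = (model) => /gpt-4o.*search/.test(model);

console.log(isWebSearchModel('gpt-4o-search-preview'));      // true
console.log(isWebSearchModel('gpt-4o-mini-search-preview')); // true
console.log(isWebSearchModel('gpt-4o-mini'));                // false; regular models keep their sampling params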
@@ -1272,6 +1272,29 @@ ${convo}
       });
     }

+    /** Note: OpenAI Web Search models do not support any known parameters besides `max_tokens` */
+    if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
+      const searchExcludeParams = [
+        'frequency_penalty',
+        'presence_penalty',
+        'temperature',
+        'top_p',
+        'top_k',
+        'stop',
+        'logit_bias',
+        'seed',
+        'response_format',
+        'n',
+        'logprobs',
+        'user',
+      ];
+
+      this.options.dropParams = this.options.dropParams || [];
+      this.options.dropParams = [
+        ...new Set([...this.options.dropParams, ...searchExcludeParams]),
+      ];
+    }
+
     if (this.options.dropParams && Array.isArray(this.options.dropParams)) {
       this.options.dropParams.forEach((param) => {
         delete modelOptions[param];
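Taken on its own, the block above merges any user-configured dropParams with the hard-coded searchExcludeParams, deduplicates them via a Set, and then strips each listed key from the outgoing modelOptions. A self-contained sketch of that behaviour (sample values invented, not LibreChat code):

// Merge-and-strip sketch mirroring the logic above.
const searchExcludeParams = ['temperature', 'top_p', 'presence_penalty'];
let dropParams = ['temperature', 'stop']; // e.g. user-configured
dropParams = [...new Set([...dropParams, ...searchExcludeParams])];

const modelOptions = { model: 'gpt-4o-search-preview', temperature: 0.7, top_p: 1, max_tokens: 1024 };
dropParams.forEach((param) => {
  delete modelOptions[param]; // deleting a missing key such as 'stop' is a harmless no-op
});
console.log(modelOptions); // { model: 'gpt-4o-search-preview', max_tokens: 1024 }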
@@ -23,8 +23,9 @@ const initializeClient = async ({ req, res, endpointOption }) => {
   const agent = {
     id: EModelEndpoint.bedrock,
     name: endpointOption.name,
-    instructions: endpointOption.promptPrefix,
     provider: EModelEndpoint.bedrock,
+    endpoint: EModelEndpoint.bedrock,
+    instructions: endpointOption.promptPrefix,
     model: endpointOption.model_parameters.model,
     model_parameters: endpointOption.model_parameters,
   };
@@ -135,12 +135,9 @@ const initializeClient = async ({
   }

   if (optionsOnly) {
-    clientOptions = Object.assign(
-      {
-        modelOptions: endpointOption.model_parameters,
-      },
-      clientOptions,
-    );
+    const modelOptions = endpointOption.model_parameters;
+    modelOptions.model = modelName;
+    clientOptions = Object.assign({ modelOptions }, clientOptions);
     clientOptions.modelOptions.user = req.user.id;
     const options = getLLMConfig(apiKey, clientOptions);
     if (!clientOptions.streamRate) {
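One subtlety in the refactor above: modelOptions is a direct reference to endpointOption.model_parameters, so assigning modelOptions.model also updates that object, and Object.assign gives later sources precedence, so any modelOptions key already present on clientOptions would win over the shorthand default. A plain-JavaScript sketch of both behaviours (sample data invented, not LibreChat code):

// Just the reference and Object.assign semantics relied on above.
const endpointOption = { model_parameters: { temperature: 0.5 } };
const modelOptions = endpointOption.model_parameters;
modelOptions.model = 'resolved-model-name'; // stands in for modelName

let clientOptions = { streamRate: 25 };
clientOptions = Object.assign({ modelOptions }, clientOptions);

console.log(clientOptions.modelOptions === endpointOption.model_parameters); // true, same object
console.log(endpointOption.model_parameters.model); // 'resolved-model-name'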
@@ -28,7 +28,7 @@ const { isEnabled } = require('~/server/utils');
  * @returns {Object} Configuration options for creating an LLM instance.
  */
 function getLLMConfig(apiKey, options = {}, endpoint = null) {
-  const {
+  let {
     modelOptions = {},
     reverseProxyUrl,
     defaultQuery,
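The only change here is const to let on the destructuring, which is what allows the next hunk to reassign the destructured dropParams binding; with const, that reassignment would throw a TypeError. A short standalone illustration (not from the repo):

// With `const { dropParams } = options;`, a later `dropParams = [];` would throw a TypeError (assignment to constant variable).
let { dropParams } = { dropParams: undefined };
dropParams = dropParams || []; // fine with let, as in the next hunk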
@@ -50,10 +50,32 @@ function getLLMConfig(apiKey, options = {}, endpoint = null) {
   if (addParams && typeof addParams === 'object') {
     Object.assign(llmConfig, addParams);
   }
+  /** Note: OpenAI Web Search models do not support any known parameters besides `max_tokens` */
+  if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
+    const searchExcludeParams = [
+      'frequency_penalty',
+      'presence_penalty',
+      'temperature',
+      'top_p',
+      'top_k',
+      'stop',
+      'logit_bias',
+      'seed',
+      'response_format',
+      'n',
+      'logprobs',
+      'user',
+    ];
+
+    dropParams = dropParams || [];
+    dropParams = [...new Set([...dropParams, ...searchExcludeParams])];
+  }

   if (dropParams && Array.isArray(dropParams)) {
     dropParams.forEach((param) => {
-      delete llmConfig[param];
+      if (llmConfig[param]) {
+        llmConfig[param] = undefined;
+      }
     });
   }

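The other change in this hunk swaps delete llmConfig[param] for setting the key to undefined behind a truthiness guard. The effect on the outgoing request is similar: JSON serialization omits undefined-valued properties, and many client libraries treat an explicitly undefined option as absent, while the object keeps a stable shape. Note that the guard also leaves falsy values such as temperature: 0 untouched. A quick sketch of the serialization behaviour (values invented):

// Sketch only: undefined-valued keys are omitted by JSON.stringify, much like deleted keys.
const llmConfig = { model: 'gpt-4o-search-preview', temperature: 0.7, maxTokens: 1024 };
llmConfig.temperature = undefined;
console.log(JSON.stringify(llmConfig)); // {"model":"gpt-4o-search-preview","maxTokens":1024}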
@@ -827,28 +827,28 @@ export const supportsBalanceCheck = {
 };

 export const visionModels = [
-  'grok-3',
-  'grok-2-vision',
   'grok-vision',
-  'gpt-4.5',
-  'gpt-4o',
+  'grok-2-vision',
+  'grok-3',
   'gpt-4o-mini',
-  'o1',
+  'gpt-4o',
   'gpt-4-turbo',
   'gpt-4-vision',
+  'o1',
+  'gpt-4.5',
   'llava',
   'llava-13b',
   'gemini-pro-vision',
   'claude-3',
-  'gemini-2.0',
-  'gemini-1.5',
   'gemini-exp',
+  'gemini-1.5',
+  'gemini-2.0',
   'moondream',
   'llama3.2-vision',
-  'llama-3.2-90b-vision',
   'llama-3.2-11b-vision',
-  'llama-3-2-90b-vision',
   'llama-3-2-11b-vision',
+  'llama-3.2-90b-vision',
+  'llama-3-2-90b-vision',
 ];
 export enum VisionModes {
   generative = 'generative',
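The final hunk only reorders visionModels so that, within each family, the cheaper variant is listed first (gpt-4o-mini before gpt-4o, the 11b Llama variants before the 90b ones, grok-vision before grok-3), matching the first bullet of the commit message. A hypothetical first-match lookup, invented purely to show why that ordering can matter:

// Hypothetical lookup, not taken from the repo: the first vision entry with an available match wins.
const visionModels = ['gpt-4o-mini', 'gpt-4o', 'gpt-4-turbo'];
const available = ['gpt-4o-2024-08-06', 'gpt-4o-mini-2024-07-18'];

const pick = visionModels
  .map((vision) => available.find((m) => m.includes(vision)))
  .find(Boolean);
console.log(pick); // 'gpt-4o-mini-2024-07-18', the cheaper match, because it is listed first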