mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-16 16:30:15 +01:00
🕸️ refactor: Drop/Add web_search Param Handling for Custom Endpoints (#9852)
- Added tests to validate behavior of the web_search parameter in the getOpenAIConfig function.
- Implemented logic to handle web_search in addParams and dropParams, ensuring correct precedence and behavior.
- Ensured web_search does not appear in modelKwargs or llmConfig when not applicable.
- Improved overall configuration management for the OpenAI API integration.
This commit is contained in:
parent
3219734b9e
commit
823015160c
2 changed files with 123 additions and 1 deletions
|
|
@ -151,6 +151,114 @@ describe('getOpenAIConfig', () => {
|
|||
expect(result.tools).toEqual([{ type: 'web_search_preview' }]);
|
||||
});
|
||||
|
||||
it('should handle web_search from addParams overriding modelOptions', () => {
|
||||
const modelOptions = {
|
||||
model: 'gpt-5',
|
||||
web_search: false,
|
||||
};
|
||||
|
||||
const addParams = {
|
||||
web_search: true,
|
||||
customParam: 'value',
|
||||
};
|
||||
|
||||
const result = getOpenAIConfig(mockApiKey, { modelOptions, addParams });
|
||||
|
||||
expect(result.llmConfig.useResponsesApi).toBe(true);
|
||||
expect(result.tools).toEqual([{ type: 'web_search_preview' }]);
|
||||
// web_search should not be in modelKwargs or llmConfig
|
||||
expect((result.llmConfig as Record<string, unknown>).web_search).toBeUndefined();
|
||||
expect(result.llmConfig.modelKwargs).toEqual({ customParam: 'value' });
|
||||
});
|
||||
|
||||
it('should disable web_search when included in dropParams', () => {
|
||||
const modelOptions = {
|
||||
model: 'gpt-5',
|
||||
web_search: true,
|
||||
};
|
||||
|
||||
const result = getOpenAIConfig(mockApiKey, {
|
||||
modelOptions,
|
||||
dropParams: ['web_search'],
|
||||
});
|
||||
|
||||
expect(result.llmConfig.useResponsesApi).toBeUndefined();
|
||||
expect(result.tools).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle web_search false from addParams', () => {
|
||||
const modelOptions = {
|
||||
model: 'gpt-5',
|
||||
web_search: true,
|
||||
};
|
||||
|
||||
const addParams = {
|
||||
web_search: false,
|
||||
};
|
||||
|
||||
const result = getOpenAIConfig(mockApiKey, { modelOptions, addParams });
|
||||
|
||||
expect(result.llmConfig.useResponsesApi).toBeUndefined();
|
||||
expect(result.tools).toEqual([]);
|
||||
});
|
||||
|
||||
it('should ignore non-boolean web_search values in addParams', () => {
|
||||
const modelOptions = {
|
||||
model: 'gpt-5',
|
||||
web_search: true,
|
||||
};
|
||||
|
||||
const addParams = {
|
||||
web_search: 'string-value' as unknown,
|
||||
temperature: 0.7,
|
||||
};
|
||||
|
||||
const result = getOpenAIConfig(mockApiKey, { modelOptions, addParams });
|
||||
|
||||
// Should keep the original web_search from modelOptions since addParams value is not boolean
|
||||
expect(result.llmConfig.useResponsesApi).toBe(true);
|
||||
expect(result.tools).toEqual([{ type: 'web_search_preview' }]);
|
||||
expect(result.llmConfig.temperature).toBe(0.7);
|
||||
// web_search should not be added to modelKwargs
|
||||
expect(result.llmConfig.modelKwargs).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle web_search with both addParams and dropParams', () => {
|
||||
const modelOptions = {
|
||||
model: 'gpt-5',
|
||||
};
|
||||
|
||||
const addParams = {
|
||||
web_search: true,
|
||||
};
|
||||
|
||||
const result = getOpenAIConfig(mockApiKey, {
|
||||
modelOptions,
|
||||
addParams,
|
||||
dropParams: ['web_search'], // dropParams takes precedence
|
||||
});
|
||||
|
||||
expect(result.llmConfig.useResponsesApi).toBeUndefined();
|
||||
expect(result.tools).toEqual([]);
|
||||
});
|
||||
|
||||
it('should not add web_search to modelKwargs or llmConfig', () => {
|
||||
const addParams = {
|
||||
web_search: true,
|
||||
customParam1: 'value1',
|
||||
temperature: 0.5,
|
||||
};
|
||||
|
||||
const result = getOpenAIConfig(mockApiKey, { addParams });
|
||||
|
||||
// web_search should trigger the tool but not appear in config
|
||||
expect(result.llmConfig.useResponsesApi).toBe(true);
|
||||
expect(result.tools).toEqual([{ type: 'web_search_preview' }]);
|
||||
expect((result.llmConfig as Record<string, unknown>).web_search).toBeUndefined();
|
||||
expect(result.llmConfig.temperature).toBe(0.5);
|
||||
expect(result.llmConfig.modelKwargs).toEqual({ customParam1: 'value1' });
|
||||
});
|
||||
|
||||
it('should drop params for search models', () => {
|
||||
const modelOptions = {
|
||||
model: 'gpt-4o-search',
|
||||
|
|
|
|||
|
|
@ -129,8 +129,17 @@ export function getOpenAILLMConfig({
|
|||
hasModelKwargs = true;
|
||||
}
|
||||
|
||||
let enableWebSearch = web_search;
|
||||
|
||||
if (addParams && typeof addParams === 'object') {
|
||||
for (const [key, value] of Object.entries(addParams)) {
|
||||
/** Handle web_search directly here instead of adding to modelKwargs or llmConfig */
|
||||
if (key === 'web_search') {
|
||||
if (typeof value === 'boolean') {
|
||||
enableWebSearch = value;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (knownOpenAIParams.has(key)) {
|
||||
(llmConfig as Record<string, unknown>)[key] = value;
|
||||
} else {
|
||||
|
|
@ -166,7 +175,12 @@ export function getOpenAILLMConfig({
|
|||
|
||||
const tools: BindToolsInput[] = [];
|
||||
|
||||
if (web_search) {
|
||||
/** Check if web_search should be disabled via dropParams */
|
||||
if (dropParams && dropParams.includes('web_search')) {
|
||||
enableWebSearch = false;
|
||||
}
|
||||
|
||||
if (enableWebSearch) {
|
||||
llmConfig.useResponsesApi = true;
|
||||
tools.push({ type: 'web_search_preview' });
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue