🔧 fix: Enhance Responses API Auto-Enable Logic for Compatible Endpoints (#8506)

- Updated the auto-enable logic so that enabling web search turns on the Responses API only for OpenAI, Azure OpenAI, and custom endpoints.
- Added import for EModelEndpoint to facilitate endpoint compatibility checks.
Author: Dustin Healy (committed by GitHub)
Date:   2025-07-18 19:27:56 -07:00
Commit: 0761e65086
Parent: 0bf708915b

@@ -1,5 +1,11 @@
 import { useRecoilValue, useSetRecoilState } from 'recoil';
-import { TPreset, TPlugin, TConversation, tConvoUpdateSchema } from 'librechat-data-provider';
+import {
+  TPreset,
+  TPlugin,
+  TConversation,
+  tConvoUpdateSchema,
+  EModelEndpoint,
+} from 'librechat-data-provider';
 import type { TSetExample, TSetOption, TSetOptionsPayload } from '~/common';
 import usePresetIndexOptions from './usePresetIndexOptions';
 import { useChatContext } from '~/Providers/ChatContext';
@@ -30,11 +36,19 @@ const useSetIndexOptions: TUseSetOptions = (preset = false) => {
       };
     }
-    // Auto-enable Responses API when web search is enabled
+    // Auto-enable Responses API when web search is enabled (only for OpenAI/Azure/Custom endpoints)
     if (param === 'web_search' && newValue === true) {
-      const currentUseResponsesApi = conversation?.useResponsesApi ?? false;
-      if (!currentUseResponsesApi) {
-        update['useResponsesApi'] = true;
+      const currentEndpoint = conversation?.endpoint;
+      const isOpenAICompatible =
+        currentEndpoint === EModelEndpoint.openAI ||
+        currentEndpoint === EModelEndpoint.azureOpenAI ||
+        currentEndpoint === EModelEndpoint.custom;
+      if (isOpenAICompatible) {
+        const currentUseResponsesApi = conversation?.useResponsesApi ?? false;
+        if (!currentUseResponsesApi) {
+          update['useResponsesApi'] = true;
+        }
+      }
     }
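
For illustration, the net effect of the gate can be sketched as a small standalone function. The previewWebSearchUpdate helper below is hypothetical (in the hook the check runs inline against the conversation state), but the endpoint comparison mirrors the diff above:

import { EModelEndpoint } from 'librechat-data-provider';

// Hypothetical helper mirroring the new gate: given the conversation's endpoint and
// whether the Responses API is already enabled, return the fields merged into the update.
const previewWebSearchUpdate = (endpoint?: string, useResponsesApi = false) => {
  const update: Record<string, unknown> = { web_search: true };
  const isOpenAICompatible =
    endpoint === EModelEndpoint.openAI ||
    endpoint === EModelEndpoint.azureOpenAI ||
    endpoint === EModelEndpoint.custom;
  if (isOpenAICompatible && !useResponsesApi) {
    update['useResponsesApi'] = true;
  }
  return update;
};

// Compatible endpoint: toggling web search also switches the Responses API on.
previewWebSearchUpdate(EModelEndpoint.openAI); // { web_search: true, useResponsesApi: true }
// Other endpoints (e.g. Anthropic): web search is set and useResponsesApi is left untouched.
previewWebSearchUpdate(EModelEndpoint.anthropic); // { web_search: true }

Before this change the flag was set regardless of endpoint; gating on the endpoint keeps useResponsesApi from being flipped on providers that do not expose a Responses API.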