Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-12-18 01:10:14 +01:00
🎚️ feat: Reasoning Parameters for Custom Endpoints (#10297)
Some checks are pending:
- Docker Dev Images Build / build (Dockerfile, librechat-dev, node) (push): waiting to run
- Docker Dev Images Build / build (Dockerfile.multi, librechat-dev-api, api-build) (push): waiting to run
- Sync Locize Translations & Create Translation PR / Sync Translation Keys with Locize (push): waiting to run
- Sync Locize Translations & Create Translation PR / Create Translation PR on Version Published (push): blocked by required conditions
parent 861ef98d29
commit e6aeec9f25
7 changed files with 99 additions and 13 deletions
@@ -1,4 +1,4 @@
-import { removeNullishValues } from 'librechat-data-provider';
+import { EModelEndpoint, removeNullishValues } from 'librechat-data-provider';
 import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
 import type { AzureOpenAIInput } from '@langchain/openai';
 import type { OpenAI } from 'openai';

@@ -79,6 +79,7 @@ export function getOpenAILLMConfig({
   azure,
   apiKey,
   baseURL,
+  endpoint,
   streaming,
   addParams,
   dropParams,

@@ -88,6 +89,7 @@ export function getOpenAILLMConfig({
   apiKey: string;
   streaming: boolean;
   baseURL?: string | null;
+  endpoint?: EModelEndpoint | string | null;
   modelOptions: Partial<t.OpenAIParameters>;
   addParams?: Record<string, unknown>;
   dropParams?: string[];

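Taken together, the hunks above thread a new endpoint value through getOpenAILLMConfig: it is added to the destructured arguments and typed as EModelEndpoint | string | null. The sketch below shows how a caller might supply it for a custom, OpenAI-compatible provider; it includes only the fields visible in this diff (azure and modelOptions are omitted), and the example values are hypothetical rather than part of the actual function signature.

import { EModelEndpoint } from 'librechat-data-provider';

// Illustrative subset of the options shown in the hunks above; the real
// function accepts more fields (e.g. azure, modelOptions) than are typed here.
interface OpenAILLMConfigOptionsSketch {
  apiKey: string;
  streaming: boolean;
  baseURL?: string | null;
  endpoint?: EModelEndpoint | string | null;
  addParams?: Record<string, unknown>;
  dropParams?: string[];
}

// Hypothetical call site for a custom endpoint: the endpoint value is whatever
// name the custom provider is registered under, not one of the built-in enums.
const customOptions: OpenAILLMConfigOptionsSketch = {
  apiKey: 'sk-example',
  streaming: true,
  baseURL: 'https://example-provider.local/v1',
  endpoint: 'my-custom-endpoint',
};
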
@@ -155,7 +157,8 @@ export function getOpenAILLMConfig({

   if (
     hasReasoningParams({ reasoning_effort, reasoning_summary }) &&
-    (llmConfig.useResponsesApi === true || useOpenRouter)
+    (llmConfig.useResponsesApi === true ||
+      (endpoint !== EModelEndpoint.openAI && endpoint !== EModelEndpoint.azureOpenAI))
   ) {
     llmConfig.reasoning = removeNullishValues(
       {
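The last hunk carries the behavioral change: reasoning parameters used to be forwarded only when the Responses API was enabled or the provider was OpenRouter; with this commit they are forwarded for any endpoint other than openAI and azureOpenAI, which is what lets them reach custom endpoints. A rough standalone sketch of the new gate follows; hasReasoningParams and the field names inside the reasoning object are assumptions, since the hunk is truncated before the object body and the helper's implementation is not part of this diff.

import { EModelEndpoint, removeNullishValues } from 'librechat-data-provider';

// Assumed helper: true when either reasoning field is provided (not shown in this diff).
function hasReasoningParams({
  reasoning_effort,
  reasoning_summary,
}: {
  reasoning_effort?: string | null;
  reasoning_summary?: string | null;
}): boolean {
  return reasoning_effort != null || reasoning_summary != null;
}

// Minimal reproduction of the updated condition from the hunk above.
function applyReasoning(
  llmConfig: { useResponsesApi?: boolean; reasoning?: Record<string, unknown> },
  endpoint: EModelEndpoint | string | null | undefined,
  reasoning_effort?: string | null,
  reasoning_summary?: string | null,
) {
  if (
    hasReasoningParams({ reasoning_effort, reasoning_summary }) &&
    (llmConfig.useResponsesApi === true ||
      (endpoint !== EModelEndpoint.openAI && endpoint !== EModelEndpoint.azureOpenAI))
  ) {
    // Field names here are assumed; the diff cuts off before the object body.
    llmConfig.reasoning = removeNullishValues({
      effort: reasoning_effort,
      summary: reasoning_summary,
    });
  }
  return llmConfig;
}

With this gate, a custom endpoint such as OpenRouter no longer needs its own flag: any endpoint that is not openAI or azureOpenAI passes the check as long as a reasoning parameter is set.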