diff --git a/api/app/clients/AnthropicClient.js b/api/app/clients/AnthropicClient.js index ebec514040..304208e49d 100644 --- a/api/app/clients/AnthropicClient.js +++ b/api/app/clients/AnthropicClient.js @@ -268,13 +268,14 @@ class AnthropicClient extends BaseClient { }; let text = ''; + const { model, stream, maxOutputTokens, ...rest } = this.modelOptions; const requestOptions = { prompt: payload, - model: this.modelOptions.model, - stream: this.modelOptions.stream || true, - max_tokens_to_sample: this.modelOptions.maxOutputTokens || 1500, + model, + stream: stream ?? true, + max_tokens_to_sample: maxOutputTokens || 1500, metadata, - ...modelOptions, + ...rest, }; if (this.options.debug) { console.log('AnthropicClient: requestOptions'); diff --git a/api/server/routes/ask/anthropic.js b/api/server/routes/ask/anthropic.js index 3517e928b8..637fc090aa 100644 --- a/api/server/routes/ask/anthropic.js +++ b/api/server/routes/ask/anthropic.js @@ -91,7 +91,7 @@ router.post( let response = await client.sendMessage(text, { getIds, - debug: false, + // debug: true, user: req.user.id, conversationId, parentMessageId, diff --git a/api/server/routes/endpoints/schemas.js b/api/server/routes/endpoints/schemas.js index ba36abe64c..7c948f2959 100644 --- a/api/server/routes/endpoints/schemas.js +++ b/api/server/routes/endpoints/schemas.js @@ -191,7 +191,7 @@ const anthropicSchema = tConversationSchema modelLabel: obj.modelLabel ?? null, promptPrefix: obj.promptPrefix ?? null, temperature: obj.temperature ?? 1, - maxOutputTokens: obj.maxOutputTokens ?? 1024, + maxOutputTokens: obj.maxOutputTokens ?? 4000, topP: obj.topP ?? 0.7, topK: obj.topK ?? 
5, })) @@ -200,7 +200,7 @@ const anthropicSchema = tConversationSchema modelLabel: null, promptPrefix: null, temperature: 1, - maxOutputTokens: 1024, + maxOutputTokens: 4000, topP: 0.7, topK: 5, })); diff --git a/client/src/components/Endpoints/Settings/Anthropic.tsx b/client/src/components/Endpoints/Settings/Anthropic.tsx index 156c610368..05255ea22b 100644 --- a/client/src/components/Endpoints/Settings/Anthropic.tsx +++ b/client/src/components/Endpoints/Settings/Anthropic.tsx @@ -203,14 +203,14 @@ export default function Settings({ conversation, setOption, models, readonly }:
{localize('com_endpoint_max_output_tokens')}{' '} - ({localize('com_endpoint_default_with_num', '1024')}) + ({localize('com_endpoint_default_with_num', '4000')}) setMaxOutputTokens(Number(value))} - max={1024} + max={4000} min={1} step={1} controls={false} @@ -225,10 +225,10 @@ export default function Settings({ conversation, setOption, models, readonly }:
setMaxOutputTokens(value[0])} doubleClickHandler={() => setMaxOutputTokens(0)} - max={1024} + max={4000} min={1} step={1} className="flex h-4 w-full" diff --git a/packages/data-provider/src/schemas.ts b/packages/data-provider/src/schemas.ts index 6f21e58860..e703389f25 100644 --- a/packages/data-provider/src/schemas.ts +++ b/packages/data-provider/src/schemas.ts @@ -267,7 +267,7 @@ export const anthropicSchema = tConversationSchema modelLabel: obj.modelLabel ?? null, promptPrefix: obj.promptPrefix ?? null, temperature: obj.temperature ?? 1, - maxOutputTokens: obj.maxOutputTokens ?? 1024, + maxOutputTokens: obj.maxOutputTokens ?? 4000, topP: obj.topP ?? 0.7, topK: obj.topK ?? 5, })) @@ -276,7 +276,7 @@ export const anthropicSchema = tConversationSchema modelLabel: null, promptPrefix: null, temperature: 1, - maxOutputTokens: 1024, + maxOutputTokens: 4000, topP: 0.7, topK: 5, }));