Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-12-21 02:40:14 +01:00
fix(Anthropic): Correct Payload & Increase Default Token Size 🔧 (#933)
* fix: don't pass unnecessary fields to anthropic payload
* fix: increase maxOutputTokens range
* chore: remove debugging mode
This commit is contained in:
parent dee5888280
commit 4d89adfc57

5 changed files with 14 additions and 13 deletions
@@ -268,13 +268,14 @@ class AnthropicClient extends BaseClient {
     };

     let text = '';
+    const { model, stream, maxOutputTokens, ...rest } = this.modelOptions;
     const requestOptions = {
       prompt: payload,
-      model: this.modelOptions.model,
-      stream: this.modelOptions.stream || true,
-      max_tokens_to_sample: this.modelOptions.maxOutputTokens || 1500,
+      model,
+      stream: stream || true,
+      max_tokens_to_sample: maxOutputTokens || 1500,
       metadata,
-      ...modelOptions,
+      ...rest,
     };
     if (this.options.debug) {
       console.log('AnthropicClient: requestOptions');
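Note on the hunk above: the old code spread `...modelOptions` into `requestOptions`, so camelCase client-side fields such as `maxOutputTokens` presumably reached the API alongside their renamed counterparts like `max_tokens_to_sample`, which is what the "unnecessary fields" fix removes. A minimal sketch of the destructure-and-rest pattern, with a stand-in `modelOptions` object whose values are illustrative only:

// Stand-in for this.modelOptions; values are illustrative only.
const modelOptions = {
  model: 'claude-1',
  stream: true,
  maxOutputTokens: 4000,
  temperature: 1,
  topP: 0.7,
  topK: 5,
};

// Pull out the fields that get renamed or defaulted in the payload;
// `rest` keeps only what was not destructured (temperature, topP, topK).
const { model, stream, maxOutputTokens, ...rest } = modelOptions;

const requestOptions = {
  prompt: '<payload>',
  model,
  stream: stream || true, // note: `|| true` evaluates truthy even when stream is false
  max_tokens_to_sample: maxOutputTokens || 1500,
  ...rest, // no maxOutputTokens/model/stream keys leak through anymore
};

console.log(requestOptions);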
@@ -91,7 +91,7 @@ router.post(
     let response = await client.sendMessage(text, {
       getIds,
-      debug: false,
+      // debug: true,
       user: req.user.id,
       conversationId,
       parentMessageId,
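This hunk drops the explicit `debug` option in favor of a commented-out toggle, matching the "remove debugging mode" chore. A hypothetical sketch of how such a flag gates logging, modeled on the `if (this.options.debug)` guard visible in the first hunk; the class and method names here are made up for illustration:

// Hypothetical client; only the debug-gating pattern is taken from the diff.
class ExampleClient {
  constructor(options = {}) {
    this.options = { debug: false, ...options };
  }

  debugLog(...args) {
    if (this.options.debug) {
      console.log(...args);
    }
  }
}

new ExampleClient().debugLog('suppressed');             // debug defaults to off
new ExampleClient({ debug: true }).debugLog('printed'); // opt in explicitly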
@@ -191,7 +191,7 @@ const anthropicSchema = tConversationSchema
     modelLabel: obj.modelLabel ?? null,
     promptPrefix: obj.promptPrefix ?? null,
     temperature: obj.temperature ?? 1,
-    maxOutputTokens: obj.maxOutputTokens ?? 1024,
+    maxOutputTokens: obj.maxOutputTokens ?? 4000,
     topP: obj.topP ?? 0.7,
     topK: obj.topK ?? 5,
   }))
@@ -200,7 +200,7 @@ const anthropicSchema = tConversationSchema
     modelLabel: null,
     promptPrefix: null,
     temperature: 1,
-    maxOutputTokens: 1024,
+    maxOutputTokens: 4000,
     topP: 0.7,
     topK: 5,
   }));
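Both schema hunks raise the `maxOutputTokens` fallback from 1024 to 4000. The fallback uses `??` (nullish coalescing), which substitutes the default only for null or undefined and leaves explicit values alone; a sketch mirroring the schema's numbers:

// Mirrors the defaults in the schema above; `??` keeps explicit user values.
const withDefaults = (obj) => ({
  temperature: obj.temperature ?? 1,
  maxOutputTokens: obj.maxOutputTokens ?? 4000,
  topP: obj.topP ?? 0.7,
  topK: obj.topK ?? 5,
});

console.log(withDefaults({}).maxOutputTokens);                        // 4000
console.log(withDefaults({ maxOutputTokens: 1024 }).maxOutputTokens); // 1024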
@@ -203,14 +203,14 @@ export default function Settings({ conversation, setOption, models, readonly }:
           <div className="flex justify-between">
             {localize('com_endpoint_max_output_tokens')}{' '}
             <small className="opacity-40">
-              ({localize('com_endpoint_default_with_num', '1024')})
+              ({localize('com_endpoint_default_with_num', '4000')})
             </small>
           <InputNumber
             id="max-tokens-int"
             disabled={readonly}
             value={maxOutputTokens}
             onChange={(value) => setMaxOutputTokens(Number(value))}
-            max={1024}
+            max={4000}
             min={1}
             step={1}
             controls={false}
@@ -225,10 +225,10 @@ export default function Settings({ conversation, setOption, models, readonly }:
           </div>
           <Slider
             disabled={readonly}
-            value={[maxOutputTokens ?? 1024]}
+            value={[maxOutputTokens ?? 4000]}
             onValueChange={(value) => setMaxOutputTokens(value[0])}
             doubleClickHandler={() => setMaxOutputTokens(0)}
-            max={1024}
+            max={4000}
             min={1}
             step={1}
             className="flex h-4 w-full"
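In the Settings component, the `InputNumber` and the `Slider` share the same `maxOutputTokens` state through `setMaxOutputTokens`, so their `max` props move together from 1024 to 4000. A hypothetical clamping helper (the name is made up) shows the invariant the two controls are meant to enforce:

// Hypothetical helper: keep the shared option inside the advertised 1..4000 range.
const MAX_OUTPUT_TOKENS = 4000;

function clampTokens(value) {
  const n = Number(value);
  if (Number.isNaN(n)) {
    return MAX_OUTPUT_TOKENS; // fall back to the default on bad input
  }
  return Math.min(Math.max(n, 1), MAX_OUTPUT_TOKENS);
}

console.log(clampTokens(99999)); // 4000
console.log(clampTokens(0));     // 1
console.log(clampTokens('250')); // 250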
@@ -267,7 +267,7 @@ export const anthropicSchema = tConversationSchema
     modelLabel: obj.modelLabel ?? null,
     promptPrefix: obj.promptPrefix ?? null,
     temperature: obj.temperature ?? 1,
-    maxOutputTokens: obj.maxOutputTokens ?? 1024,
+    maxOutputTokens: obj.maxOutputTokens ?? 4000,
     topP: obj.topP ?? 0.7,
     topK: obj.topK ?? 5,
   }))
@@ -276,7 +276,7 @@ export const anthropicSchema = tConversationSchema
     modelLabel: null,
     promptPrefix: null,
     temperature: 1,
-    maxOutputTokens: 1024,
+    maxOutputTokens: 4000,
     topP: 0.7,
     topK: 5,
   }));
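The last two hunks repeat the same 1024-to-4000 change in a second, exported copy of `anthropicSchema`. Since the default now appears in at least two schemas and two UI bounds, a single shared constant would keep them from drifting apart; a sketch under that assumption, with a hypothetical constant name:

// Hypothetical shared constant; each schema default and UI max could import it.
const ANTHROPIC_MAX_OUTPUT_TOKENS = 4000;

const serverDefault = ANTHROPIC_MAX_OUTPUT_TOKENS; // api-side anthropicSchema
const clientDefault = ANTHROPIC_MAX_OUTPUT_TOKENS; // client-side anthropicSchema

console.log(serverDefault === clientDefault); // true by construction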