fix(Anthropic): Correct Payload & Increase Default Token Size 🔧 (#933)

* fix: don't pass unnecessary fields to anthropic payload

* fix: increase maxOutputTokens range

* chore: remove debugging mode
commit 4d89adfc57
parent dee5888280
Author: Danny Avila
Date:   2023-09-12 11:41:15 -04:00 (committed by GitHub)
5 changed files with 14 additions and 13 deletions

@@ -268,13 +268,14 @@ class AnthropicClient extends BaseClient {
     };
     let text = '';
+    const { model, stream, maxOutputTokens, ...rest } = this.modelOptions;
     const requestOptions = {
       prompt: payload,
-      model: this.modelOptions.model,
-      stream: this.modelOptions.stream || true,
-      max_tokens_to_sample: this.modelOptions.maxOutputTokens || 1500,
+      model,
+      stream: stream || true,
+      max_tokens_to_sample: maxOutputTokens || 1500,
       metadata,
-      ...modelOptions,
+      ...rest,
     };
     if (this.options.debug) {
       console.log('AnthropicClient: requestOptions');
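
The key change here is the destructuring line: the fields that get renamed or defaulted are pulled out of `this.modelOptions`, so the trailing spread (`...rest`) can no longer re-inject `model`, `stream`, or `maxOutputTokens` into the Anthropic payload. A minimal standalone sketch of the pattern, with illustrative values (nothing below is from the repo):

```ts
// Stand-in for this.modelOptions; values are made up for illustration.
const modelOptions = {
  model: 'claude-2',
  stream: true,
  maxOutputTokens: 4000,
  temperature: 1,
  topP: 0.7,
};

// Pull out the fields that get renamed or defaulted; `rest` keeps
// only the remaining sampling params (temperature, topP, ...).
const { model, stream, maxOutputTokens, ...rest } = modelOptions;

const requestOptions = {
  model,
  stream: stream || true, // note: `||` also overrides an explicit `false`
  max_tokens_to_sample: maxOutputTokens || 1500,
  ...rest, // no duplicate model/stream/maxOutputTokens keys leak back in
};

console.log(requestOptions);
// { model: 'claude-2', stream: true, max_tokens_to_sample: 4000,
//   temperature: 1, topP: 0.7 }
```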

@@ -91,7 +91,7 @@ router.post(
     let response = await client.sendMessage(text, {
       getIds,
-      debug: false,
+      // debug: true,
       user: req.user.id,
       conversationId,
       parentMessageId,

@@ -191,7 +191,7 @@ const anthropicSchema = tConversationSchema
     modelLabel: obj.modelLabel ?? null,
     promptPrefix: obj.promptPrefix ?? null,
     temperature: obj.temperature ?? 1,
-    maxOutputTokens: obj.maxOutputTokens ?? 1024,
+    maxOutputTokens: obj.maxOutputTokens ?? 4000,
     topP: obj.topP ?? 0.7,
     topK: obj.topK ?? 5,
   }))
@@ -200,7 +200,7 @@ const anthropicSchema = tConversationSchema
     modelLabel: null,
     promptPrefix: null,
     temperature: 1,
-    maxOutputTokens: 1024,
+    maxOutputTokens: 4000,
     topP: 0.7,
     topK: 5,
   }));
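
Both schema hunks (this one and the data-provider copy further down) rely on the same zod idiom: a `.transform()` that backfills defaults with `??`. A hedged sketch of that idiom, assuming zod and a pared-down field set (the real `tConversationSchema` has many more fields):

```ts
import { z } from 'zod';

// Pared-down stand-in for the Anthropic slice of tConversationSchema.
const anthropicDefaults = z
  .object({
    temperature: z.number().nullish(),
    maxOutputTokens: z.number().nullish(),
    topP: z.number().nullish(),
  })
  .transform((obj) => ({
    temperature: obj.temperature ?? 1,
    maxOutputTokens: obj.maxOutputTokens ?? 4000, // the new default
    topP: obj.topP ?? 0.7,
  }));

console.log(anthropicDefaults.parse({ temperature: 0.5 }));
// { temperature: 0.5, maxOutputTokens: 4000, topP: 0.7 }
```

Unlike the `||` fallback in the client payload, `??` only replaces `null`/`undefined`, so a falsy-but-defined value such as `0` would pass through unchanged.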

@@ -203,14 +203,14 @@ export default function Settings({ conversation, setOption, models, readonly }:
         <div className="flex justify-between">
           {localize('com_endpoint_max_output_tokens')}{' '}
           <small className="opacity-40">
-            ({localize('com_endpoint_default_with_num', '1024')})
+            ({localize('com_endpoint_default_with_num', '4000')})
           </small>
         <InputNumber
           id="max-tokens-int"
           disabled={readonly}
           value={maxOutputTokens}
           onChange={(value) => setMaxOutputTokens(Number(value))}
-          max={1024}
+          max={4000}
           min={1}
           step={1}
           controls={false}
@@ -225,10 +225,10 @@ export default function Settings({ conversation, setOption, models, readonly }:
         </div>
         <Slider
           disabled={readonly}
-          value={[maxOutputTokens ?? 1024]}
+          value={[maxOutputTokens ?? 4000]}
           onValueChange={(value) => setMaxOutputTokens(value[0])}
           doubleClickHandler={() => setMaxOutputTokens(0)}
-          max={1024}
+          max={4000}
           min={1}
           step={1}
           className="flex h-4 w-full"

@@ -267,7 +267,7 @@ export const anthropicSchema = tConversationSchema
     modelLabel: obj.modelLabel ?? null,
     promptPrefix: obj.promptPrefix ?? null,
     temperature: obj.temperature ?? 1,
-    maxOutputTokens: obj.maxOutputTokens ?? 1024,
+    maxOutputTokens: obj.maxOutputTokens ?? 4000,
     topP: obj.topP ?? 0.7,
     topK: obj.topK ?? 5,
   }))
@@ -276,7 +276,7 @@ export const anthropicSchema = tConversationSchema
     modelLabel: null,
     promptPrefix: null,
     temperature: 1,
-    maxOutputTokens: 1024,
+    maxOutputTokens: 4000,
     topP: 0.7,
     topK: 5,
   }));