fix(Anthropic): Correct Payload & Increase Default Token Size 🔧 (#933)

* fix: don't pass unnecessary fields to anthropic payload

* fix: increase maxOutputTokens range

* chore: remove debugging mode
This commit is contained in:
Danny Avila 2023-09-12 11:41:15 -04:00 committed by GitHub
parent dee5888280
commit 4d89adfc57
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 14 additions and 13 deletions

View file

@@ -268,13 +268,14 @@ class AnthropicClient extends BaseClient {
};
let text = '';
const { model, stream, maxOutputTokens, ...rest } = this.modelOptions;
const requestOptions = {
prompt: payload,
model: this.modelOptions.model,
stream: this.modelOptions.stream || true,
max_tokens_to_sample: this.modelOptions.maxOutputTokens || 1500,
model,
stream: stream || true,
max_tokens_to_sample: maxOutputTokens || 1500,
metadata,
...modelOptions,
...rest,
};
if (this.options.debug) {
console.log('AnthropicClient: requestOptions');

View file

@@ -91,7 +91,7 @@ router.post(
let response = await client.sendMessage(text, {
getIds,
debug: false,
// debug: true,
user: req.user.id,
conversationId,
parentMessageId,

View file

@@ -191,7 +191,7 @@ const anthropicSchema = tConversationSchema
modelLabel: obj.modelLabel ?? null,
promptPrefix: obj.promptPrefix ?? null,
temperature: obj.temperature ?? 1,
maxOutputTokens: obj.maxOutputTokens ?? 1024,
maxOutputTokens: obj.maxOutputTokens ?? 4000,
topP: obj.topP ?? 0.7,
topK: obj.topK ?? 5,
}))
@@ -200,7 +200,7 @@ const anthropicSchema = tConversationSchema
modelLabel: null,
promptPrefix: null,
temperature: 1,
maxOutputTokens: 1024,
maxOutputTokens: 4000,
topP: 0.7,
topK: 5,
}));

View file

@@ -203,14 +203,14 @@ export default function Settings({ conversation, setOption, models, readonly }:
<div className="flex justify-between">
{localize('com_endpoint_max_output_tokens')}{' '}
<small className="opacity-40">
({localize('com_endpoint_default_with_num', '1024')})
({localize('com_endpoint_default_with_num', '4000')})
</small>
<InputNumber
id="max-tokens-int"
disabled={readonly}
value={maxOutputTokens}
onChange={(value) => setMaxOutputTokens(Number(value))}
max={1024}
max={4000}
min={1}
step={1}
controls={false}
@ -225,10 +225,10 @@ export default function Settings({ conversation, setOption, models, readonly }:
</div>
<Slider
disabled={readonly}
value={[maxOutputTokens ?? 1024]}
value={[maxOutputTokens ?? 4000]}
onValueChange={(value) => setMaxOutputTokens(value[0])}
doubleClickHandler={() => setMaxOutputTokens(0)}
max={1024}
max={4000}
min={1}
step={1}
className="flex h-4 w-full"

View file

@@ -267,7 +267,7 @@ export const anthropicSchema = tConversationSchema
modelLabel: obj.modelLabel ?? null,
promptPrefix: obj.promptPrefix ?? null,
temperature: obj.temperature ?? 1,
maxOutputTokens: obj.maxOutputTokens ?? 1024,
maxOutputTokens: obj.maxOutputTokens ?? 4000,
topP: obj.topP ?? 0.7,
topK: obj.topK ?? 5,
}))
@@ -276,7 +276,7 @@ export const anthropicSchema = tConversationSchema
modelLabel: null,
promptPrefix: null,
temperature: 1,
maxOutputTokens: 1024,
maxOutputTokens: 4000,
topP: 0.7,
topK: 5,
}));