fix(Anthropic): Correct Payload & Increase Default Token Size 🔧 (#933)

* fix: don't pass unnecessary fields to anthropic payload

* fix: increase maxOutputTokens range

* chore: remove debugging mode
This commit is contained in:
Danny Avila 2023-09-12 11:41:15 -04:00 committed by GitHub
parent dee5888280
commit 4d89adfc57
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 14 additions and 13 deletions

View file

@@ -268,13 +268,14 @@ class AnthropicClient extends BaseClient {
};
let text = '';
const { model, stream, maxOutputTokens, ...rest } = this.modelOptions;
const requestOptions = {
prompt: payload,
model: this.modelOptions.model,
stream: this.modelOptions.stream || true,
max_tokens_to_sample: this.modelOptions.maxOutputTokens || 1500,
model,
stream: stream || true,
max_tokens_to_sample: maxOutputTokens || 1500,
metadata,
...modelOptions,
...rest,
};
if (this.options.debug) {
console.log('AnthropicClient: requestOptions');

View file

@@ -91,7 +91,7 @@ router.post(
let response = await client.sendMessage(text, {
getIds,
debug: false,
// debug: true,
user: req.user.id,
conversationId,
parentMessageId,

View file

@@ -191,7 +191,7 @@ const anthropicSchema = tConversationSchema
modelLabel: obj.modelLabel ?? null,
promptPrefix: obj.promptPrefix ?? null,
temperature: obj.temperature ?? 1,
maxOutputTokens: obj.maxOutputTokens ?? 1024,
maxOutputTokens: obj.maxOutputTokens ?? 4000,
topP: obj.topP ?? 0.7,
topK: obj.topK ?? 5,
}))
@@ -200,7 +200,7 @@ const anthropicSchema = tConversationSchema
modelLabel: null,
promptPrefix: null,
temperature: 1,
maxOutputTokens: 1024,
maxOutputTokens: 4000,
topP: 0.7,
topK: 5,
}));