🤖 feat: new Anthropic Default Settings / Increased Output Tokens for 3.5-Sonnet (#3407)
* chore: bump data-provider
* feat: Add anthropicSettings to endpointSettings
  Adds the `anthropicSettings` object to `endpointSettings` in `schemas.ts`, which allows configuring settings specific to the `anthropic` model endpoint.
* chore: adjust maxoutputtokens localization
* feat: Update AnthropicClient to use anthropicSettings for default model options and the increased-output beta header
* ci: new Anthropic tests
This commit is contained in:
parent 2ad097647c
commit 422d1a2c91

7 changed files with 262 additions and 69 deletions
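The upshot of the change: claude-3-5-sonnet models can now emit up to 8192 output tokens (enabled via a beta header), while every other Claude model keeps the legacy 4096-token ceiling. A minimal TypeScript sketch of the clamping rule, mirroring the `anthropicSettings.maxOutputTokens.set` helper added in packages/data-provider below (the standalone function here is for illustration only):

const ANTHROPIC_MAX_OUTPUT = 8192; // claude-3-5-sonnet ceiling (behind the beta header)
const LEGACY_ANTHROPIC_MAX_OUTPUT = 4096; // every other Claude model

function clampMaxOutputTokens(value: number, modelName: string): number {
  // Only claude-3-5-sonnet may exceed the legacy ceiling.
  if (!modelName.includes('claude-3-5-sonnet') && value > LEGACY_ANTHROPIC_MAX_OUTPUT) {
    return LEGACY_ANTHROPIC_MAX_OUTPUT;
  }
  return value;
}

clampMaxOutputTokens(8192, 'claude-3-5-sonnet-20240620'); // 8192
clampMaxOutputTokens(8192, 'claude-2'); // 4096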
api/app/clients/AnthropicClient.js

@@ -4,6 +4,7 @@ const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = requ
 const {
   Constants,
   EModelEndpoint,
+  anthropicSettings,
   getResponseSender,
   validateVisionModel,
 } = require('librechat-data-provider');
@@ -31,6 +32,8 @@ function delayBeforeRetry(attempts, baseDelay = 1000) {
   return new Promise((resolve) => setTimeout(resolve, baseDelay * attempts));
 }
 
+const { legacy } = anthropicSettings;
+
 class AnthropicClient extends BaseClient {
   constructor(apiKey, options = {}) {
     super(apiKey, options);

@@ -63,15 +66,20 @@ class AnthropicClient extends BaseClient {
     const modelOptions = this.options.modelOptions || {};
     this.modelOptions = {
       ...modelOptions,
-      // set some good defaults (check for undefined in some cases because they may be 0)
-      model: modelOptions.model || 'claude-1',
-      temperature: typeof modelOptions.temperature === 'undefined' ? 1 : modelOptions.temperature, // 0 - 1, 1 is default
-      topP: typeof modelOptions.topP === 'undefined' ? 0.7 : modelOptions.topP, // 0 - 1, default: 0.7
-      topK: typeof modelOptions.topK === 'undefined' ? 40 : modelOptions.topK, // 1-40, default: 40
-      stop: modelOptions.stop, // no stop method for now
+      model: modelOptions.model || anthropicSettings.model.default,
     };
 
     this.isClaude3 = this.modelOptions.model.includes('claude-3');
+    this.isLegacyOutput = !this.modelOptions.model.includes('claude-3-5-sonnet');
+
+    if (
+      this.isLegacyOutput &&
+      this.modelOptions.maxOutputTokens &&
+      this.modelOptions.maxOutputTokens > legacy.maxOutputTokens.default
+    ) {
+      this.modelOptions.maxOutputTokens = legacy.maxOutputTokens.default;
+    }
 
     this.useMessages = this.isClaude3 || !!this.options.attachments;
 
     this.defaultVisionModel = this.options.visionModel ?? 'claude-3-sonnet-20240229';
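As exercised by the new tests further down, setOptions now clamps over-limit values for legacy models at construction time. A minimal sketch (assuming the test file's `~` module alias resolves as in the repo):

import AnthropicClient from '~/app/clients/AnthropicClient';

const client = new AnthropicClient('test-api-key');
client.setOptions({ modelOptions: { model: 'claude-2', maxOutputTokens: 8192 } });
// client.modelOptions.maxOutputTokens is now 4096 (legacy.maxOutputTokens.default)

client.setOptions({ modelOptions: { model: 'claude-3-5-sonnet-20240620', maxOutputTokens: 8192 } });
// client.modelOptions.maxOutputTokens stays 8192 (no clamp for claude-3-5-sonnet)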
@@ -121,10 +129,11 @@ class AnthropicClient extends BaseClient {
 
   /**
    * Get the initialized Anthropic client.
+   * @param {Partial<Anthropic.ClientOptions>} requestOptions - The options for the client.
    * @returns {Anthropic} The Anthropic client instance.
    */
-  getClient() {
-    /** @type {Anthropic.default.RequestOptions} */
+  getClient(requestOptions) {
+    /** @type {Anthropic.ClientOptions} */
     const options = {
       fetch: this.fetch,
       apiKey: this.apiKey,
@@ -138,6 +147,12 @@ class AnthropicClient extends BaseClient {
       options.baseURL = this.options.reverseProxyUrl;
     }
 
+    if (requestOptions?.model && requestOptions.model.includes('claude-3-5-sonnet')) {
+      options.defaultHeaders = {
+        'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
+      };
+    }
+
     return new Anthropic(options);
   }
 
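Stand-alone, the new header logic amounts to this (a sketch using @anthropic-ai/sdk directly; `defaultHeaders` is a standard client option, and LibreChat builds the same options object as shown above):

import Anthropic from '@anthropic-ai/sdk';

// For claude-3-5-sonnet requests, getClient attaches the beta header that
// raises the output ceiling to 8192 tokens:
const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  defaultHeaders: { 'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15' },
});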
@@ -558,8 +573,6 @@ class AnthropicClient extends BaseClient {
     }
 
     logger.debug('modelOptions', { modelOptions });
 
-    const client = this.getClient();
-
     const metadata = {
       user_id: this.user,
     };
@@ -587,7 +600,7 @@ class AnthropicClient extends BaseClient {
 
     if (this.useMessages) {
       requestOptions.messages = payload;
-      requestOptions.max_tokens = maxOutputTokens || 1500;
+      requestOptions.max_tokens = maxOutputTokens || legacy.maxOutputTokens.default;
     } else {
       requestOptions.prompt = payload;
       requestOptions.max_tokens_to_sample = maxOutputTokens || 1500;
@@ -614,6 +627,7 @@ class AnthropicClient extends BaseClient {
     while (attempts < maxRetries) {
       let response;
       try {
+        const client = this.getClient(requestOptions);
         response = await this.createResponse(client, requestOptions);
 
         signal.addEventListener('abort', () => {
@@ -742,7 +756,11 @@ class AnthropicClient extends BaseClient {
     };
 
     try {
-      const response = await this.createResponse(this.getClient(), requestOptions, true);
+      const response = await this.createResponse(
+        this.getClient(requestOptions),
+        requestOptions,
+        true,
+      );
       let promptTokens = response?.usage?.input_tokens;
       let completionTokens = response?.usage?.output_tokens;
       if (!promptTokens) {
api/app/clients/specs/AnthropicClient.test.js

@@ -1,4 +1,6 @@
-const AnthropicClient = require('../AnthropicClient');
+const { anthropicSettings } = require('librechat-data-provider');
+const AnthropicClient = require('~/app/clients/AnthropicClient');
 
 const HUMAN_PROMPT = '\n\nHuman:';
 const AI_PROMPT = '\n\nAssistant:';
@@ -22,7 +24,7 @@ describe('AnthropicClient', () => {
     const options = {
       modelOptions: {
         model,
-        temperature: 0.7,
+        temperature: anthropicSettings.temperature.default,
       },
     };
     client = new AnthropicClient('test-api-key');
@@ -33,7 +35,42 @@ describe('AnthropicClient', () => {
     it('should set the options correctly', () => {
       expect(client.apiKey).toBe('test-api-key');
       expect(client.modelOptions.model).toBe(model);
-      expect(client.modelOptions.temperature).toBe(0.7);
+      expect(client.modelOptions.temperature).toBe(anthropicSettings.temperature.default);
+    });
+
+    it('should set legacy maxOutputTokens for non-Claude-3 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-2',
+          maxOutputTokens: anthropicSettings.maxOutputTokens.default,
+        },
+      });
+      expect(client.modelOptions.maxOutputTokens).toBe(
+        anthropicSettings.legacy.maxOutputTokens.default,
+      );
+    });
+
+    it('should not set maxOutputTokens if not provided', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-3',
+        },
+      });
+      expect(client.modelOptions.maxOutputTokens).toBeUndefined();
+    });
+
+    it('should not set legacy maxOutputTokens for Claude-3 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-3-opus-20240229',
+          maxOutputTokens: anthropicSettings.legacy.maxOutputTokens.default,
+        },
+      });
+      expect(client.modelOptions.maxOutputTokens).toBe(
+        anthropicSettings.legacy.maxOutputTokens.default,
+      );
     });
   });
@@ -136,4 +173,57 @@ describe('AnthropicClient', () => {
       expect(prompt).toContain('You are Claude-2');
     });
   });
+
+  describe('getClient', () => {
+    it('should set legacy maxOutputTokens for non-Claude-3 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-2',
+          maxOutputTokens: anthropicSettings.legacy.maxOutputTokens.default,
+        },
+      });
+      expect(client.modelOptions.maxOutputTokens).toBe(
+        anthropicSettings.legacy.maxOutputTokens.default,
+      );
+    });
+
+    it('should not set legacy maxOutputTokens for Claude-3 models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-3-opus-20240229',
+          maxOutputTokens: anthropicSettings.legacy.maxOutputTokens.default,
+        },
+      });
+      expect(client.modelOptions.maxOutputTokens).toBe(
+        anthropicSettings.legacy.maxOutputTokens.default,
+      );
+    });
+
+    it('should add beta header for claude-3-5-sonnet model', () => {
+      const client = new AnthropicClient('test-api-key');
+      const modelOptions = {
+        model: 'claude-3-5-sonnet-20240307',
+      };
+      client.setOptions({ modelOptions });
+      const anthropicClient = client.getClient(modelOptions);
+      expect(anthropicClient._options.defaultHeaders).toBeDefined();
+      expect(anthropicClient._options.defaultHeaders).toHaveProperty('anthropic-beta');
+      expect(anthropicClient._options.defaultHeaders['anthropic-beta']).toBe(
+        'max-tokens-3-5-sonnet-2024-07-15',
+      );
+    });
+
+    it('should not add beta header for other models', () => {
+      const client = new AnthropicClient('test-api-key');
+      client.setOptions({
+        modelOptions: {
+          model: 'claude-2',
+        },
+      });
+      const anthropicClient = client.getClient();
+      expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta');
+    });
+  });
 });
client/src/components/Endpoints/Settings/Anthropic.tsx

@@ -1,5 +1,5 @@
-import React from 'react';
 import TextareaAutosize from 'react-textarea-autosize';
+import { anthropicSettings } from 'librechat-data-provider';
 import type { TModelSelectProps, OnInputNumberChange } from '~/common';
 import {
   Input,
@@ -41,15 +41,31 @@ export default function Settings({ conversation, setOption, models, readonly }:
     return null;
   }
 
-  const setModel = setOption('model');
   const setModelLabel = setOption('modelLabel');
   const setPromptPrefix = setOption('promptPrefix');
   const setTemperature = setOption('temperature');
   const setTopP = setOption('topP');
   const setTopK = setOption('topK');
-  const setMaxOutputTokens = setOption('maxOutputTokens');
   const setResendFiles = setOption('resendFiles');
 
+  const setModel = (newModel: string) => {
+    const modelSetter = setOption('model');
+    const maxOutputSetter = setOption('maxOutputTokens');
+    if (maxOutputTokens) {
+      maxOutputSetter(anthropicSettings.maxOutputTokens.set(maxOutputTokens, newModel));
+    }
+    modelSetter(newModel);
+  };
+
+  const setMaxOutputTokens = (value: number) => {
+    const setter = setOption('maxOutputTokens');
+    if (model) {
+      setter(anthropicSettings.maxOutputTokens.set(value, model));
+    } else {
+      setter(value);
+    }
+  };
+
   return (
     <div className="grid grid-cols-5 gap-6">
       <div className="col-span-5 flex flex-col items-center justify-start gap-6 sm:col-span-3">
@@ -139,14 +155,16 @@ export default function Settings({ conversation, setOption, models, readonly }:
           <div className="flex justify-between">
             <Label htmlFor="temp-int" className="text-left text-sm font-medium">
               {localize('com_endpoint_temperature')}{' '}
-              <small className="opacity-40">({localize('com_endpoint_default')}: 1)</small>
+              <small className="opacity-40">
+                ({localize('com_endpoint_default')}: {anthropicSettings.temperature.default})
+              </small>
             </Label>
             <InputNumber
               id="temp-int"
               disabled={readonly}
               value={temperature}
               onChange={(value) => setTemperature(Number(value))}
-              max={1}
+              max={anthropicSettings.temperature.max}
               min={0}
               step={0.01}
               controls={false}
@@ -161,10 +179,10 @@ export default function Settings({ conversation, setOption, models, readonly }:
           </div>
           <Slider
             disabled={readonly}
-            value={[temperature ?? 1]}
+            value={[temperature ?? anthropicSettings.temperature.default]}
             onValueChange={(value) => setTemperature(value[0])}
-            doubleClickHandler={() => setTemperature(1)}
-            max={1}
+            doubleClickHandler={() => setTemperature(anthropicSettings.temperature.default)}
+            max={anthropicSettings.temperature.max}
             min={0}
             step={0.01}
             className="flex h-4 w-full"
@@ -178,7 +196,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
             <Label htmlFor="top-p-int" className="text-left text-sm font-medium">
               {localize('com_endpoint_top_p')}{' '}
               <small className="opacity-40">
-                ({localize('com_endpoint_default_with_num', '0.7')})
+                ({localize('com_endpoint_default_with_num', anthropicSettings.topP.default + '')})
               </small>
             </Label>
             <InputNumber
@@ -186,7 +204,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
               disabled={readonly}
               value={topP}
               onChange={(value) => setTopP(Number(value))}
-              max={1}
+              max={anthropicSettings.topP.max}
               min={0}
               step={0.01}
               controls={false}
@@ -203,8 +221,8 @@ export default function Settings({ conversation, setOption, models, readonly }:
             disabled={readonly}
             value={[topP ?? 0.7]}
             onValueChange={(value) => setTopP(value[0])}
-            doubleClickHandler={() => setTopP(1)}
-            max={1}
+            doubleClickHandler={() => setTopP(anthropicSettings.topP.default)}
+            max={anthropicSettings.topP.max}
             min={0}
             step={0.01}
             className="flex h-4 w-full"
@@ -219,7 +237,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
             <Label htmlFor="top-k-int" className="text-left text-sm font-medium">
               {localize('com_endpoint_top_k')}{' '}
               <small className="opacity-40">
-                ({localize('com_endpoint_default_with_num', '5')})
+                ({localize('com_endpoint_default_with_num', anthropicSettings.topK.default + '')})
               </small>
             </Label>
             <InputNumber
@@ -227,7 +245,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
               disabled={readonly}
               value={topK}
               onChange={(value) => setTopK(Number(value))}
-              max={40}
+              max={anthropicSettings.topK.max}
               min={1}
               step={0.01}
               controls={false}
@@ -244,8 +262,8 @@ export default function Settings({ conversation, setOption, models, readonly }:
             disabled={readonly}
             value={[topK ?? 5]}
             onValueChange={(value) => setTopK(value[0])}
-            doubleClickHandler={() => setTopK(0)}
-            max={40}
+            doubleClickHandler={() => setTopK(anthropicSettings.topK.default)}
+            max={anthropicSettings.topK.max}
             min={1}
             step={0.01}
             className="flex h-4 w-full"
@@ -258,16 +276,14 @@ export default function Settings({ conversation, setOption, models, readonly }:
           <div className="flex justify-between">
             <Label htmlFor="max-tokens-int" className="text-left text-sm font-medium">
               {localize('com_endpoint_max_output_tokens')}{' '}
-              <small className="opacity-40">
-                ({localize('com_endpoint_default_with_num', '4000')})
-              </small>
+              <small className="opacity-40">({anthropicSettings.maxOutputTokens.default})</small>
             </Label>
             <InputNumber
               id="max-tokens-int"
               disabled={readonly}
               value={maxOutputTokens}
               onChange={(value) => setMaxOutputTokens(Number(value))}
-              max={4000}
+              max={anthropicSettings.maxOutputTokens.max}
               min={1}
               step={1}
               controls={false}
@@ -282,10 +298,12 @@ export default function Settings({ conversation, setOption, models, readonly }:
           </div>
           <Slider
             disabled={readonly}
-            value={[maxOutputTokens ?? 4000]}
+            value={[maxOutputTokens ?? anthropicSettings.maxOutputTokens.default]}
             onValueChange={(value) => setMaxOutputTokens(value[0])}
-            doubleClickHandler={() => setMaxOutputTokens(0)}
-            max={4000}
+            doubleClickHandler={() =>
+              setMaxOutputTokens(anthropicSettings.maxOutputTokens.default)
+            }
+            max={anthropicSettings.maxOutputTokens.max}
             min={1}
             step={1}
             className="flex h-4 w-full"
client/src/localization/languages/Eng.ts

@@ -391,7 +391,7 @@ export default {
   com_endpoint_google_topk:
     'Top-k changes how the model selects tokens for output. A top-k of 1 means the selected token is the most probable among all tokens in the model\'s vocabulary (also called greedy decoding), while a top-k of 3 means that the next token is selected from among the 3 most probable tokens (using temperature).',
   com_endpoint_google_maxoutputtokens:
-    ' Maximum number of tokens that can be generated in the response. Specify a lower value for shorter responses and a higher value for longer responses.',
+    'Maximum number of tokens that can be generated in the response. Specify a lower value for shorter responses and a higher value for longer responses. Note: models may stop before reaching this maximum.',
   com_endpoint_google_custom_name_placeholder: 'Set a custom name for Google',
   com_endpoint_prompt_prefix_placeholder: 'Set custom instructions or context. Ignored if empty.',
   com_endpoint_instructions_assistants_placeholder:
@@ -439,7 +439,7 @@ export default {
   com_endpoint_anthropic_topk:
     'Top-k changes how the model selects tokens for output. A top-k of 1 means the selected token is the most probable among all tokens in the model\'s vocabulary (also called greedy decoding), while a top-k of 3 means that the next token is selected from among the 3 most probable tokens (using temperature).',
   com_endpoint_anthropic_maxoutputtokens:
-    'Maximum number of tokens that can be generated in the response. Specify a lower value for shorter responses and a higher value for longer responses.',
+    'Maximum number of tokens that can be generated in the response. Specify a lower value for shorter responses and a higher value for longer responses. Note: models may stop before reaching this maximum.',
   com_endpoint_anthropic_custom_name_placeholder: 'Set a custom name for Anthropic',
   com_endpoint_frequency_penalty: 'Frequency Penalty',
   com_endpoint_presence_penalty: 'Presence Penalty',
package-lock.json (generated, 2 changes)
@@ -29437,7 +29437,7 @@
     },
     "packages/data-provider": {
       "name": "librechat-data-provider",
-      "version": "0.7.2",
+      "version": "0.7.4",
       "license": "ISC",
       "dependencies": {
         "@types/js-yaml": "^4.0.9",
packages/data-provider/package.json

@@ -1,6 +1,6 @@
 {
   "name": "librechat-data-provider",
-  "version": "0.7.3",
+  "version": "0.7.4",
   "description": "data services for librechat apps",
   "main": "dist/index.js",
   "module": "dist/index.es.js",
packages/data-provider/src/schemas.ts

@@ -156,9 +156,70 @@ export const googleSettings = {
   },
 };
 
+const ANTHROPIC_MAX_OUTPUT = 8192;
+const LEGACY_ANTHROPIC_MAX_OUTPUT = 4096;
+export const anthropicSettings = {
+  model: {
+    default: 'claude-3-5-sonnet-20240620',
+  },
+  temperature: {
+    min: 0,
+    max: 1,
+    step: 0.01,
+    default: 1,
+  },
+  maxOutputTokens: {
+    min: 1,
+    max: ANTHROPIC_MAX_OUTPUT,
+    step: 1,
+    default: ANTHROPIC_MAX_OUTPUT,
+    reset: (modelName: string) => {
+      if (modelName.includes('claude-3-5-sonnet')) {
+        return ANTHROPIC_MAX_OUTPUT;
+      }
+
+      return 4096;
+    },
+    set: (value: number, modelName: string) => {
+      if (!modelName.includes('claude-3-5-sonnet') && value > LEGACY_ANTHROPIC_MAX_OUTPUT) {
+        return LEGACY_ANTHROPIC_MAX_OUTPUT;
+      }
+
+      return value;
+    },
+  },
+  topP: {
+    min: 0,
+    max: 1,
+    step: 0.01,
+    default: 0.7,
+  },
+  topK: {
+    min: 1,
+    max: 40,
+    step: 1,
+    default: 5,
+  },
+  resendFiles: {
+    default: true,
+  },
+  maxContextTokens: {
+    default: undefined,
+  },
+  legacy: {
+    maxOutputTokens: {
+      min: 1,
+      max: LEGACY_ANTHROPIC_MAX_OUTPUT,
+      step: 1,
+      default: LEGACY_ANTHROPIC_MAX_OUTPUT,
+    },
+  },
+};
+
 export const endpointSettings = {
   [EModelEndpoint.openAI]: openAISettings,
   [EModelEndpoint.google]: googleSettings,
+  [EModelEndpoint.anthropic]: anthropicSettings,
 };
 
 const google = endpointSettings[EModelEndpoint.google];
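A quick check of the two helpers; values follow directly from the definition above (a sketch, assuming the package re-exports `anthropicSettings` as the test diff suggests):

import { anthropicSettings } from 'librechat-data-provider';

anthropicSettings.maxOutputTokens.reset('claude-3-5-sonnet-20240620'); // 8192
anthropicSettings.maxOutputTokens.reset('claude-3-opus-20240229'); // 4096
anthropicSettings.maxOutputTokens.set(8192, 'claude-2'); // 4096 (clamped)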
@@ -576,34 +637,40 @@ export const anthropicSchema
     spec: true,
     maxContextTokens: true,
   })
-  .transform((obj) => ({
+  .transform((obj) => {
+    const model = obj.model ?? anthropicSettings.model.default;
+    return {
       ...obj,
-      model: obj.model ?? 'claude-1',
+      model,
       modelLabel: obj.modelLabel ?? null,
       promptPrefix: obj.promptPrefix ?? null,
-      temperature: obj.temperature ?? 1,
-      maxOutputTokens: obj.maxOutputTokens ?? 4000,
-      topP: obj.topP ?? 0.7,
-      topK: obj.topK ?? 5,
-      resendFiles: typeof obj.resendFiles === 'boolean' ? obj.resendFiles : true,
+      temperature: obj.temperature ?? anthropicSettings.temperature.default,
+      maxOutputTokens: obj.maxOutputTokens ?? anthropicSettings.maxOutputTokens.reset(model),
+      topP: obj.topP ?? anthropicSettings.topP.default,
+      topK: obj.topK ?? anthropicSettings.topK.default,
+      resendFiles:
+        typeof obj.resendFiles === 'boolean'
+          ? obj.resendFiles
+          : anthropicSettings.resendFiles.default,
       iconURL: obj.iconURL ?? undefined,
       greeting: obj.greeting ?? undefined,
       spec: obj.spec ?? undefined,
-      maxContextTokens: obj.maxContextTokens ?? undefined,
-  }))
+      maxContextTokens: obj.maxContextTokens ?? anthropicSettings.maxContextTokens.default,
+    };
+  })
   .catch(() => ({
-    model: 'claude-1',
+    model: anthropicSettings.model.default,
     modelLabel: null,
     promptPrefix: null,
-    temperature: 1,
-    maxOutputTokens: 4000,
-    topP: 0.7,
-    topK: 5,
-    resendFiles: true,
+    temperature: anthropicSettings.temperature.default,
+    maxOutputTokens: anthropicSettings.maxOutputTokens.default,
+    topP: anthropicSettings.topP.default,
+    topK: anthropicSettings.topK.default,
+    resendFiles: anthropicSettings.resendFiles.default,
     iconURL: undefined,
     greeting: undefined,
     spec: undefined,
-    maxContextTokens: undefined,
+    maxContextTokens: anthropicSettings.maxContextTokens.default,
   }));
 
 export const chatGPTBrowserSchema = tConversationSchema
@@ -835,19 +902,19 @@ export const compactAnthropicSchema
   })
   .transform((obj) => {
     const newObj: Partial<TConversation> = { ...obj };
-    if (newObj.temperature === 1) {
+    if (newObj.temperature === anthropicSettings.temperature.default) {
       delete newObj.temperature;
     }
-    if (newObj.maxOutputTokens === 4000) {
+    if (newObj.maxOutputTokens === anthropicSettings.legacy.maxOutputTokens.default) {
       delete newObj.maxOutputTokens;
     }
-    if (newObj.topP === 0.7) {
+    if (newObj.topP === anthropicSettings.topP.default) {
       delete newObj.topP;
     }
-    if (newObj.topK === 5) {
+    if (newObj.topK === anthropicSettings.topK.default) {
       delete newObj.topK;
     }
-    if (newObj.resendFiles === true) {
+    if (newObj.resendFiles === anthropicSettings.resendFiles.default) {
       delete newObj.resendFiles;
     }
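A sketch of what the compact transform does at the boundary, assuming `compactAnthropicSchema` picks these fields as the hunk above suggests: values equal to the defaults are dropped before a conversation is persisted or sent over the wire.

import { compactAnthropicSchema } from 'librechat-data-provider';

// topP and topK match the anthropicSettings defaults, so the transform deletes them:
compactAnthropicSchema.parse({ model: 'claude-2', topP: 0.7, topK: 5 });
// -> { model: 'claude-2' }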