🚀 feat: GPT-4.5, Anthropic Tool Header, and OpenAPI Ref Resolution (#6118)

* 🔧 refactor: Update settings to use 'as const' for improved type safety and make gpt-4o-mini default model (cheapest)

* 📖 docs: Update README to reflect support for GPT-4.5 in image analysis feature

* 🔧 refactor: Update model handling to use default settings and improve encoding logic

* 🔧 refactor: Enhance model version extraction logic for improved compatibility with future GPT and omni models

* feat: GPT-4.5 tx/token update, vision support

* fix: $ref resolution logic in OpenAPI handling

* feat: add new 'anthropic-beta' header for Claude 3.7 to include token-efficient tools; ref: https://docs.anthropic.com/en/docs/build-with-claude/tool-use/token-efficient-tool-use
This commit is contained in:
Danny Avila 2025-02-28 12:19:21 -05:00 committed by GitHub
parent 9802629848
commit 2293cd667e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
15 changed files with 337 additions and 148 deletions

View file

@ -175,7 +175,7 @@ GOOGLE_KEY=user_provided
#============#
OPENAI_API_KEY=user_provided
# OPENAI_MODELS=o1,o1-mini,o1-preview,gpt-4o,chatgpt-4o-latest,gpt-4o-mini,gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k
# OPENAI_MODELS=o1,o1-mini,o1-preview,gpt-4o,gpt-4.5-preview,chatgpt-4o-latest,gpt-4o-mini,gpt-3.5-turbo-0125,gpt-3.5-turbo-0301,gpt-3.5-turbo,gpt-4,gpt-4-0613,gpt-4-vision-preview,gpt-3.5-turbo-0613,gpt-3.5-turbo-16k-0613,gpt-4-0125-preview,gpt-4-turbo-preview,gpt-4-1106-preview,gpt-3.5-turbo-1106,gpt-3.5-turbo-instruct,gpt-3.5-turbo-instruct-0914,gpt-3.5-turbo-16k
DEBUG_OPENAI=false

View file

@ -81,7 +81,7 @@
- [Fork Messages & Conversations](https://www.librechat.ai/docs/features/fork) for Advanced Context control
- 💬 **Multimodal & File Interactions**:
- Upload and analyze images with Claude 3, GPT-4o, o1, Llama-Vision, and Gemini 📸
- Upload and analyze images with Claude 3, GPT-4.5, GPT-4o, o1, Llama-Vision, and Gemini 📸
- Chat with Files using Custom Endpoints, OpenAI, Azure, Anthropic, AWS Bedrock, & Google 🗃️
- 🌎 **Multilingual UI**:

View file

@ -298,7 +298,9 @@ class OpenAIClient extends BaseClient {
}
getEncoding() {
return this.model?.includes('gpt-4o') ? 'o200k_base' : 'cl100k_base';
return this.modelOptions?.model && /gpt-4[^-\s]/.test(this.modelOptions.model)
? 'o200k_base'
: 'cl100k_base';
}
/**
@ -605,7 +607,7 @@ class OpenAIClient extends BaseClient {
}
initializeLLM({
model = 'gpt-4o-mini',
model = openAISettings.model.default,
modelName,
temperature = 0.2,
max_tokens,
@ -706,7 +708,7 @@ class OpenAIClient extends BaseClient {
const { OPENAI_TITLE_MODEL } = process.env ?? {};
let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? 'gpt-4o-mini';
let model = this.options.titleModel ?? OPENAI_TITLE_MODEL ?? openAISettings.model.default;
if (model === Constants.CURRENT_MODEL) {
model = this.modelOptions.model;
}
@ -899,7 +901,7 @@ ${convo}
let prompt;
// TODO: remove the gpt fallback and make it specific to endpoint
const { OPENAI_SUMMARY_MODEL = 'gpt-4o-mini' } = process.env ?? {};
const { OPENAI_SUMMARY_MODEL = openAISettings.model.default } = process.env ?? {};
let model = this.options.summaryModel ?? OPENAI_SUMMARY_MODEL;
if (model === Constants.CURRENT_MODEL) {
model = this.modelOptions.model;

View file

@ -79,6 +79,7 @@ const tokenValues = Object.assign(
'o1-mini': { prompt: 1.1, completion: 4.4 },
'o1-preview': { prompt: 15, completion: 60 },
o1: { prompt: 15, completion: 60 },
'gpt-4.5': { prompt: 75, completion: 150 },
'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
'gpt-4o': { prompt: 2.5, completion: 10 },
'gpt-4o-2024-05-13': { prompt: 5, completion: 15 },
@ -167,6 +168,8 @@ const getValueKey = (model, endpoint) => {
return 'o1-mini';
} else if (modelName.includes('o1')) {
return 'o1';
} else if (modelName.includes('gpt-4.5')) {
return 'gpt-4.5';
} else if (modelName.includes('gpt-4o-2024-05-13')) {
return 'gpt-4o-2024-05-13';
} else if (modelName.includes('gpt-4o-mini')) {

View file

@ -50,6 +50,16 @@ describe('getValueKey', () => {
expect(getValueKey('gpt-4-0125')).toBe('gpt-4-1106');
});
it('should return "gpt-4.5" for model type of "gpt-4.5"', () => {
expect(getValueKey('gpt-4.5-preview')).toBe('gpt-4.5');
expect(getValueKey('gpt-4.5-2024-08-06')).toBe('gpt-4.5');
expect(getValueKey('gpt-4.5-2024-08-06-0718')).toBe('gpt-4.5');
expect(getValueKey('openai/gpt-4.5')).toBe('gpt-4.5');
expect(getValueKey('openai/gpt-4.5-2024-08-06')).toBe('gpt-4.5');
expect(getValueKey('gpt-4.5-turbo')).toBe('gpt-4.5');
expect(getValueKey('gpt-4.5-0125')).toBe('gpt-4.5');
});
it('should return "gpt-4o" for model type of "gpt-4o"', () => {
expect(getValueKey('gpt-4o-2024-08-06')).toBe('gpt-4o');
expect(getValueKey('gpt-4o-2024-08-06-0718')).toBe('gpt-4o');

View file

@ -48,7 +48,8 @@ function getClaudeHeaders(model, supportsCacheControl) {
};
} else if (/claude-3[-.]7/.test(model)) {
return {
'anthropic-beta': 'output-128k-2025-02-19,prompt-caching-2024-07-31',
'anthropic-beta':
'token-efficient-tools-2025-02-19,output-128k-2025-02-19,prompt-caching-2024-07-31',
};
} else {
return {

View file

@ -13,6 +13,7 @@ const openAIModels = {
'gpt-4-32k-0613': 32758, // -10 from max
'gpt-4-1106': 127500, // -500 from max
'gpt-4-0125': 127500, // -500 from max
'gpt-4.5': 127500, // -500 from max
'gpt-4o': 127500, // -500 from max
'gpt-4o-mini': 127500, // -500 from max
'gpt-4o-2024-05-13': 127500, // -500 from max

View file

@ -103,6 +103,16 @@ describe('getModelMaxTokens', () => {
);
});
test('should return correct tokens for gpt-4.5 matches', () => {
expect(getModelMaxTokens('gpt-4.5')).toBe(maxTokensMap[EModelEndpoint.openAI]['gpt-4.5']);
expect(getModelMaxTokens('gpt-4.5-preview')).toBe(
maxTokensMap[EModelEndpoint.openAI]['gpt-4.5'],
);
expect(getModelMaxTokens('openai/gpt-4.5-preview')).toBe(
maxTokensMap[EModelEndpoint.openAI]['gpt-4.5'],
);
});
test('should return correct tokens for Anthropic models', () => {
const models = [
'claude-2.1',

2
package-lock.json generated
View file

@ -41798,7 +41798,7 @@
},
"packages/data-provider": {
"name": "librechat-data-provider",
"version": "0.7.6993",
"version": "0.7.6994",
"license": "ISC",
"dependencies": {
"axios": "^1.7.7",

View file

@ -1,6 +1,6 @@
{
"name": "librechat-data-provider",
"version": "0.7.6993",
"version": "0.7.6994",
"description": "data services for librechat apps",
"main": "dist/index.js",
"module": "dist/index.es.js",

View file

@ -585,21 +585,99 @@ describe('resolveRef', () => {
openapiSpec.paths['/ai.chatgpt.render-flowchart']?.post
?.requestBody as OpenAPIV3.RequestBodyObject
).content['application/json'].schema;
expect(flowchartRequestRef).toBeDefined();
const resolvedFlowchartRequest = resolveRef(
flowchartRequestRef as OpenAPIV3.RequestBodyObject,
openapiSpec.components,
);
expect(resolvedFlowchartRequest).toBeDefined();
expect(resolvedFlowchartRequest.type).toBe('object');
const properties = resolvedFlowchartRequest.properties as FlowchartSchema;
expect(properties).toBeDefined();
expect(flowchartRequestRef).toBeDefined();
const resolvedSchemaObject = resolveRef(
flowchartRequestRef as OpenAPIV3.ReferenceObject,
openapiSpec.components,
) as OpenAPIV3.SchemaObject;
expect(resolvedSchemaObject).toBeDefined();
expect(resolvedSchemaObject.type).toBe('object');
expect(resolvedSchemaObject.properties).toBeDefined();
const properties = resolvedSchemaObject.properties as FlowchartSchema;
expect(properties.mermaid).toBeDefined();
expect(properties.mermaid.type).toBe('string');
});
});
// Tests for the generalized resolveRef: it must resolve $ref pointers under
// any #/components/* section (schemas, parameters, requestBodies), not only
// #/components/schemas.
describe('resolveRef general cases', () => {
// Minimal in-memory OpenAPI 3.0 document with one ref target of each kind;
// `satisfies` keeps literal inference while validating the document shape.
const spec = {
openapi: '3.0.0',
info: { title: 'TestSpec', version: '1.0.0' },
paths: {},
components: {
schemas: {
TestSchema: { type: 'string' },
},
parameters: {
// Parameter whose schema is itself a $ref, to test two-step resolution.
TestParam: {
name: 'myParam',
in: 'query',
required: false,
schema: { $ref: '#/components/schemas/TestSchema' },
},
},
requestBodies: {
// Request body whose JSON schema is a $ref, same two-step pattern.
TestRequestBody: {
content: {
'application/json': {
schema: { $ref: '#/components/schemas/TestSchema' },
},
},
},
},
},
} satisfies OpenAPIV3.Document;
it('resolves schema refs correctly', () => {
const schemaRef: OpenAPIV3.ReferenceObject = { $ref: '#/components/schemas/TestSchema' };
const resolvedSchema = resolveRef<OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject>(
schemaRef,
spec.components,
);
expect(resolvedSchema.type).toEqual('string');
});
it('resolves parameter refs correctly, then schema within parameter', () => {
const paramRef: OpenAPIV3.ReferenceObject = { $ref: '#/components/parameters/TestParam' };
const resolvedParam = resolveRef<OpenAPIV3.ReferenceObject | OpenAPIV3.ParameterObject>(
paramRef,
spec.components,
);
expect(resolvedParam.name).toEqual('myParam');
expect(resolvedParam.in).toEqual('query');
expect(resolvedParam.required).toBe(false);
// Second resolve: the parameter's schema is still a $ref at this point.
const paramSchema = resolveRef<OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject>(
resolvedParam.schema as OpenAPIV3.ReferenceObject,
spec.components,
);
expect(paramSchema.type).toEqual('string');
});
it('resolves requestBody refs correctly, then schema within requestBody', () => {
const requestBodyRef: OpenAPIV3.ReferenceObject = {
$ref: '#/components/requestBodies/TestRequestBody',
};
const resolvedRequestBody = resolveRef<OpenAPIV3.ReferenceObject | OpenAPIV3.RequestBodyObject>(
requestBodyRef,
spec.components,
);
expect(resolvedRequestBody.content['application/json']).toBeDefined();
// Second resolve: the request body's schema is still a $ref at this point.
const schemaInRequestBody = resolveRef<OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject>(
resolvedRequestBody.content['application/json'].schema as OpenAPIV3.ReferenceObject,
spec.components,
);
expect(schemaInRequestBody.type).toEqual('string');
});
});
describe('openapiToFunction', () => {
it('converts OpenAPI spec to function signatures and request builders', () => {
const { functionSignatures, requestBuilders } = openapiToFunction(getWeatherOpenapiSpec);
@ -1095,4 +1173,43 @@ describe('createURL', () => {
});
});
});
// End-to-end check that openapiToFunction resolves $ref-based operation
// parameters (here #/components/parameters/PathPoint) into the generated
// function signature, including the nested schema's type and pattern.
describe('openapiToFunction parameter refs resolution', () => {
// Spec modeled on api.weather.gov: a single GET operation whose only
// parameter is declared via $ref rather than inline.
const weatherSpec = {
openapi: '3.0.0',
info: { title: 'Weather', version: '1.0.0' },
servers: [{ url: 'https://api.weather.gov' }],
paths: {
'/points/{point}': {
get: {
operationId: 'getPoint',
parameters: [{ $ref: '#/components/parameters/PathPoint' }],
responses: { '200': { description: 'ok' } },
},
},
},
components: {
parameters: {
PathPoint: {
name: 'point',
in: 'path',
required: true,
// "lat,lon" pair, e.g. "39.74,-104.99"; signed decimals allowed.
schema: { type: 'string', pattern: '^(-?\\d+(?:\\.\\d+)?),(-?\\d+(?:\\.\\d+)?)$' },
},
},
},
} satisfies OpenAPIV3.Document;
it('correctly resolves $ref for parameters', () => {
const { functionSignatures } = openapiToFunction(weatherSpec, true);
const func = functionSignatures.find((sig) => sig.name === 'getPoint');
expect(func).toBeDefined();
// The ref'd parameter must surface by its resolved name and required flag.
expect(func?.parameters.properties).toHaveProperty('point');
expect(func?.parameters.required).toContain('point');
const paramSchema = func?.parameters.properties['point'] as OpenAPIV3.SchemaObject;
expect(paramSchema.type).toEqual('string');
expect(paramSchema.pattern).toEqual('^(-?\\d+(?:\\.\\d+)?),(-?\\d+(?:\\.\\d+)?)$');
});
});
});

View file

@ -358,19 +358,29 @@ export class ActionRequest {
}
}
export function resolveRef(
schema: OpenAPIV3.SchemaObject | OpenAPIV3.ReferenceObject | RequestBodyObject,
components?: OpenAPIV3.ComponentsObject,
): OpenAPIV3.SchemaObject {
if ('$ref' in schema && components) {
const refPath = schema.$ref.replace(/^#\/components\/schemas\//, '');
const resolvedSchema = components.schemas?.[refPath];
if (!resolvedSchema) {
throw new Error(`Reference ${schema.$ref} not found`);
export function resolveRef<
T extends
| OpenAPIV3.ReferenceObject
| OpenAPIV3.SchemaObject
| OpenAPIV3.ParameterObject
| OpenAPIV3.RequestBodyObject,
>(obj: T, components?: OpenAPIV3.ComponentsObject): Exclude<T, OpenAPIV3.ReferenceObject> {
if ('$ref' in obj && components) {
const refPath = obj.$ref.replace(/^#\/components\//, '').split('/');
let resolved: unknown = components as Record<string, unknown>;
for (const segment of refPath) {
if (typeof resolved === 'object' && resolved !== null && segment in resolved) {
resolved = (resolved as Record<string, unknown>)[segment];
} else {
throw new Error(`Could not resolve reference: ${obj.$ref}`);
}
return resolveRef(resolvedSchema, components);
}
return schema as OpenAPIV3.SchemaObject;
return resolveRef(resolved as typeof obj, components) as Exclude<T, OpenAPIV3.ReferenceObject>;
}
return obj as Exclude<T, OpenAPIV3.ReferenceObject>;
}
function sanitizeOperationId(input: string) {
@ -399,7 +409,7 @@ export function openapiToFunction(
const operationObj = operation as OpenAPIV3.OperationObject & {
'x-openai-isConsequential'?: boolean;
} & {
'x-strict'?: boolean
'x-strict'?: boolean;
};
// Operation ID is used as the function name
@ -415,15 +425,25 @@ export function openapiToFunction(
};
if (operationObj.parameters) {
for (const param of operationObj.parameters) {
const paramObj = param as OpenAPIV3.ParameterObject;
const resolvedSchema = resolveRef(
{ ...paramObj.schema } as OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject,
for (const param of operationObj.parameters ?? []) {
const resolvedParam = resolveRef(
param,
openapiSpec.components,
);
parametersSchema.properties[paramObj.name] = resolvedSchema;
if (paramObj.required === true) {
parametersSchema.required.push(paramObj.name);
) as OpenAPIV3.ParameterObject;
const paramName = resolvedParam.name;
if (!paramName || !resolvedParam.schema) {
continue;
}
const paramSchema = resolveRef(
resolvedParam.schema,
openapiSpec.components,
) as OpenAPIV3.SchemaObject;
parametersSchema.properties[paramName] = paramSchema;
if (resolvedParam.required) {
parametersSchema.required.push(paramName);
}
}
}
@ -446,7 +466,12 @@ export function openapiToFunction(
}
}
const functionSignature = new FunctionSignature(operationId, description, parametersSchema, isStrict);
const functionSignature = new FunctionSignature(
operationId,
description,
parametersSchema,
isStrict,
);
functionSignatures.push(functionSignature);
const actionRequest = new ActionRequest(

View file

@ -15,6 +15,7 @@ export const defaultRetrievalModels = [
'o1-preview',
'o1-mini-2024-09-12',
'o1-mini',
'o3-mini',
'chatgpt-4o-latest',
'gpt-4o-2024-05-13',
'gpt-4o-2024-08-06',
@ -651,6 +652,8 @@ export const alternateName = {
const sharedOpenAIModels = [
'gpt-4o-mini',
'gpt-4o',
'gpt-4.5-preview',
'gpt-4.5-preview-2025-02-27',
'gpt-3.5-turbo',
'gpt-3.5-turbo-0125',
'gpt-4-turbo',
@ -723,7 +726,7 @@ export const bedrockModels = [
export const defaultModels = {
[EModelEndpoint.azureAssistants]: sharedOpenAIModels,
[EModelEndpoint.assistants]: ['chatgpt-4o-latest', ...sharedOpenAIModels],
[EModelEndpoint.assistants]: [...sharedOpenAIModels, 'chatgpt-4o-latest'],
[EModelEndpoint.agents]: sharedOpenAIModels, // TODO: Add agent models (agentsModels)
[EModelEndpoint.google]: [
// Shared Google Models between Vertex AI & Gen AI
@ -742,8 +745,8 @@ export const defaultModels = {
],
[EModelEndpoint.anthropic]: sharedAnthropicModels,
[EModelEndpoint.openAI]: [
'chatgpt-4o-latest',
...sharedOpenAIModels,
'chatgpt-4o-latest',
'gpt-4-vision-preview',
'gpt-3.5-turbo-instruct-0914',
'gpt-3.5-turbo-instruct',
@ -808,6 +811,7 @@ export const supportsBalanceCheck = {
};
export const visionModels = [
'gpt-4.5',
'gpt-4o',
'gpt-4o-mini',
'o1',

View file

@ -128,7 +128,6 @@ export const envVarRegex = /^\${(.+)}$/;
export function extractEnvVariable(value: string) {
const envVarMatch = value.match(envVarRegex);
if (envVarMatch) {
// eslint-disable-next-line @typescript-eslint/strict-boolean-expressions
return process.env[envVarMatch[1]] || value;
}
return value;
@ -211,6 +210,29 @@ export const parseConvo = ({
return convo;
};
/**
 * Derive a display label such as "GPT-4", "GPT-4o", or "GPT-4.5" from a model
 * identifier string.
 *
 * Matches "gpt-" followed by a version number (optionally with one decimal
 * part) and an optional single trailing letter, e.g. gpt-4, gpt-4o, gpt-4.5,
 * gpt-5a. Returns an empty string when the model does not match.
 */
const extractGPTVersion = (modelStr: string): string => {
  const match = /gpt-(\d+(?:\.\d+)?)([a-z])?/i.exec(modelStr);
  if (!match) {
    return '';
  }
  const [, version, suffix] = match;
  return `GPT-${version}${suffix ?? ''}`;
};
/**
 * Derive an omni-model label ("o1", "o3", ...) from a model identifier:
 * a standalone "o" followed by digits, optionally with one decimal part.
 * The word boundaries prevent false matches on e.g. "gpt-4o".
 * Returns an empty string when the model is not an omni model.
 */
const extractOmniVersion = (modelStr: string): string => {
  const match = /\bo(\d+(?:\.\d+)?)\b/i.exec(modelStr);
  return match ? `o${match[1]}` : '';
};
export const getResponseSender = (endpointOption: t.TEndpointOption): string => {
const {
model: _m,
@ -238,18 +260,13 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
return chatGptLabel;
} else if (modelLabel) {
return modelLabel;
} else if (model && /\bo1\b/i.test(model)) {
return 'o1';
} else if (model && /\bo3\b/i.test(model)) {
return 'o3';
} else if (model && model.includes('gpt-3')) {
return 'GPT-3.5';
} else if (model && model.includes('gpt-4o')) {
return 'GPT-4o';
} else if (model && model.includes('gpt-4')) {
return 'GPT-4';
} else if (model && extractOmniVersion(model)) {
return extractOmniVersion(model);
} else if (model && model.includes('mistral')) {
return 'Mistral';
} else if (model && model.includes('gpt-')) {
const gptVersion = extractGPTVersion(model);
return gptVersion || 'GPT';
}
return (alternateName[endpoint] as string | undefined) ?? 'ChatGPT';
}
@ -279,14 +296,13 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
return modelLabel;
} else if (chatGptLabel) {
return chatGptLabel;
} else if (model && extractOmniVersion(model)) {
return extractOmniVersion(model);
} else if (model && model.includes('mistral')) {
return 'Mistral';
} else if (model && model.includes('gpt-3')) {
return 'GPT-3.5';
} else if (model && model.includes('gpt-4o')) {
return 'GPT-4o';
} else if (model && model.includes('gpt-4')) {
return 'GPT-4';
} else if (model && model.includes('gpt-')) {
const gptVersion = extractGPTVersion(model);
return gptVersion || 'GPT';
} else if (modelDisplayLabel) {
return modelDisplayLabel;
}

View file

@ -179,34 +179,34 @@ export const isImageVisionTool = (tool: FunctionTool | FunctionToolCall) =>
export const openAISettings = {
model: {
default: 'gpt-4o',
default: 'gpt-4o-mini' as const,
},
temperature: {
min: 0,
max: 2,
step: 0.01,
default: 1,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 1 as const,
},
top_p: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
presence_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
frequency_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
resendFiles: {
default: true,
default: true as const,
},
maxContextTokens: {
default: undefined,
@ -215,72 +215,72 @@ export const openAISettings = {
default: undefined,
},
imageDetail: {
default: ImageDetail.auto,
min: 0,
max: 2,
step: 1,
default: ImageDetail.auto as const,
min: 0 as const,
max: 2 as const,
step: 1 as const,
},
};
export const googleSettings = {
model: {
default: 'gemini-1.5-flash-latest',
default: 'gemini-1.5-flash-latest' as const,
},
maxOutputTokens: {
min: 1,
max: 8192,
step: 1,
default: 8192,
min: 1 as const,
max: 8192 as const,
step: 1 as const,
default: 8192 as const,
},
temperature: {
min: 0,
max: 2,
step: 0.01,
default: 1,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 1 as const,
},
topP: {
min: 0,
max: 1,
step: 0.01,
default: 0.95,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 0.95 as const,
},
topK: {
min: 1,
max: 40,
step: 1,
default: 40,
min: 1 as const,
max: 40 as const,
step: 1 as const,
default: 40 as const,
},
};
const ANTHROPIC_MAX_OUTPUT = 128000;
const DEFAULT_MAX_OUTPUT = 8192;
const LEGACY_ANTHROPIC_MAX_OUTPUT = 4096;
const ANTHROPIC_MAX_OUTPUT = 128000 as const;
const DEFAULT_MAX_OUTPUT = 8192 as const;
const LEGACY_ANTHROPIC_MAX_OUTPUT = 4096 as const;
export const anthropicSettings = {
model: {
default: 'claude-3-5-sonnet-latest',
default: 'claude-3-5-sonnet-latest' as const,
},
temperature: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
promptCache: {
default: true,
default: true as const,
},
thinking: {
default: true,
default: true as const,
},
thinkingBudget: {
min: 1024,
step: 100,
max: 200000,
default: 2000,
min: 1024 as const,
step: 100 as const,
max: 200000 as const,
default: 2000 as const,
},
maxOutputTokens: {
min: 1,
min: 1 as const,
max: ANTHROPIC_MAX_OUTPUT,
step: 1,
step: 1 as const,
default: DEFAULT_MAX_OUTPUT,
reset: (modelName: string) => {
if (/claude-3[-.]5-sonnet/.test(modelName) || /claude-3[-.]7/.test(modelName)) {
@ -301,28 +301,28 @@ export const anthropicSettings = {
},
},
topP: {
min: 0,
max: 1,
step: 0.01,
default: 0.7,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 0.7 as const,
},
topK: {
min: 1,
max: 40,
step: 1,
default: 5,
min: 1 as const,
max: 40 as const,
step: 1 as const,
default: 5 as const,
},
resendFiles: {
default: true,
default: true as const,
},
maxContextTokens: {
default: undefined,
},
legacy: {
maxOutputTokens: {
min: 1,
min: 1 as const,
max: LEGACY_ANTHROPIC_MAX_OUTPUT,
step: 1,
step: 1 as const,
default: LEGACY_ANTHROPIC_MAX_OUTPUT,
},
},
@ -330,34 +330,34 @@ export const anthropicSettings = {
export const agentsSettings = {
model: {
default: 'gpt-3.5-turbo-test',
default: 'gpt-3.5-turbo-test' as const,
},
temperature: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
top_p: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
presence_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
frequency_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
resendFiles: {
default: true,
default: true as const,
},
maxContextTokens: {
default: undefined,
@ -366,7 +366,7 @@ export const agentsSettings = {
default: undefined,
},
imageDetail: {
default: ImageDetail.auto,
default: ImageDetail.auto as const,
},
};