🚀 feat: GPT-4.5, Anthropic Tool Header, and OpenAPI Ref Resolution (#6118)

* 🔧 refactor: Update settings to use 'as const' for improved type safety and make gpt-4o-mini the default model (cheapest option)

* 📖 docs: Update README to reflect support for GPT-4.5 in the image analysis feature

* 🔧 refactor: Update model handling to use default settings and improve encoding logic

* 🔧 refactor: Enhance model version extraction logic for improved compatibility with future GPT and omni models

* feat: GPT-4.5 tx/token update, vision support

* fix: $ref resolution logic in OpenAPI handling

* feat: add new 'anthropic-beta' header for Claude 3.7 to enable token-efficient tool use; ref: https://docs.anthropic.com/en/docs/build-with-claude/tool-use/token-efficient-tool-use
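
A minimal sketch of how that header can be attached, assuming the official '@anthropic-ai/sdk' client: the beta value 'token-efficient-tools-2025-02-19' comes from the linked Anthropic docs, while the helper name, model check, and client wiring below are illustrative assumptions rather than the exact code in this commit.

import Anthropic from '@anthropic-ai/sdk';

// Hypothetical helper: only Claude 3.7 models opt into the token-efficient tools beta.
const getClaudeHeaders = (model: string): Record<string, string> | undefined =>
  /claude-3[-.]7/.test(model)
    ? { 'anthropic-beta': 'token-efficient-tools-2025-02-19' }
    : undefined;

const model = 'claude-3-7-sonnet-latest'; // example model name
const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  defaultHeaders: getClaudeHeaders(model),
});

Passing the header per request instead of as a client default would work just as well; the client-level option simply keeps the model check in one place.
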
Danny Avila 2025-02-28 12:19:21 -05:00 committed by GitHub
parent 9802629848
commit 2293cd667e
15 changed files with 337 additions and 148 deletions

View file

@ -22,8 +22,8 @@ export type ParametersSchema = {
export type OpenAPISchema = OpenAPIV3.SchemaObject &
ParametersSchema & {
items?: OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject;
};
items?: OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject;
};
export type ApiKeyCredentials = {
api_key: string;
@ -43,8 +43,8 @@ export type Credentials = ApiKeyCredentials | OAuthCredentials;
type MediaTypeObject =
| undefined
| {
[media: string]: OpenAPIV3.MediaTypeObject | undefined;
};
[media: string]: OpenAPIV3.MediaTypeObject | undefined;
};
type RequestBodyObject = Omit<OpenAPIV3.RequestBodyObject, 'content'> & {
content: MediaTypeObject;
@ -358,19 +358,29 @@ export class ActionRequest {
}
}
export function resolveRef(
schema: OpenAPIV3.SchemaObject | OpenAPIV3.ReferenceObject | RequestBodyObject,
components?: OpenAPIV3.ComponentsObject,
): OpenAPIV3.SchemaObject {
if ('$ref' in schema && components) {
const refPath = schema.$ref.replace(/^#\/components\/schemas\//, '');
const resolvedSchema = components.schemas?.[refPath];
if (!resolvedSchema) {
throw new Error(`Reference ${schema.$ref} not found`);
export function resolveRef<
T extends
| OpenAPIV3.ReferenceObject
| OpenAPIV3.SchemaObject
| OpenAPIV3.ParameterObject
| OpenAPIV3.RequestBodyObject,
>(obj: T, components?: OpenAPIV3.ComponentsObject): Exclude<T, OpenAPIV3.ReferenceObject> {
if ('$ref' in obj && components) {
const refPath = obj.$ref.replace(/^#\/components\//, '').split('/');
let resolved: unknown = components as Record<string, unknown>;
for (const segment of refPath) {
if (typeof resolved === 'object' && resolved !== null && segment in resolved) {
resolved = (resolved as Record<string, unknown>)[segment];
} else {
throw new Error(`Could not resolve reference: ${obj.$ref}`);
}
}
return resolveRef(resolvedSchema, components);
return resolveRef(resolved as typeof obj, components) as Exclude<T, OpenAPIV3.ReferenceObject>;
}
return schema as OpenAPIV3.SchemaObject;
return obj as Exclude<T, OpenAPIV3.ReferenceObject>;
}
function sanitizeOperationId(input: string) {
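
To see what the generic resolveRef gains over the old schemas-only lookup, here is a hedged usage sketch; the component and parameter names (UserId, Id, userId) are invented for illustration, and 'openapi-types' is assumed as the source of the OpenAPIV3 types used in this file.

import type { OpenAPIV3 } from 'openapi-types';

// Hypothetical components section; only the shape matters.
const components: OpenAPIV3.ComponentsObject = {
  parameters: {
    UserId: {
      name: 'userId',
      in: 'path',
      required: true,
      schema: { $ref: '#/components/schemas/Id' },
    },
  },
  schemas: {
    Id: { type: 'string' },
  },
};

// The old implementation stripped only '#/components/schemas/' and indexed into
// components.schemas, so a reference into components.parameters could not resolve.
// The new version splits the path and walks whichever component section it names.
const param = resolveRef(
  { $ref: '#/components/parameters/UserId' } as OpenAPIV3.ReferenceObject | OpenAPIV3.ParameterObject,
  components,
); // -> the UserId ParameterObject

// Nested refs are resolved in a second step, mirroring the parameter loop further down.
const paramSchema = resolveRef(
  param.schema as OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject,
  components,
); // -> { type: 'string' }
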
@ -399,7 +409,7 @@ export function openapiToFunction(
const operationObj = operation as OpenAPIV3.OperationObject & {
'x-openai-isConsequential'?: boolean;
} & {
'x-strict'?: boolean
'x-strict'?: boolean;
};
// Operation ID is used as the function name
@ -415,15 +425,25 @@ export function openapiToFunction(
};
if (operationObj.parameters) {
for (const param of operationObj.parameters) {
const paramObj = param as OpenAPIV3.ParameterObject;
const resolvedSchema = resolveRef(
{ ...paramObj.schema } as OpenAPIV3.ReferenceObject | OpenAPIV3.SchemaObject,
for (const param of operationObj.parameters ?? []) {
const resolvedParam = resolveRef(
param,
openapiSpec.components,
);
parametersSchema.properties[paramObj.name] = resolvedSchema;
if (paramObj.required === true) {
parametersSchema.required.push(paramObj.name);
) as OpenAPIV3.ParameterObject;
const paramName = resolvedParam.name;
if (!paramName || !resolvedParam.schema) {
continue;
}
const paramSchema = resolveRef(
resolvedParam.schema,
openapiSpec.components,
) as OpenAPIV3.SchemaObject;
parametersSchema.properties[paramName] = paramSchema;
if (resolvedParam.required) {
parametersSchema.required.push(paramName);
}
}
}
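
A hedged sketch of what the reworked loop now tolerates, reusing the components from the earlier sketch (parameter names invented): a parameter supplied only as a $ref is resolved first, its schema is resolved second, and anything still missing a name or schema after resolution is skipped instead of being keyed under undefined.

// Hypothetical operation parameters.
const operationParams: NonNullable<OpenAPIV3.OperationObject['parameters']> = [
  { $ref: '#/components/parameters/UserId' },                  // ref-only parameter, previously unresolvable
  { name: 'limit', in: 'query', schema: { type: 'integer' } }, // inline and optional
];

// After the loop (with those components), parametersSchema would hold:
//   properties: { userId: { type: 'string' }, limit: { type: 'integer' } }
//   required:   ['userId']
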
@ -446,7 +466,12 @@ export function openapiToFunction(
}
}
const functionSignature = new FunctionSignature(operationId, description, parametersSchema, isStrict);
const functionSignature = new FunctionSignature(
operationId,
description,
parametersSchema,
isStrict,
);
functionSignatures.push(functionSignature);
const actionRequest = new ActionRequest(
@ -544,4 +569,4 @@ export function validateAndParseOpenAPISpec(specString: string): ValidationResul
console.error(error);
return { status: false, message: 'Error parsing OpenAPI spec.' };
}
}
}

View file

@ -15,6 +15,7 @@ export const defaultRetrievalModels = [
'o1-preview',
'o1-mini-2024-09-12',
'o1-mini',
'o3-mini',
'chatgpt-4o-latest',
'gpt-4o-2024-05-13',
'gpt-4o-2024-08-06',
@ -651,6 +652,8 @@ export const alternateName = {
const sharedOpenAIModels = [
'gpt-4o-mini',
'gpt-4o',
'gpt-4.5-preview',
'gpt-4.5-preview-2025-02-27',
'gpt-3.5-turbo',
'gpt-3.5-turbo-0125',
'gpt-4-turbo',
@ -723,7 +726,7 @@ export const bedrockModels = [
export const defaultModels = {
[EModelEndpoint.azureAssistants]: sharedOpenAIModels,
[EModelEndpoint.assistants]: ['chatgpt-4o-latest', ...sharedOpenAIModels],
[EModelEndpoint.assistants]: [...sharedOpenAIModels, 'chatgpt-4o-latest'],
[EModelEndpoint.agents]: sharedOpenAIModels, // TODO: Add agent models (agentsModels)
[EModelEndpoint.google]: [
// Shared Google Models between Vertex AI & Gen AI
@ -742,8 +745,8 @@ export const defaultModels = {
],
[EModelEndpoint.anthropic]: sharedAnthropicModels,
[EModelEndpoint.openAI]: [
'chatgpt-4o-latest',
...sharedOpenAIModels,
'chatgpt-4o-latest',
'gpt-4-vision-preview',
'gpt-3.5-turbo-instruct-0914',
'gpt-3.5-turbo-instruct',
@ -808,6 +811,7 @@ export const supportsBalanceCheck = {
};
export const visionModels = [
'gpt-4.5',
'gpt-4o',
'gpt-4o-mini',
'o1',

View file

@ -128,7 +128,6 @@ export const envVarRegex = /^\${(.+)}$/;
export function extractEnvVariable(value: string) {
const envVarMatch = value.match(envVarRegex);
if (envVarMatch) {
// eslint-disable-next-line @typescript-eslint/strict-boolean-expressions
return process.env[envVarMatch[1]] || value;
}
return value;
@ -211,6 +210,29 @@ export const parseConvo = ({
return convo;
};
/** Match GPT followed by digit, optional decimal, and optional suffix
*
* Examples: gpt-4, gpt-4o, gpt-4.5, gpt-5a, etc. */
const extractGPTVersion = (modelStr: string): string => {
const gptMatch = modelStr.match(/gpt-(\d+(?:\.\d+)?)([a-z])?/i);
if (gptMatch) {
const version = gptMatch[1];
const suffix = gptMatch[2] || '';
return `GPT-${version}${suffix}`;
}
return '';
};
/** Match omni models (o1, o3, etc.), "o" followed by a digit, possibly with decimal */
const extractOmniVersion = (modelStr: string): string => {
const omniMatch = modelStr.match(/\bo(\d+(?:\.\d+)?)\b/i);
if (omniMatch) {
const version = omniMatch[1];
return `o${version}`;
}
return '';
};
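
A few hedged input/output pairs for these two helpers (the model strings are examples, not an exhaustive list):

extractGPTVersion('gpt-4.5-preview-2025-02-27'); // 'GPT-4.5'
extractGPTVersion('gpt-4o-mini');                // 'GPT-4o'
extractGPTVersion('gpt-3.5-turbo');              // 'GPT-3.5'
extractOmniVersion('o3-mini');                   // 'o3'
extractOmniVersion('o1-preview');                // 'o1'
extractOmniVersion('gpt-4o');                    // '' (no standalone "o<digit>" token, so no match)
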
export const getResponseSender = (endpointOption: t.TEndpointOption): string => {
const {
model: _m,
@ -238,18 +260,13 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
return chatGptLabel;
} else if (modelLabel) {
return modelLabel;
} else if (model && /\bo1\b/i.test(model)) {
return 'o1';
} else if (model && /\bo3\b/i.test(model)) {
return 'o3';
} else if (model && model.includes('gpt-3')) {
return 'GPT-3.5';
} else if (model && model.includes('gpt-4o')) {
return 'GPT-4o';
} else if (model && model.includes('gpt-4')) {
return 'GPT-4';
} else if (model && extractOmniVersion(model)) {
return extractOmniVersion(model);
} else if (model && model.includes('mistral')) {
return 'Mistral';
} else if (model && model.includes('gpt-')) {
const gptVersion = extractGPTVersion(model);
return gptVersion || 'GPT';
}
return (alternateName[endpoint] as string | undefined) ?? 'ChatGPT';
}
@ -279,14 +296,13 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
return modelLabel;
} else if (chatGptLabel) {
return chatGptLabel;
} else if (model && extractOmniVersion(model)) {
return extractOmniVersion(model);
} else if (model && model.includes('mistral')) {
return 'Mistral';
} else if (model && model.includes('gpt-3')) {
return 'GPT-3.5';
} else if (model && model.includes('gpt-4o')) {
return 'GPT-4o';
} else if (model && model.includes('gpt-4')) {
return 'GPT-4';
} else if (model && model.includes('gpt-')) {
const gptVersion = extractGPTVersion(model);
return gptVersion || 'GPT';
} else if (modelDisplayLabel) {
return modelDisplayLabel;
}
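
Taken together, the sender label now follows the model family generically instead of the previous hard-coded o1/o3/gpt-3/gpt-4o/gpt-4 branches. A hedged sketch of the resulting behavior, with the endpoint option shapes simplified to just the fields that matter here:

getResponseSender({ endpoint: EModelEndpoint.openAI, model: 'gpt-4.5-preview' } as t.TEndpointOption); // 'GPT-4.5'
getResponseSender({ endpoint: EModelEndpoint.openAI, model: 'o3-mini' } as t.TEndpointOption);         // 'o3'
getResponseSender({ endpoint: EModelEndpoint.openAI, model: 'gpt-4o' } as t.TEndpointOption);          // 'GPT-4o'
getResponseSender({ endpoint: EModelEndpoint.openAI, model: 'mistral-large' } as t.TEndpointOption);   // 'Mistral'
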

View file

@ -179,34 +179,34 @@ export const isImageVisionTool = (tool: FunctionTool | FunctionToolCall) =>
export const openAISettings = {
model: {
default: 'gpt-4o',
default: 'gpt-4o-mini' as const,
},
temperature: {
min: 0,
max: 2,
step: 0.01,
default: 1,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 1 as const,
},
top_p: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
presence_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
frequency_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
resendFiles: {
default: true,
default: true as const,
},
maxContextTokens: {
default: undefined,
@ -215,72 +215,72 @@ export const openAISettings = {
default: undefined,
},
imageDetail: {
default: ImageDetail.auto,
min: 0,
max: 2,
step: 1,
default: ImageDetail.auto as const,
min: 0 as const,
max: 2 as const,
step: 1 as const,
},
};
export const googleSettings = {
model: {
default: 'gemini-1.5-flash-latest',
default: 'gemini-1.5-flash-latest' as const,
},
maxOutputTokens: {
min: 1,
max: 8192,
step: 1,
default: 8192,
min: 1 as const,
max: 8192 as const,
step: 1 as const,
default: 8192 as const,
},
temperature: {
min: 0,
max: 2,
step: 0.01,
default: 1,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 1 as const,
},
topP: {
min: 0,
max: 1,
step: 0.01,
default: 0.95,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 0.95 as const,
},
topK: {
min: 1,
max: 40,
step: 1,
default: 40,
min: 1 as const,
max: 40 as const,
step: 1 as const,
default: 40 as const,
},
};
const ANTHROPIC_MAX_OUTPUT = 128000;
const DEFAULT_MAX_OUTPUT = 8192;
const LEGACY_ANTHROPIC_MAX_OUTPUT = 4096;
const ANTHROPIC_MAX_OUTPUT = 128000 as const;
const DEFAULT_MAX_OUTPUT = 8192 as const;
const LEGACY_ANTHROPIC_MAX_OUTPUT = 4096 as const;
export const anthropicSettings = {
model: {
default: 'claude-3-5-sonnet-latest',
default: 'claude-3-5-sonnet-latest' as const,
},
temperature: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
promptCache: {
default: true,
default: true as const,
},
thinking: {
default: true,
default: true as const,
},
thinkingBudget: {
min: 1024,
step: 100,
max: 200000,
default: 2000,
min: 1024 as const,
step: 100 as const,
max: 200000 as const,
default: 2000 as const,
},
maxOutputTokens: {
min: 1,
min: 1 as const,
max: ANTHROPIC_MAX_OUTPUT,
step: 1,
step: 1 as const,
default: DEFAULT_MAX_OUTPUT,
reset: (modelName: string) => {
if (/claude-3[-.]5-sonnet/.test(modelName) || /claude-3[-.]7/.test(modelName)) {
@ -301,28 +301,28 @@ export const anthropicSettings = {
},
},
topP: {
min: 0,
max: 1,
step: 0.01,
default: 0.7,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 0.7 as const,
},
topK: {
min: 1,
max: 40,
step: 1,
default: 5,
min: 1 as const,
max: 40 as const,
step: 1 as const,
default: 5 as const,
},
resendFiles: {
default: true,
default: true as const,
},
maxContextTokens: {
default: undefined,
},
legacy: {
maxOutputTokens: {
min: 1,
min: 1 as const,
max: LEGACY_ANTHROPIC_MAX_OUTPUT,
step: 1,
step: 1 as const,
default: LEGACY_ANTHROPIC_MAX_OUTPUT,
},
},
@ -330,34 +330,34 @@ export const anthropicSettings = {
export const agentsSettings = {
model: {
default: 'gpt-3.5-turbo-test',
default: 'gpt-3.5-turbo-test' as const,
},
temperature: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
top_p: {
min: 0,
max: 1,
step: 0.01,
default: 1,
min: 0 as const,
max: 1 as const,
step: 0.01 as const,
default: 1 as const,
},
presence_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
frequency_penalty: {
min: 0,
max: 2,
step: 0.01,
default: 0,
min: 0 as const,
max: 2 as const,
step: 0.01 as const,
default: 0 as const,
},
resendFiles: {
default: true,
default: true as const,
},
maxContextTokens: {
default: undefined,
@ -366,7 +366,7 @@ export const agentsSettings = {
default: undefined,
},
imageDetail: {
default: ImageDetail.auto,
default: ImageDetail.auto as const,
},
};
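
Why the repeated 'as const': without it, TypeScript widens these literals to number/string/boolean, so consumers of the settings lose the exact defaults at the type level. A minimal sketch of the difference (variable names invented):

const widened = { default: 1, step: 0.01 };                   // { default: number; step: number }
const literal = { default: 1 as const, step: 0.01 as const }; // { default: 1; step: 0.01 }

// With the change above, openAISettings.model.default is typed as the literal
// 'gpt-4o-mini' rather than string, so downstream code can narrow on it directly.
type DefaultOpenAIModel = typeof openAISettings.model.default; // 'gpt-4o-mini'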