Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-16 16:30:15 +01:00)
🚀 feat: GPT-4.5, Anthropic Tool Header, and OpenAPI Ref Resolution (#6118)
* 🔧 refactor: Update settings to use 'as const' for improved type safety and make gpt-4o-mini default model (cheapest)
* 📖 docs: Update README to reflect support for GPT-4.5 in image analysis feature
* 🔧 refactor: Update model handling to use default settings and improve encoding logic
* 🔧 refactor: Enhance model version extraction logic for improved compatibility with future GPT and omni models
* feat: GPT-4.5 tx/token update, vision support
* fix: $ref resolution logic in OpenAPI handling
* feat: add new 'anthropic-beta' header for Claude 3.7 to include token-efficient tools; ref: https://docs.anthropic.com/en/docs/build-with-claude/tool-use/token-efficient-tool-use
parent: 9802629848
commit: 2293cd667e
15 changed files with 337 additions and 148 deletions
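The last item in the commit message adds an 'anthropic-beta' header so Claude 3.7 can use token-efficient tools. The hunks for that change are not part of this excerpt, so the sketch below is only an illustration of how such a header could be attached to outgoing requests; the helper name, the model check, and the exact beta flag value are assumptions based on the Anthropic docs linked above, not taken from this diff.

```js
// Hypothetical helper (not from this diff): attach the token-efficient tools
// beta header when the target model is Claude 3.7.
function getClaudeBetaHeaders(model) {
  const headers = {};
  if (/claude-3[.-]7/.test(model)) {
    // Flag value per the Anthropic token-efficient tool use docs;
    // treated as an assumption here since the diff does not show it.
    headers['anthropic-beta'] = 'token-efficient-tools-2025-02-19';
  }
  return headers;
}

// Example: merge into the request headers before calling the API.
const headers = {
  'content-type': 'application/json',
  ...getClaudeBetaHeaders('claude-3-7-sonnet-20250219'),
};
```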
@@ -13,6 +13,7 @@ const openAIModels = {
   'gpt-4-32k-0613': 32758, // -10 from max
   'gpt-4-1106': 127500, // -500 from max
   'gpt-4-0125': 127500, // -500 from max
+  'gpt-4.5': 127500, // -500 from max
   'gpt-4o': 127500, // -500 from max
   'gpt-4o-mini': 127500, // -500 from max
   'gpt-4o-2024-05-13': 127500, // -500 from max
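The hunk above only adds the literal key 'gpt-4.5' to the token map, yet the tests in the next hunk expect 'gpt-4.5-preview' and 'openai/gpt-4.5-preview' to resolve to the same entry. That implies a partial-match lookup roughly like the sketch below; the function body is an illustrative assumption, and only the key names and expected values come from the diff.

```js
// Illustrative partial-match lookup (not the repository's actual implementation).
// Strips an optional provider prefix such as 'openai/', then falls back to the
// longest map key the model name starts with, so 'gpt-4.5-preview' -> 'gpt-4.5'.
function lookupMaxTokens(modelName, tokensMap) {
  const name = modelName.includes('/') ? modelName.split('/').pop() : modelName;
  if (tokensMap[name] != null) {
    return tokensMap[name];
  }
  const match = Object.keys(tokensMap)
    .filter((key) => name.startsWith(key))
    .sort((a, b) => b.length - a.length)[0];
  return match ? tokensMap[match] : undefined;
}

// lookupMaxTokens('openai/gpt-4.5-preview', { 'gpt-4.5': 127500 }) === 127500
```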
@@ -103,6 +103,16 @@ describe('getModelMaxTokens', () => {
     );
   });
+
+  test('should return correct tokens for gpt-4.5 matches', () => {
+    expect(getModelMaxTokens('gpt-4.5')).toBe(maxTokensMap[EModelEndpoint.openAI]['gpt-4.5']);
+    expect(getModelMaxTokens('gpt-4.5-preview')).toBe(
+      maxTokensMap[EModelEndpoint.openAI]['gpt-4.5'],
+    );
+    expect(getModelMaxTokens('openai/gpt-4.5-preview')).toBe(
+      maxTokensMap[EModelEndpoint.openAI]['gpt-4.5'],
+    );
+  });

   test('should return correct tokens for Anthropic models', () => {
     const models = [
       'claude-2.1',
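The commit message also mentions a fix to $ref resolution in OpenAPI handling, but none of the hunks for that change appear in this excerpt. As a generic illustration only (not LibreChat's implementation), resolving an internal reference such as '#/components/schemas/Pet' amounts to walking the spec object along the pointer segments and recursing while the target is itself a $ref:

```js
// Generic sketch of local $ref resolution in an OpenAPI document; assumed
// behavior for illustration, not the project's actual code.
function resolveRef(ref, spec) {
  if (!ref.startsWith('#/')) {
    throw new Error(`Only internal references are handled in this sketch: ${ref}`);
  }
  return ref
    .slice(2)
    .split('/')
    .reduce((node, segment) => (node == null ? undefined : node[segment]), spec);
}

function resolveSchema(schema, spec) {
  // Recurse in case the resolved target is itself a $ref (no cycle detection here).
  if (schema && typeof schema === 'object' && schema.$ref) {
    return resolveSchema(resolveRef(schema.$ref, spec), spec);
  }
  return schema;
}
```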