mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-03-07 16:42:38 +01:00
🤖 feat: GPT-5.4 and GPT-5.4-pro Context + Pricing (#12099)
Some checks are pending
Docker Dev Branch Images Build / build (Dockerfile, lc-dev, node) (push) Waiting to run
Docker Dev Branch Images Build / build (Dockerfile.multi, lc-dev-api, api-build) (push) Waiting to run
Docker Dev Images Build / build (Dockerfile, librechat-dev, node) (push) Waiting to run
Docker Dev Images Build / build (Dockerfile.multi, librechat-dev-api, api-build) (push) Waiting to run
Sync Locize Translations & Create Translation PR / Sync Translation Keys with Locize (push) Waiting to run
Sync Locize Translations & Create Translation PR / Create Translation PR on Version Published (push) Blocked by required conditions
Some checks are pending
Docker Dev Branch Images Build / build (Dockerfile, lc-dev, node) (push) Waiting to run
Docker Dev Branch Images Build / build (Dockerfile.multi, lc-dev-api, api-build) (push) Waiting to run
Docker Dev Images Build / build (Dockerfile, librechat-dev, node) (push) Waiting to run
Docker Dev Images Build / build (Dockerfile.multi, librechat-dev-api, api-build) (push) Waiting to run
Sync Locize Translations & Create Translation PR / Sync Translation Keys with Locize (push) Waiting to run
Sync Locize Translations & Create Translation PR / Create Translation PR on Version Published (push) Blocked by required conditions
* ✨ feat: Add support for new GPT-5.4 and GPT-5.4-pro models - Introduced new token values and cache settings for 'gpt-5.4' and 'gpt-5.4-pro' in the API model configurations. - Updated maximum output limits for the new models in the tokens utility. - Included 'gpt-5.4' and 'gpt-5.4-pro' in the shared OpenAI models list for consistent access across the application. * 🔧 update: Enhance GPT-5.4 and GPT-5.4-pro model configurations - Refined token pricing and cache settings for 'gpt-5.4' and 'gpt-5.4-pro' in the API model configurations. - Added tests for cache multipliers and maximum token limits for the new models. - Updated shared OpenAI models list to include 'gpt-5.4-thinking' and added a note for verifying pricing before release. * 🔧 update: Add clarification to token pricing for 'gpt-5.4-pro' - Added a comment to the 'gpt-5.4-pro' model configuration in tokens.ts to specify that it shares the same token window as 'gpt-5.4', enhancing clarity for future reference.
This commit is contained in:
parent
3b84cc048a
commit
a79f7cebd5
5 changed files with 100 additions and 24 deletions
|
|
@ -214,6 +214,25 @@ describe('getModelMaxTokens', () => {
|
|||
);
|
||||
});
|
||||
|
||||
test('should return correct tokens for gpt-5.4 matches', () => {
  // Every gpt-5.4 variant — bare, -thinking suffix, and provider-prefixed —
  // should resolve to the single openAI 'gpt-5.4' token limit.
  const expected = maxTokensMap[EModelEndpoint.openAI]['gpt-5.4'];
  for (const model of ['gpt-5.4', 'gpt-5.4-thinking', 'openai/gpt-5.4']) {
    expect(getModelMaxTokens(model)).toBe(expected);
  }
});
|
||||
|
||||
test('should return correct tokens for gpt-5.4-pro matches', () => {
  // Both the bare and the provider-prefixed spelling map to the
  // dedicated 'gpt-5.4-pro' entry (distinct from plain 'gpt-5.4').
  const expected = maxTokensMap[EModelEndpoint.openAI]['gpt-5.4-pro'];
  ['gpt-5.4-pro', 'openai/gpt-5.4-pro'].forEach((model) => {
    expect(getModelMaxTokens(model)).toBe(expected);
  });
});
|
||||
|
||||
test('should return correct tokens for Anthropic models', () => {
|
||||
const models = [
|
||||
'claude-2.1',
|
||||
|
|
@ -495,6 +514,8 @@ describe('getModelMaxTokens', () => {
|
|||
'gpt-5.1',
|
||||
'gpt-5.2',
|
||||
'gpt-5.3',
|
||||
'gpt-5.4',
|
||||
'gpt-5.4-pro',
|
||||
'gpt-5-mini',
|
||||
'gpt-5-nano',
|
||||
'gpt-5-pro',
|
||||
|
|
@ -804,6 +825,12 @@ describe('matchModelName', () => {
|
|||
expect(matchModelName('gpt-5.3-2025-03-01')).toBe('gpt-5.3');
|
||||
});
|
||||
|
||||
it('should return the closest matching key for gpt-5.4 matches', () => {
  // Table of [input model name, expected canonical key]: prefixed and
  // -thinking names collapse to 'gpt-5.4'; '-pro' keeps its own key.
  const cases = [
    ['openai/gpt-5.4', 'gpt-5.4'],
    ['gpt-5.4-thinking', 'gpt-5.4'],
    ['gpt-5.4-pro', 'gpt-5.4-pro'],
  ];
  for (const [input, expected] of cases) {
    expect(matchModelName(input)).toBe(expected);
  }
});
|
||||
|
||||
it('should return the input model name if no match is found - Google models', () => {
|
||||
expect(matchModelName('unknown-google-model', EModelEndpoint.google)).toBe(
|
||||
'unknown-google-model',
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue