🪙 feat: Update GPT-5.1 and GPT-5.2 Token Pricing (#11101)

Danny Avila · 2025-12-25 16:08:49 -05:00 · committed by GitHub
parent 7183223e59
commit d7a765ac4c
3 changed files with 47 additions and 0 deletions

@@ -113,6 +113,8 @@ const tokenValues = Object.assign(
    'gpt-4o-2024-05-13': { prompt: 5, completion: 15 },
    'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
    'gpt-5': { prompt: 1.25, completion: 10 },
    'gpt-5.1': { prompt: 1.25, completion: 10 },
    'gpt-5.2': { prompt: 1.75, completion: 14 },
    'gpt-5-nano': { prompt: 0.05, completion: 0.4 },
    'gpt-5-mini': { prompt: 0.25, completion: 2 },
    'gpt-5-pro': { prompt: 15, completion: 120 },
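The prompt and completion figures above appear to be USD per one million tokens: gpt-5.1 reuses the existing gpt-5 rate (1.25 / 10), while gpt-5.2 is priced higher (1.75 / 14). As a rough illustration of how such a rate table can be applied (a minimal sketch assuming per-million-token pricing, not the repository's actual billing code):

```ts
// Illustrative only: estimate the dollar cost of one request, assuming the
// tokenValues entries are USD per one million tokens (hypothetical helper).
const rates: Record<string, { prompt: number; completion: number }> = {
  'gpt-5.1': { prompt: 1.25, completion: 10 },
  'gpt-5.2': { prompt: 1.75, completion: 14 },
};

function estimateCostUSD(model: string, promptTokens: number, completionTokens: number): number {
  const r = rates[model];
  if (!r) {
    throw new Error(`no pricing entry for ${model}`);
  }
  return (promptTokens * r.prompt + completionTokens * r.completion) / 1_000_000;
}

// 2,000 prompt tokens + 500 completion tokens on gpt-5.2:
// (2000 * 1.75 + 500 * 14) / 1e6 ≈ $0.0105
console.log(estimateCostUSD('gpt-5.2', 2_000, 500));
```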

@@ -36,6 +36,19 @@ describe('getValueKey', () => {
    expect(getValueKey('gpt-5-0130')).toBe('gpt-5');
  });

  it('should return "gpt-5.1" for model name containing "gpt-5.1"', () => {
    expect(getValueKey('gpt-5.1')).toBe('gpt-5.1');
    expect(getValueKey('gpt-5.1-chat')).toBe('gpt-5.1');
    expect(getValueKey('gpt-5.1-codex')).toBe('gpt-5.1');
    expect(getValueKey('openai/gpt-5.1')).toBe('gpt-5.1');
  });

  it('should return "gpt-5.2" for model name containing "gpt-5.2"', () => {
    expect(getValueKey('gpt-5.2')).toBe('gpt-5.2');
    expect(getValueKey('gpt-5.2-chat')).toBe('gpt-5.2');
    expect(getValueKey('openai/gpt-5.2')).toBe('gpt-5.2');
  });

  it('should return "gpt-3.5-turbo-1106" for model name containing "gpt-3.5-turbo-1106"', () => {
    expect(getValueKey('gpt-3.5-turbo-1106-some-other-info')).toBe('gpt-3.5-turbo-1106');
    expect(getValueKey('openai/gpt-3.5-turbo-1106')).toBe('gpt-3.5-turbo-1106');
@@ -311,6 +324,34 @@ describe('getMultiplier', () => {
    );
  });

  it('should return the correct multiplier for gpt-5.1', () => {
    expect(getMultiplier({ model: 'gpt-5.1', tokenType: 'prompt' })).toBe(
      tokenValues['gpt-5.1'].prompt,
    );
    expect(getMultiplier({ model: 'gpt-5.1', tokenType: 'completion' })).toBe(
      tokenValues['gpt-5.1'].completion,
    );
    expect(getMultiplier({ model: 'openai/gpt-5.1', tokenType: 'prompt' })).toBe(
      tokenValues['gpt-5.1'].prompt,
    );
    expect(tokenValues['gpt-5.1'].prompt).toBe(1.25);
    expect(tokenValues['gpt-5.1'].completion).toBe(10);
  });

  it('should return the correct multiplier for gpt-5.2', () => {
    expect(getMultiplier({ model: 'gpt-5.2', tokenType: 'prompt' })).toBe(
      tokenValues['gpt-5.2'].prompt,
    );
    expect(getMultiplier({ model: 'gpt-5.2', tokenType: 'completion' })).toBe(
      tokenValues['gpt-5.2'].completion,
    );
    expect(getMultiplier({ model: 'openai/gpt-5.2', tokenType: 'prompt' })).toBe(
      tokenValues['gpt-5.2'].prompt,
    );
    expect(tokenValues['gpt-5.2'].prompt).toBe(1.75);
    expect(tokenValues['gpt-5.2'].completion).toBe(14);
  });

  it('should return the correct multiplier for gpt-4o', () => {
    const valueKey = getValueKey('gpt-4o-2024-08-06');
    expect(getMultiplier({ valueKey, tokenType: 'prompt' })).toBe(tokenValues['gpt-4o'].prompt);
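The new tests assert that prefixed or suffixed identifiers such as 'openai/gpt-5.1' and 'gpt-5.1-codex' still resolve to the 'gpt-5.1' and 'gpt-5.2' pricing keys. The actual getValueKey and getMultiplier implementations are not part of this diff; the sketch below only mirrors the matching behaviour the tests describe, ordering keys so the more specific 'gpt-5.1' / 'gpt-5.2' entries win over plain 'gpt-5':

```ts
// Sketch of the substring matching the tests exercise; this is not the
// repository's getValueKey, just an illustration of the expected behaviour.
const pricedKeys = ['gpt-5.2', 'gpt-5.1', 'gpt-5-pro', 'gpt-5-mini', 'gpt-5-nano', 'gpt-5'];

function resolveValueKey(model: string): string | undefined {
  // More specific keys come first so 'gpt-5.1-codex' is not swallowed by 'gpt-5'.
  return pricedKeys.find((key) => model.includes(key));
}

console.log(resolveValueKey('openai/gpt-5.1')); // 'gpt-5.1'
console.log(resolveValueKey('gpt-5.2-chat'));   // 'gpt-5.2'
console.log(resolveValueKey('gpt-5-0130'));     // 'gpt-5'
```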

@@ -21,6 +21,8 @@ const openAIModels = {
  'gpt-4.1-mini': 1047576,
  'gpt-4.1-nano': 1047576,
  'gpt-5': 400000,
  'gpt-5.1': 400000,
  'gpt-5.2': 400000,
  'gpt-5-mini': 400000,
  'gpt-5-nano': 400000,
  'gpt-5-pro': 400000,
@@ -308,6 +310,8 @@ export const modelMaxOutputs = {
  'o1-mini': 65136, // -500 from max: 65,536
  'o1-preview': 32268, // -500 from max: 32,768
  'gpt-5': 128000,
  'gpt-5.1': 128000,
  'gpt-5.2': 128000,
  'gpt-5-mini': 128000,
  'gpt-5-nano': 128000,
  'gpt-5-pro': 128000,
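Both new models reuse gpt-5's 400,000-token context window and 128,000-token output cap. Assuming these maps are consulted the way their names suggest, a completion budget would be bounded by both figures; the helper below is a hypothetical sketch, not the repository's API:

```ts
// Hypothetical illustration: clamp a requested completion size against the
// model's output cap and the context window left after the prompt.
const maxContext: Record<string, number> = { 'gpt-5.1': 400000, 'gpt-5.2': 400000 };
const maxOutput: Record<string, number> = { 'gpt-5.1': 128000, 'gpt-5.2': 128000 };

function clampCompletion(model: string, promptTokens: number, requested: number): number {
  const window = maxContext[model] ?? 8192; // assumed fallback, not from the repo
  const cap = maxOutput[model] ?? 4096;     // assumed fallback, not from the repo
  return Math.max(0, Math.min(requested, cap, window - promptTokens));
}

// A 200,000-token request on gpt-5.2 with a 50,000-token prompt
// is limited by the 128,000-token output cap.
console.log(clampCompletion('gpt-5.2', 50_000, 200_000)); // 128000
```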