🤖 feat: Support o4-mini and o3 Models (#6928)

* feat: Add support for new OpenAI models (o4-mini, o3) and update related logic

* 🔧 fix: Rename 'resubmitFiles' to 'isResubmission' for consistency across types and hooks

* 🔧 fix: Replace hardcoded 'pending_req' with CacheKeys.PENDING_REQ for consistency in cache handling

* 🔧 fix: Update cache handling to use Time.ONE_MINUTE instead of hardcoded TTL and streamline imports

* 🔧 fix: Enhance message handling logic to correctly identify parent messages and streamline imports in useSSE
This commit is contained in:
Danny Avila 2025-04-17 00:40:26 -04:00 committed by GitHub
parent 88f4ad7c47
commit 52f146dd97
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
19 changed files with 69 additions and 53 deletions

View file

@@ -340,6 +340,15 @@ describe('getModelMaxTokens', () => {
expect(getModelMaxTokens('o1-preview-something')).toBe(o1PreviewTokens);
expect(getModelMaxTokens('openai/o1-preview-something')).toBe(o1PreviewTokens);
});
test('should return correct max context tokens for o4-mini and o3', () => {
  // Expected context-window sizes come straight from the shared token map
  // for the OpenAI endpoint; each model is checked both bare and with the
  // 'openai/' provider prefix.
  const expectedTokens = {
    'o4-mini': maxTokensMap[EModelEndpoint.openAI]['o4-mini'],
    o3: maxTokensMap[EModelEndpoint.openAI]['o3'],
  };
  for (const [model, tokens] of Object.entries(expectedTokens)) {
    expect(getModelMaxTokens(model)).toBe(tokens);
    expect(getModelMaxTokens(`openai/${model}`)).toBe(tokens);
  }
});
});
describe('matchModelName', () => {