🤖 feat: Gemini 3 Support (#10584)

* feat: Add support for gemini-3 model in token configurations and tests

* chore: Update @librechat/agents to version 3.0.26 in package.json and package-lock.json
commit 8b9afd5965 (parent 4c2719a37e)
Author: Danny Avila
Date: 2025-11-19 15:05:37 -05:00 (committed by GitHub)
7 changed files with 14 additions and 7 deletions

@@ -156,6 +156,7 @@ const tokenValues = Object.assign(
 'gemini-2.5-flash': { prompt: 0.3, completion: 2.5 },
 'gemini-2.5-flash-lite': { prompt: 0.1, completion: 0.4 },
 'gemini-2.5-pro': { prompt: 1.25, completion: 10 },
+'gemini-3': { prompt: 2, completion: 12 },
 'gemini-pro-vision': { prompt: 0.5, completion: 1.5 },
 grok: { prompt: 2.0, completion: 10.0 }, // Base pattern defaults to grok-2
 'grok-beta': { prompt: 5.0, completion: 15.0 },
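
To put the new rates in context: if the values above are read as USD per 1M tokens (the scale the existing Gemini entries suggest), a cost estimate for a gemini-3 call can be sketched as below. The estimateCost helper is illustrative only, not LibreChat's actual transaction/spend code.

// Minimal cost sketch, assuming prompt/completion rates are USD per 1M tokens.
// `estimateCost` is a hypothetical helper, not the project's spend logic.
const tokenValues = {
  'gemini-3': { prompt: 2, completion: 12 },
  'gemini-2.5-pro': { prompt: 1.25, completion: 10 },
};

function estimateCost(modelKey, promptTokens, completionTokens) {
  const rates = tokenValues[modelKey];
  if (!rates) return 0;
  return (promptTokens * rates.prompt + completionTokens * rates.completion) / 1_000_000;
}

console.log(estimateCost('gemini-3', 10_000, 2_000)); // 0.044 (USD, under the per-1M assumption)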

@@ -1040,6 +1040,7 @@ describe('getCacheMultiplier', () => {
 describe('Google Model Tests', () => {
 const googleModels = [
+'gemini-3',
 'gemini-2.5-pro',
 'gemini-2.5-flash',
 'gemini-2.5-flash-lite',
@@ -1083,6 +1084,7 @@ describe('Google Model Tests', () => {
 it('should map to the correct model keys', () => {
 const expected = {
+'gemini-3': 'gemini-3',
 'gemini-2.5-pro': 'gemini-2.5-pro',
 'gemini-2.5-flash': 'gemini-2.5-flash',
 'gemini-2.5-flash-lite': 'gemini-2.5-flash-lite',
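
The mapping test above guards how model names resolve to these config keys; the max-tokens test further down also shows longer variant names (e.g. 'gemini-1.5-pro-preview-0409') falling back to a shorter key. A rough, self-contained sketch of that idea follows; findMatchingKey is hypothetical, not the function under test.

// Hypothetical prefix-based key resolution; illustrative only.
const modelKeys = ['gemini-3', 'gemini-2.5-pro', 'gemini-2.5-flash', 'gemini-2.5-flash-lite'];

function findMatchingKey(modelName) {
  // Exact match first, then the longest key the model name starts with.
  if (modelKeys.includes(modelName)) return modelName;
  return modelKeys
    .filter((key) => modelName.startsWith(key))
    .sort((a, b) => b.length - a.length)[0];
}

console.log(findMatchingKey('gemini-3'));              // 'gemini-3'
console.log(findMatchingKey('gemini-2.5-flash-lite')); // 'gemini-2.5-flash-lite' (exact match wins)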

@@ -47,7 +47,7 @@
 "@langchain/google-genai": "^0.2.13",
 "@langchain/google-vertexai": "^0.2.13",
 "@langchain/textsplitters": "^0.1.0",
-"@librechat/agents": "^3.0.25",
+"@librechat/agents": "^3.0.26",
 "@librechat/api": "*",
 "@librechat/data-schemas": "*",
 "@microsoft/microsoft-graph-client": "^3.0.7",

@@ -275,6 +275,9 @@ describe('getModelMaxTokens', () => {
 expect(getModelMaxTokens('gemini-1.5-pro-preview-0409', EModelEndpoint.google)).toBe(
 maxTokensMap[EModelEndpoint.google]['gemini-1.5'],
 );
+expect(getModelMaxTokens('gemini-3', EModelEndpoint.google)).toBe(
+maxTokensMap[EModelEndpoint.google]['gemini-3'],
+);
 expect(getModelMaxTokens('gemini-2.5-pro', EModelEndpoint.google)).toBe(
 maxTokensMap[EModelEndpoint.google]['gemini-2.5-pro'],
 );
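
For orientation, a minimal self-contained sketch of the exact-key lookup these assertions exercise. The EModelEndpoint stand-in and the map values below are placeholders, and the real getModelMaxTokens also resolves partial names such as 'gemini-1.5-pro-preview-0409', as asserted earlier in this hunk.

// Self-contained sketch of the lookup asserted above; values are placeholders.
const EModelEndpoint = { google: 'google' }; // stand-in for the real enum

const maxTokensMap = {
  [EModelEndpoint.google]: {
    'gemini-3': 1_000_000,       // placeholder context-window size
    'gemini-2.5-pro': 1_000_000, // placeholder context-window size
  },
};

function getModelMaxTokens(modelName, endpoint = EModelEndpoint.google) {
  const endpointMap = maxTokensMap[endpoint];
  return endpointMap ? endpointMap[modelName] : undefined;
}

console.log(getModelMaxTokens('gemini-3', EModelEndpoint.google)); // 1000000 (placeholder)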