🤖 fix: GoogleClient Context Handling & GenAI Parameters (#5503)

* fix: remove legacy code for GoogleClient and fix model parameters for GenAI

* refactor: streamline client init logic

* refactor: remove legacy vertex clients, WIP remote vertex token count

* refactor: enhance GoogleClient with improved type definitions and streamline token count method

* refactor: remove unused methods and consolidate remaining logic

* refactor: remove examples

* refactor: improve input handling logic in DynamicInput component

* refactor: enhance GoogleClient with token usage tracking and context handling improvements

* refactor: update GoogleClient to support 'learnlm' model and streamline model checks

* refactor: remove unused text model handling in GoogleClient

* refactor: record token usage for GoogleClient titles and handle edge cases

* chore: remove unused undici dependency; addresses verbose version warning
Danny Avila 2025-01-27 12:21:33 -05:00 committed by GitHub
parent 47b72e8159
commit 528ee62eb1
12 changed files with 277 additions and 270 deletions


@@ -1,6 +1,6 @@
 {
   "name": "librechat-data-provider",
-  "version": "0.7.694",
+  "version": "0.7.695",
   "description": "data services for librechat apps",
   "main": "dist/index.js",
   "module": "dist/index.es.js",


@@ -272,7 +272,7 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
   if (endpoint === EModelEndpoint.google) {
     if (modelLabel) {
       return modelLabel;
-    } else if (model && model.includes('gemini')) {
+    } else if (model && (model.includes('gemini') || model.includes('learnlm'))) {
       return 'Gemini';
     } else if (model && model.includes('code')) {
       return 'Codey';
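
For reference, a minimal usage sketch of the updated sender check. It assumes getResponseSender and EModelEndpoint are exported from librechat-data-provider and that an endpoint option with just these fields is accepted; the model IDs are hypothetical examples, not tied to specific releases:

import { getResponseSender, EModelEndpoint } from 'librechat-data-provider';

// Without an explicit modelLabel, any Google model ID containing 'gemini' or
// (after this change) 'learnlm' resolves to the 'Gemini' sender label.
const sender = getResponseSender({
  endpoint: EModelEndpoint.google,
  model: 'learnlm-1.5-pro-experimental', // hypothetical ID containing 'learnlm'
});
// sender === 'Gemini'

// An explicit modelLabel still takes precedence over the model-based checks.
const labeled = getResponseSender({
  endpoint: EModelEndpoint.google,
  model: 'gemini-1.5-pro',
  modelLabel: 'Study Buddy',
});
// labeled === 'Study Buddy'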


@@ -788,6 +788,25 @@ export const googleSchema = tConversationSchema
     maxContextTokens: undefined,
   }));
 
+/**
+ * TODO: Map the following fields:
+ * - presence_penalty -> presencePenalty
+ * - frequency_penalty -> frequencyPenalty
+ * - stop -> stopSequences
+ */
+export const googleGenConfigSchema = z
+  .object({
+    maxOutputTokens: coerceNumber.optional(),
+    temperature: coerceNumber.optional(),
+    topP: coerceNumber.optional(),
+    topK: coerceNumber.optional(),
+    presencePenalty: coerceNumber.optional(),
+    frequencyPenalty: coerceNumber.optional(),
+    stopSequences: z.array(z.string()).optional(),
+  })
+  .strip()
+  .optional();
+
 export const bingAISchema = tConversationSchema
   .pick({
     jailbreak: true,
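
Below is a sketch of how the new googleGenConfigSchema might be applied together with the snake_case-to-camelCase mapping the TODO above calls for. The toGoogleGenConfig helper and the incoming field names are assumptions for illustration only, not part of this commit; it also assumes googleGenConfigSchema is exported from the package root.

import { googleGenConfigSchema } from 'librechat-data-provider';

// Hypothetical helper: normalize OpenAI-style fields onto the camelCase
// generationConfig shape, then validate/coerce it with googleGenConfigSchema.
function toGoogleGenConfig(raw: Record<string, unknown>) {
  return googleGenConfigSchema.parse({
    maxOutputTokens: raw.maxOutputTokens ?? raw.max_tokens,
    temperature: raw.temperature,
    topP: raw.topP ?? raw.top_p,
    topK: raw.topK ?? raw.top_k,
    presencePenalty: raw.presencePenalty ?? raw.presence_penalty,
    frequencyPenalty: raw.frequencyPenalty ?? raw.frequency_penalty,
    stopSequences: raw.stopSequences ?? raw.stop,
  });
}

// coerceNumber accepts numeric strings, so '0.7' below is coerced to 0.7.
const config = toGoogleGenConfig({
  temperature: '0.7',
  presence_penalty: 0.1, // mapped to presencePenalty
  stop: ['\n\n'],        // mapped to stopSequences
});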