Merge branch 'main' into feat/Custom-Token-Rates-for-Endpoints

Ruben Talstra 2025-02-27 11:03:23 +01:00 committed by GitHub
commit 59a232812d
GPG key ID: B5690EEEBB952194
27 changed files with 568 additions and 244 deletions


@@ -1,6 +1,6 @@
 {
   "name": "librechat-data-provider",
-  "version": "0.7.6992",
+  "version": "0.7.6993",
   "description": "data services for librechat apps",
   "main": "dist/index.js",
   "module": "dist/index.es.js",


@@ -2,8 +2,8 @@ import { z } from 'zod';
 import type { ZodError } from 'zod';
 import type { TModelsConfig } from './types';
 import { EModelEndpoint, eModelEndpointSchema } from './schemas';
-import { fileConfigSchema } from './file-config';
 import { specsConfigSchema, TSpecsConfig } from './models';
+import { fileConfigSchema } from './file-config';
 import { FileSources } from './types/files';
 import { MCPServersSchema } from './mcp';
@@ -31,6 +31,27 @@ export const defaultRetrievalModels = [
   'gpt-4-1106',
 ];
+
+export const excludedKeys = new Set([
+  'conversationId',
+  'title',
+  'iconURL',
+  'greeting',
+  'endpoint',
+  'endpointType',
+  'createdAt',
+  'updatedAt',
+  'expiredAt',
+  'messages',
+  'isArchived',
+  'tags',
+  'user',
+  '__v',
+  '_id',
+  'tools',
+  'model',
+  'files',
+]);
 
 export enum SettingsViews {
   default = 'default',
   advanced = 'advanced',

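Editor's note: the excludedKeys set added above gathers conversation bookkeeping fields (IDs, timestamps, Mongo internals such as _id and __v) that are apparently meant to be skipped when conversation data is reduced to endpoint options; the consuming code is not part of this hunk. A minimal usage sketch, assuming the export is surfaced from the package root like the other config exports; buildEndpointOptions is a hypothetical helper, not part of this diff:

import { excludedKeys } from 'librechat-data-provider';

// Hypothetical helper: keep only the keys that are not in excludedKeys.
const buildEndpointOptions = (conversation: Record<string, unknown>) =>
  Object.fromEntries(
    Object.entries(conversation).filter(([key]) => !excludedKeys.has(key)),
  );

// buildEndpointOptions({ conversationId: 'abc', model: 'gpt-4o', temperature: 0.7 })
// -> { temperature: 0.7 }   (conversationId and model are both in the set)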

@@ -252,7 +252,8 @@ export const googleSettings = {
   },
 };
-const ANTHROPIC_MAX_OUTPUT = 8192;
+const ANTHROPIC_MAX_OUTPUT = 128000;
+const DEFAULT_MAX_OUTPUT = 8192;
 const LEGACY_ANTHROPIC_MAX_OUTPUT = 4096;
 export const anthropicSettings = {
   model: {
@@ -280,16 +281,19 @@ export const anthropicSettings = {
     min: 1,
     max: ANTHROPIC_MAX_OUTPUT,
     step: 1,
-    default: ANTHROPIC_MAX_OUTPUT,
+    default: DEFAULT_MAX_OUTPUT,
     reset: (modelName: string) => {
-      if (modelName.includes('claude-3-5-sonnet') || modelName.includes('claude-3-7-sonnet')) {
-        return ANTHROPIC_MAX_OUTPUT;
+      if (/claude-3[-.]5-sonnet/.test(modelName) || /claude-3[-.]7/.test(modelName)) {
+        return DEFAULT_MAX_OUTPUT;
       }
       return 4096;
     },
     set: (value: number, modelName: string) => {
-      if (!modelName.includes('claude-3-5-sonnet') && value > LEGACY_ANTHROPIC_MAX_OUTPUT) {
+      if (
+        !(/claude-3[-.]5-sonnet/.test(modelName) || /claude-3[-.]7/.test(modelName)) &&
+        value > LEGACY_ANTHROPIC_MAX_OUTPUT
+      ) {
         return LEGACY_ANTHROPIC_MAX_OUTPUT;
       }
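Editor's note: in the two hunks above, ANTHROPIC_MAX_OUTPUT is raised to 128000 and kept as the slider maximum, while the new DEFAULT_MAX_OUTPUT of 8192 becomes the default value. The reset/set logic also moves from substring checks to regexes, so dot-separated model names (claude-3.5-sonnet) and any claude-3-7 / claude-3.7 model now qualify for the higher output ceiling. A rough illustration of the resulting behaviour, assuming these fields sit under anthropicSettings.maxOutputTokens (the property name itself is outside the hunk):

import { anthropicSettings } from 'librechat-data-provider';

// reset(): matching models fall back to DEFAULT_MAX_OUTPUT, everything else to 4096.
anthropicSettings.maxOutputTokens.reset('claude-3.7-sonnet-latest');   // 8192
anthropicSettings.maxOutputTokens.reset('claude-3-5-sonnet-20241022'); // 8192
anthropicSettings.maxOutputTokens.reset('claude-3-opus-20240229');     // 4096

// set(): for non-matching models, values above LEGACY_ANTHROPIC_MAX_OUTPUT are clamped
// to 4096; the branch taken by matching models is not shown in this hunk.
anthropicSettings.maxOutputTokens.set(20000, 'claude-3-opus-20240229'); // 4096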
@@ -760,37 +764,8 @@ export const googleSchema = tConversationSchema
     spec: true,
     maxContextTokens: true,
   })
-  .transform((obj) => {
-    return {
-      ...obj,
-      model: obj.model ?? google.model.default,
-      modelLabel: obj.modelLabel ?? null,
-      promptPrefix: obj.promptPrefix ?? null,
-      examples: obj.examples ?? [{ input: { content: '' }, output: { content: '' } }],
-      temperature: obj.temperature ?? google.temperature.default,
-      maxOutputTokens: obj.maxOutputTokens ?? google.maxOutputTokens.default,
-      topP: obj.topP ?? google.topP.default,
-      topK: obj.topK ?? google.topK.default,
-      iconURL: obj.iconURL ?? undefined,
-      greeting: obj.greeting ?? undefined,
-      spec: obj.spec ?? undefined,
-      maxContextTokens: obj.maxContextTokens ?? undefined,
-    };
-  })
-  .catch(() => ({
-    model: google.model.default,
-    modelLabel: null,
-    promptPrefix: null,
-    examples: [{ input: { content: '' }, output: { content: '' } }],
-    temperature: google.temperature.default,
-    maxOutputTokens: google.maxOutputTokens.default,
-    topP: google.topP.default,
-    topK: google.topK.default,
-    iconURL: undefined,
-    greeting: undefined,
-    spec: undefined,
-    maxContextTokens: undefined,
-  }));
+  .transform((obj: Partial<TConversation>) => removeNullishValues(obj))
+  .catch(() => ({}));
 /**
  * TODO: Map the following fields:
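Editor's note: the last hunk replaces googleSchema's hand-written defaulting transform with a single removeNullishValues call, and the .catch fallback shrinks from a fully populated default object to {}, so google.* defaults are presumably applied downstream rather than baked into the parsed object. removeNullishValues is imported elsewhere in the file (not shown in this excerpt); a sketch of what such a helper typically does, for orientation only:

// Sketch only, not the package's actual implementation:
// drop null/undefined entries and leave the remaining keys untouched.
function removeNullishValues<T extends Record<string, unknown>>(obj: T): Partial<T> {
  return Object.fromEntries(
    Object.entries(obj).filter(([, value]) => value !== null && value !== undefined),
  ) as Partial<T>;
}

removeNullishValues({ model: 'gemini-pro', temperature: null, topP: undefined });
// -> { model: 'gemini-pro' }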