🪙 feat: Configure Max Context and Output Tokens (#2648)

* chore: make frequent 'error' log into 'debug' log

* feat: add maxContextTokens as a conversation field

* refactor(settings): increase popover height

* feat: add DynamicInputNumber and maxContextTokens to all endpoints that support it (frontend), fix schema

* feat: maxContextTokens handling (backend)

* style: revert popover height

* feat: max tokens

* fix: Ollama Vision firebase compatibility

* fix: Ollama Vision, use message_file_map to determine multimodal request

* refactor: bring back MobileNav and improve title styling
This commit is contained in:
Danny Avila 2024-05-09 13:27:13 -04:00 committed by GitHub
parent 5293b73b6d
commit 6ba7f60eec
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
26 changed files with 420 additions and 22 deletions

View file

@@ -1,6 +1,6 @@
{
"name": "librechat-data-provider",
"version": "0.6.0",
"version": "0.6.1",
"description": "data services for librechat apps",
"main": "dist/index.js",
"module": "dist/index.es.js",

View file

@@ -72,6 +72,8 @@ export type DynamicSettingProps = Partial<SettingDefinition> & {
setOption: TSetOption;
conversation: TConversation | TPreset | null;
defaultValue?: number | boolean | string | string[];
className?: string;
inputClassName?: string;
};
const requiredSettingFields = ['key', 'type', 'component'];
@@ -508,6 +510,7 @@ export const generateOpenAISchema = (customOpenAI: OpenAISettings) => {
frequency_penalty: true,
resendFiles: true,
imageDetail: true,
maxContextTokens: true,
})
.transform((obj) => ({
...obj,
@@ -521,6 +524,7 @@ export const generateOpenAISchema = (customOpenAI: OpenAISettings) => {
resendFiles:
typeof obj.resendFiles === 'boolean' ? obj.resendFiles : defaults.resendFiles.default,
imageDetail: obj.imageDetail ?? defaults.imageDetail.default,
maxContextTokens: obj.maxContextTokens ?? undefined,
}))
.catch(() => ({
model: defaults.model.default,
@@ -532,6 +536,7 @@ export const generateOpenAISchema = (customOpenAI: OpenAISettings) => {
frequency_penalty: defaults.frequency_penalty.default,
resendFiles: defaults.resendFiles.default,
imageDetail: defaults.imageDetail.default,
maxContextTokens: undefined,
}));
};
@@ -547,6 +552,7 @@ export const generateGoogleSchema = (customGoogle: GoogleSettings) => {
maxOutputTokens: true,
topP: true,
topK: true,
maxContextTokens: true,
})
.transform((obj) => {
const isGemini = obj?.model?.toLowerCase()?.includes('gemini');
@@ -571,6 +577,7 @@ export const generateGoogleSchema = (customGoogle: GoogleSettings) => {
maxOutputTokens,
topP: obj.topP ?? defaults.topP.default,
topK: obj.topK ?? defaults.topK.default,
maxContextTokens: obj.maxContextTokens ?? undefined,
};
})
.catch(() => ({
@@ -582,5 +589,6 @@ export const generateGoogleSchema = (customGoogle: GoogleSettings) => {
maxOutputTokens: defaults.maxOutputTokens.default,
topP: defaults.topP.default,
topK: defaults.topK.default,
maxContextTokens: undefined,
}));
};

View file

@@ -105,6 +105,12 @@ export const openAISettings = {
resendFiles: {
default: true,
},
maxContextTokens: {
default: undefined,
},
max_tokens: {
default: undefined,
},
imageDetail: {
default: ImageDetail.auto,
},
@@ -309,6 +315,8 @@ export const tConversationSchema = z.object({
maxOutputTokens: z.number().optional(),
agentOptions: tAgentOptionsSchema.nullable().optional(),
file_ids: z.array(z.string()).optional(),
maxContextTokens: z.number().optional(),
max_tokens: z.number().optional(),
/** @deprecated */
resendImages: z.boolean().optional(),
/* vision */
@@ -382,6 +390,8 @@ export const openAISchema = tConversationSchema
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
max_tokens: true,
})
.transform((obj) => ({
...obj,
@@ -399,6 +409,8 @@ export const openAISchema = tConversationSchema
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
maxContextTokens: obj.maxContextTokens ?? undefined,
max_tokens: obj.max_tokens ?? undefined,
}))
.catch(() => ({
model: openAISettings.model.default,
@@ -414,6 +426,8 @@ export const openAISchema = tConversationSchema
iconURL: undefined,
greeting: undefined,
spec: undefined,
maxContextTokens: undefined,
max_tokens: undefined,
}));
export const googleSchema = tConversationSchema
@@ -429,6 +443,7 @@ export const googleSchema = tConversationSchema
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
.transform((obj) => {
const isGemini = obj?.model?.toLowerCase()?.includes('gemini');
@@ -456,6 +471,7 @@ export const googleSchema = tConversationSchema
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
maxContextTokens: obj.maxContextTokens ?? undefined,
};
})
.catch(() => ({
@@ -470,6 +486,7 @@ export const googleSchema = tConversationSchema
iconURL: undefined,
greeting: undefined,
spec: undefined,
maxContextTokens: undefined,
}));
export const bingAISchema = tConversationSchema
@@ -520,6 +537,7 @@ export const anthropicSchema = tConversationSchema
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
.transform((obj) => ({
...obj,
@@ -534,6 +552,7 @@ export const anthropicSchema = tConversationSchema
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
maxContextTokens: obj.maxContextTokens ?? undefined,
}))
.catch(() => ({
model: 'claude-1',
@@ -547,6 +566,7 @@ export const anthropicSchema = tConversationSchema
iconURL: undefined,
greeting: undefined,
spec: undefined,
maxContextTokens: undefined,
}));
export const chatGPTBrowserSchema = tConversationSchema
@@ -576,6 +596,7 @@ export const gptPluginsSchema = tConversationSchema
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
.transform((obj) => ({
...obj,
@@ -596,6 +617,7 @@ export const gptPluginsSchema = tConversationSchema
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
maxContextTokens: obj.maxContextTokens ?? undefined,
}))
.catch(() => ({
model: 'gpt-3.5-turbo',
@@ -615,6 +637,7 @@ export const gptPluginsSchema = tConversationSchema
iconURL: undefined,
greeting: undefined,
spec: undefined,
maxContextTokens: undefined,
}));
export function removeNullishValues<T extends object>(obj: T): T {
@@ -688,6 +711,8 @@ export const compactOpenAISchema = tConversationSchema
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
max_tokens: true,
})
.transform((obj: Partial<TConversation>) => {
const newObj: Partial<TConversation> = { ...obj };
@@ -727,6 +752,7 @@ export const compactGoogleSchema = tConversationSchema
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };
@@ -760,6 +786,7 @@ export const compactAnthropicSchema = tConversationSchema
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };
@@ -807,6 +834,7 @@ export const compactPluginsSchema = tConversationSchema
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };

View file

@@ -17,6 +17,7 @@ export type TEndpointOption = {
endpointType?: EModelEndpoint;
modelDisplayLabel?: string;
resendFiles?: boolean;
maxContextTokens?: number;
imageDetail?: ImageDetail;
model?: string | null;
promptPrefix?: string;