🪙 feat: Configure Max Context and Output Tokens (#2648)

* chore: make frequent 'error' log into 'debug' log

* feat: add maxContextTokens as a conversation field

* refactor(settings): increase popover height

* feat: add DynamicInputNumber and maxContextTokens to all endpoints that support it (frontend), fix schema

* feat: maxContextTokens handling (backend)

* style: revert popover height

* feat: max tokens

* fix: Ollama Vision firebase compatibility

* fix: Ollama Vision, use message_file_map to determine multimodal request

* refactor: bring back MobileNav and improve title styling
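
The Ollama Vision fix above keys multimodal handling off of `message_file_map`. A minimal sketch of that idea, with hypothetical type names (`AttachedFile`, `MessageFileMap`, `isMultimodalRequest`) standing in for the actual LibreChat structures:

```ts
// Hypothetical shapes for illustration only; the real message_file_map
// structure in LibreChat differs.
type AttachedFile = { type: string };
type MessageFileMap = Record<string, AttachedFile[]>;

// Treat the request as multimodal when any message carries an image attachment.
function isMultimodalRequest(messageFileMap?: MessageFileMap): boolean {
  if (!messageFileMap) {
    return false;
  }
  return Object.values(messageFileMap).some((files) =>
    files.some((file) => file.type.startsWith('image/')),
  );
}
```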

Danny Avila committed 2024-05-09 13:27:13 -04:00 (committed by GitHub)
parent 5293b73b6d
commit 6ba7f60eec
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
26 changed files with 420 additions and 22 deletions
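
The backend side of maxContextTokens presumably trims conversation history to the configured budget before a request is sent. A rough sketch of that behavior under assumed names (`ChatMessage`, `countTokens`, `truncateContext`); the real implementation uses the project's own token counting, not the character heuristic shown here:

```ts
// Hypothetical message shape; the actual client classes differ.
interface ChatMessage {
  role: 'system' | 'user' | 'assistant';
  content: string;
}

// Placeholder tokenizer for the sketch; a real implementation would use
// a proper tokenizer such as tiktoken.
const countTokens = (text: string): number => Math.ceil(text.length / 4);

/**
 * Keep the most recent messages that fit inside `maxContextTokens`.
 * When the option is unset (undefined), the full history is returned.
 */
function truncateContext(messages: ChatMessage[], maxContextTokens?: number): ChatMessage[] {
  if (maxContextTokens == null) {
    return messages;
  }
  const kept: ChatMessage[] = [];
  let used = 0;
  for (let i = messages.length - 1; i >= 0; i--) {
    const cost = countTokens(messages[i].content);
    if (used + cost > maxContextTokens) {
      break;
    }
    kept.unshift(messages[i]);
    used += cost;
  }
  return kept;
}
```

Leaving the option undefined keeps the full history, which matches how the schemas below pass `maxContextTokens ?? undefined` through instead of forcing a default.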


@@ -72,6 +72,8 @@ export type DynamicSettingProps = Partial<SettingDefinition> & {
setOption: TSetOption;
conversation: TConversation | TPreset | null;
defaultValue?: number | boolean | string | string[];
className?: string;
inputClassName?: string;
};
const requiredSettingFields = ['key', 'type', 'component'];
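
The new `className` and `inputClassName` props feed the `DynamicInputNumber` component mentioned in the commit message. The following is only a sketch of how such a component might consume them; the prop and component shapes here (`DynamicInputNumberProps`, the `setOption` signature) are assumptions, not the actual implementation:

```tsx
import { useState } from 'react';

// Hypothetical prop subset; the real DynamicSettingProps type has more fields
// and a different setOption signature.
type DynamicInputNumberProps = {
  settingKey: string;
  defaultValue?: number;
  className?: string;
  inputClassName?: string;
  setOption: (key: string) => (value: number) => void;
};

// Minimal numeric setting input that forwards the new styling props.
export function DynamicInputNumber({
  settingKey,
  defaultValue,
  className,
  inputClassName,
  setOption,
}: DynamicInputNumberProps) {
  const [value, setValue] = useState<number | undefined>(defaultValue);
  return (
    <div className={className}>
      <label htmlFor={settingKey}>{settingKey}</label>
      <input
        id={settingKey}
        type="number"
        className={inputClassName}
        value={value ?? ''}
        onChange={(e) => {
          const next = Number(e.target.value);
          setValue(next);
          setOption(settingKey)(next);
        }}
      />
    </div>
  );
}
```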
@@ -508,6 +510,7 @@ export const generateOpenAISchema = (customOpenAI: OpenAISettings) => {
frequency_penalty: true,
resendFiles: true,
imageDetail: true,
maxContextTokens: true,
})
.transform((obj) => ({
...obj,
@@ -521,6 +524,7 @@ export const generateOpenAISchema = (customOpenAI: OpenAISettings) => {
resendFiles:
typeof obj.resendFiles === 'boolean' ? obj.resendFiles : defaults.resendFiles.default,
imageDetail: obj.imageDetail ?? defaults.imageDetail.default,
maxContextTokens: obj.maxContextTokens ?? undefined,
}))
.catch(() => ({
model: defaults.model.default,
@@ -532,6 +536,7 @@ export const generateOpenAISchema = (customOpenAI: OpenAISettings) => {
frequency_penalty: defaults.frequency_penalty.default,
resendFiles: defaults.resendFiles.default,
imageDetail: defaults.imageDetail.default,
maxContextTokens: undefined,
}));
};
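
The transform/catch pairing above is what keeps maxContextTokens optional end to end: the transform passes it through with `?? undefined`, and the catch fallback also leaves it undefined rather than substituting a default. A standalone illustration of that zod pattern (simplified schema, illustrative model name, not the actual generated schema):

```ts
import { z } from 'zod';

// Standalone illustration of the transform/catch pattern used above.
const exampleSchema = z
  .object({
    model: z.string().optional(),
    maxContextTokens: z.number().optional(),
  })
  .transform((obj) => ({
    ...obj,
    model: obj.model ?? 'gpt-4o',
    // Left undefined when not provided, so downstream code can fall back
    // to the model's own context window.
    maxContextTokens: obj.maxContextTokens ?? undefined,
  }))
  .catch(() => ({
    model: 'gpt-4o',
    maxContextTokens: undefined,
  }));

console.log(exampleSchema.parse({}));                         // { model: 'gpt-4o', maxContextTokens: undefined }
console.log(exampleSchema.parse({ maxContextTokens: 8192 })); // { model: 'gpt-4o', maxContextTokens: 8192 }
```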
@@ -547,6 +552,7 @@ export const generateGoogleSchema = (customGoogle: GoogleSettings) => {
maxOutputTokens: true,
topP: true,
topK: true,
maxContextTokens: true,
})
.transform((obj) => {
const isGemini = obj?.model?.toLowerCase()?.includes('gemini');
@@ -571,6 +577,7 @@ export const generateGoogleSchema = (customGoogle: GoogleSettings) => {
maxOutputTokens,
topP: obj.topP ?? defaults.topP.default,
topK: obj.topK ?? defaults.topK.default,
maxContextTokens: obj.maxContextTokens ?? undefined,
};
})
.catch(() => ({
@@ -582,5 +589,6 @@ export const generateGoogleSchema = (customGoogle: GoogleSettings) => {
maxOutputTokens: defaults.maxOutputTokens.default,
topP: defaults.topP.default,
topK: defaults.topK.default,
maxContextTokens: undefined,
}));
};
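
With both schemas leaving maxContextTokens undefined by default, downstream code presumably falls back to a per-model context window. A hedged sketch of such a lookup; the table values are illustrative only and `resolveContextTokens` is a hypothetical helper, not part of this commit:

```ts
// Illustrative context-window table; real limits vary by model and provider.
const FALLBACK_CONTEXT_TOKENS: Record<string, number> = {
  'gpt-4o': 128000,
  'gpt-3.5-turbo': 16385,
  'gemini-1.5-pro': 1000000,
};

const DEFAULT_CONTEXT_TOKENS = 4096;

// Resolve the effective context budget: the user setting wins, otherwise
// the model's known window, otherwise a conservative default.
function resolveContextTokens(model: string, maxContextTokens?: number): number {
  if (maxContextTokens != null && maxContextTokens > 0) {
    return maxContextTokens;
  }
  const known = Object.keys(FALLBACK_CONTEXT_TOKENS).find((key) => model.startsWith(key));
  return known ? FALLBACK_CONTEXT_TOKENS[known] : DEFAULT_CONTEXT_TOKENS;
}
```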