Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-17 08:50:15 +01:00)
🪙 feat: Configure Max Context and Output Tokens (#2648)
* chore: make frequent 'error' log into 'debug' log
* feat: add maxContextTokens as a conversation field
* refactor(settings): increase popover height
* feat: add DynamicInputNumber and maxContextTokens to all endpoints that support it (frontend), fix schema
* feat: maxContextTokens handling (backend)
* style: revert popover height
* feat: max tokens
* fix: Ollama Vision firebase compatibility
* fix: Ollama Vision, use message_file_map to determine multimodal request
* refactor: bring back MobileNav and improve title styling
This commit is contained in:
parent 5293b73b6d
commit 6ba7f60eec

26 changed files with 420 additions and 22 deletions
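Across the API clients changed below, the resolved context window follows the same precedence: an explicit per-conversation maxContextTokens, then the known limit for the model from getModelMaxTokens, then a hard-coded fallback. A minimal standalone sketch of that nullish-coalescing chain (getModelMaxTokens is stubbed here and the numbers are illustrative, not the project's real token map):

// Sketch of the precedence used in AnthropicClient / GoogleClient / OpenAIClient below.
const knownLimits: Record<string, number> = { 'claude-3-opus': 200000, 'gpt-4': 8191 };

function getModelMaxTokens(model: string): number | undefined {
  // stub; the real helper also takes an endpoint and a token config
  return knownLimits[model];
}

function resolveMaxContextTokens(
  model: string,
  userMaxContextTokens?: number, // the new conversation/preset field
  fallback = 4095,
): number {
  return userMaxContextTokens ?? getModelMaxTokens(model) ?? fallback;
}

console.log(resolveMaxContextTokens('gpt-4'));           // 8191 (model default)
console.log(resolveMaxContextTokens('gpt-4', 4000));     // 4000 (user override wins)
console.log(resolveMaxContextTokens('my-custom-model')); // 4095 (fallback)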
@@ -75,7 +75,9 @@ class AnthropicClient extends BaseClient {
     this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));
 
     this.maxContextTokens =
-      getModelMaxTokens(this.modelOptions.model, EModelEndpoint.anthropic) ?? 100000;
+      this.options.maxContextTokens ??
+      getModelMaxTokens(this.modelOptions.model, EModelEndpoint.anthropic) ??
+      100000;
     this.maxResponseTokens = this.modelOptions.maxOutputTokens || 1500;
     this.maxPromptTokens =
       this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;

@@ -652,6 +654,7 @@ class AnthropicClient extends BaseClient {
 
   getSaveOptions() {
     return {
+      maxContextTokens: this.options.maxContextTokens,
       promptPrefix: this.options.promptPrefix,
       modelLabel: this.options.modelLabel,
       resendFiles: this.options.resendFiles,
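The constructor above derives the prompt budget from the resolved window (maxPromptTokens = maxContextTokens - maxResponseTokens), and OpenAIClient further down halves the window when summarization is enabled. A small numeric sketch of that arithmetic, with hypothetical values:

// Hypothetical numbers; the formulas mirror the client constructors in this commit.
const maxContextTokens = 16000;  // user-configured or model default
const maxResponseTokens = 1500;  // maxOutputTokens / max_tokens
const shouldSummarize = true;

const effectiveContext = shouldSummarize
  ? Math.floor(maxContextTokens / 2) // OpenAIClient halves the window for summaries
  : maxContextTokens;

const maxPromptTokens = effectiveContext - maxResponseTokens;
console.log({ effectiveContext, maxPromptTokens }); // { effectiveContext: 8000, maxPromptTokens: 6500 }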
@@ -138,7 +138,10 @@ class GoogleClient extends BaseClient {
       !isGenerativeModel && !isChatModel && /code|text/.test(this.modelOptions.model);
     const { isTextModel } = this;
 
-    this.maxContextTokens = getModelMaxTokens(this.modelOptions.model, EModelEndpoint.google);
+    this.maxContextTokens =
+      this.options.maxContextTokens ??
+      getModelMaxTokens(this.modelOptions.model, EModelEndpoint.google);
+
     // The max prompt tokens is determined by the max context tokens minus the max response tokens.
     // Earlier messages will be dropped until the prompt is within the limit.
     this.maxResponseTokens = this.modelOptions.maxOutputTokens || settings.maxOutputTokens.default;
@@ -161,11 +161,13 @@ class OpenAIClient extends BaseClient {
       model.startsWith('text-chat') || model.startsWith('text-davinci-002-render');
 
     this.maxContextTokens =
+      this.options.maxContextTokens ??
       getModelMaxTokens(
         model,
         this.options.endpointType ?? this.options.endpoint,
         this.options.endpointTokenConfig,
-      ) ?? 4095; // 1 less than maximum
+      ) ??
+      4095; // 1 less than maximum
 
     if (this.shouldSummarize) {
       this.maxContextTokens = Math.floor(this.maxContextTokens / 2);

@@ -407,6 +409,7 @@ class OpenAIClient extends BaseClient {
 
   getSaveOptions() {
     return {
+      maxContextTokens: this.options.maxContextTokens,
       chatGptLabel: this.options.chatGptLabel,
       promptPrefix: this.options.promptPrefix,
       resendFiles: this.options.resendFiles,

@@ -435,7 +438,11 @@ class OpenAIClient extends BaseClient {
    * @returns {Promise<MongoFile[]>}
    */
   async addImageURLs(message, attachments) {
-    const { files, image_urls } = await encodeAndFormat(this.options.req, attachments);
+    const { files, image_urls } = await encodeAndFormat(
+      this.options.req,
+      attachments,
+      this.options.endpoint,
+    );
     message.image_urls = image_urls.length ? image_urls : undefined;
     return files;
   }

@@ -1158,7 +1165,7 @@ ${convo}
        });
      }
 
-    if (this.options.attachments && this.options.endpoint?.toLowerCase() === 'ollama') {
+    if (this.message_file_map && this.options.endpoint?.toLowerCase() === 'ollama') {
       const ollamaClient = new OllamaClient({ baseURL });
       return await ollamaClient.chatCompletion({
         payload: modelOptions,
@@ -40,7 +40,8 @@ class FakeClient extends BaseClient {
       };
     }
 
-    this.maxContextTokens = getModelMaxTokens(this.modelOptions.model) ?? 4097;
+    this.maxContextTokens =
+      this.options.maxContextTokens ?? getModelMaxTokens(this.modelOptions.model) ?? 4097;
   }
   buildMessages() {}
   getTokenCount(str) {
@@ -348,7 +348,7 @@ module.exports = function mongoMeili(schema, options) {
       try {
         meiliDoc = await client.index('convos').getDocument(doc.conversationId);
       } catch (error) {
-        logger.error(
+        logger.debug(
           '[MeiliMongooseModel.findOneAndUpdate] Convo not found in MeiliSearch and will index ' +
             doc.conversationId,
           error,
@@ -104,6 +104,12 @@ const conversationPreset = {
     type: String,
   },
   tools: { type: [{ type: String }], default: undefined },
+  maxContextTokens: {
+    type: Number,
+  },
+  max_tokens: {
+    type: Number,
+  },
 };
 
 const agentOptions = {
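The two new preset fields above are plain optional Numbers, so a document only carries them when a value was actually set. A minimal standalone sketch of that behavior (hypothetical schema and model names, not the real conversation schema):

import mongoose from 'mongoose';

// Hypothetical names; mirrors the optional Number fields added above.
const examplePresetSchema = new mongoose.Schema({
  maxContextTokens: { type: Number },
  max_tokens: { type: Number },
});
const ExamplePreset = mongoose.model('ExamplePreset', examplePresetSchema);

const withLimit = new ExamplePreset({ maxContextTokens: 8192 });
const withoutLimit = new ExamplePreset({});
console.log(withLimit.maxContextTokens);    // 8192
console.log(withoutLimit.maxContextTokens); // undefined, the field is simply absent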
@@ -1,5 +1,14 @@
 const buildOptions = (endpoint, parsedBody) => {
-  const { modelLabel, promptPrefix, resendFiles, iconURL, greeting, spec, ...rest } = parsedBody;
+  const {
+    modelLabel,
+    promptPrefix,
+    maxContextTokens,
+    resendFiles,
+    iconURL,
+    greeting,
+    spec,
+    ...rest
+  } = parsedBody;
   const endpointOption = {
     endpoint,
     modelLabel,

@@ -8,6 +17,7 @@ const buildOptions = (endpoint, parsedBody) => {
     iconURL,
     greeting,
     spec,
+    maxContextTokens,
     modelOptions: {
       ...rest,
     },
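This buildOptions helper (and the custom, Google, and OpenAI variants below) all apply the same split: the named fields become top-level endpoint options, and everything else falls through to modelOptions. A small sketch of the resulting shape, using a made-up request body:

// Hypothetical request body; field names match the destructuring above.
const parsedBody = {
  modelLabel: 'My Claude',
  promptPrefix: 'You are terse.',
  maxContextTokens: 100000,
  model: 'claude-3-haiku-20240307',
  temperature: 0.5,
};

const { modelLabel, promptPrefix, maxContextTokens, ...rest } = parsedBody;
const endpointOption = {
  endpoint: 'anthropic',
  modelLabel,
  promptPrefix,
  maxContextTokens,          // now travels alongside the other endpoint-level options
  modelOptions: { ...rest }, // { model, temperature } stay with the model options
};
console.log(endpointOption);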
@@ -1,6 +1,15 @@
 const buildOptions = (endpoint, parsedBody, endpointType) => {
-  const { chatGptLabel, promptPrefix, resendFiles, imageDetail, iconURL, greeting, spec, ...rest } =
-    parsedBody;
+  const {
+    chatGptLabel,
+    promptPrefix,
+    maxContextTokens,
+    resendFiles,
+    imageDetail,
+    iconURL,
+    greeting,
+    spec,
+    ...rest
+  } = parsedBody;
   const endpointOption = {
     endpoint,
     endpointType,

@@ -11,6 +20,7 @@ const buildOptions = (endpoint, parsedBody, endpointType) => {
     iconURL,
     greeting,
     spec,
+    maxContextTokens,
     modelOptions: {
       ...rest,
     },
@@ -7,6 +7,7 @@ const buildOptions = (endpoint, parsedBody) => {
     iconURL,
     greeting,
     spec,
+    maxContextTokens,
     ...modelOptions
   } = parsedBody;
   const endpointOption = {

@@ -21,6 +22,7 @@ const buildOptions = (endpoint, parsedBody) => {
     iconURL,
     greeting,
     spec,
+    maxContextTokens,
     modelOptions,
   };
 
@@ -1,6 +1,15 @@
 const buildOptions = (endpoint, parsedBody) => {
-  const { chatGptLabel, promptPrefix, resendFiles, imageDetail, iconURL, greeting, spec, ...rest } =
-    parsedBody;
+  const {
+    chatGptLabel,
+    promptPrefix,
+    maxContextTokens,
+    resendFiles,
+    imageDetail,
+    iconURL,
+    greeting,
+    spec,
+    ...rest
+  } = parsedBody;
   const endpointOption = {
     endpoint,
     chatGptLabel,

@@ -10,6 +19,7 @@ const buildOptions = (endpoint, parsedBody) => {
     iconURL,
     greeting,
     spec,
+    maxContextTokens,
     modelOptions: {
       ...rest,
     },
@@ -23,7 +23,7 @@ async function fetchImageToBase64(url) {
   }
 }
 
-const base64Only = new Set([EModelEndpoint.google, EModelEndpoint.anthropic]);
+const base64Only = new Set([EModelEndpoint.google, EModelEndpoint.anthropic, 'Ollama', 'ollama']);
 
 /**
  * Encodes and formats the given files.
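Adding 'Ollama' and 'ollama' to base64Only flags the Ollama endpoint alongside Google and Anthropic as one whose images get base64-encoded rather than passed by URL. A minimal sketch of the membership check only; the function name below is illustrative and not the actual helper's internals:

// Illustrative only: a Set lookup deciding which endpoints need base64 image data.
const base64Only = new Set(['google', 'anthropic', 'Ollama', 'ollama']);

function needsBase64(endpoint?: string): boolean {
  return endpoint != null && base64Only.has(endpoint);
}

console.log(needsBase64('ollama')); // true
console.log(needsBase64('openAI')); // false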
@@ -45,7 +45,7 @@ export default function OptionsPopover({
 
   const localize = useLocalize();
   const cardStyle =
-    'shadow-xl rounded-md min-w-[75px] font-normal bg-white border-black/10 border dark:bg-gray-700 text-black dark:text-white ';
+    'shadow-xl rounded-md min-w-[75px] font-normal bg-white border-black/10 border dark:bg-gray-700 text-black dark:text-white';
 
   if (!visible) {
     return null;
@@ -1,7 +1,6 @@
 import React from 'react';
 import TextareaAutosize from 'react-textarea-autosize';
 import type { TModelSelectProps } from '~/common';
-import { ESide } from '~/common';
 import {
   Input,
   Label,

@@ -12,9 +11,11 @@ import {
   SelectDropDown,
   HoverCardTrigger,
 } from '~/components/ui';
-import { cn, defaultTextProps, optionText, removeFocusOutlines } from '~/utils';
+import { DynamicInputNumber } from '~/components/SidePanel/Parameters';
 import OptionHover from './OptionHover';
+import { cn, defaultTextProps, optionText, removeFocusOutlines } from '~/utils/';
 import { useLocalize } from '~/hooks';
+import { ESide } from '~/common';
 
 export default function Settings({ conversation, setOption, models, readonly }: TModelSelectProps) {
   const localize = useLocalize();

@@ -83,6 +84,28 @@ export default function Settings({ conversation, setOption, models, readonly }:
         </div>
       </div>
       <div className="col-span-5 flex flex-col items-center justify-start gap-6 px-3 sm:col-span-2">
+        <DynamicInputNumber
+          columnSpan={2}
+          settingKey="maxContextTokens"
+          setOption={setOption}
+          label="com_endpoint_context_tokens"
+          labelCode={true}
+          description="com_endpoint_context_info"
+          descriptionCode={true}
+          placeholder="com_nav_theme_system"
+          placeholderCode={true}
+          descriptionSide="right"
+          conversation={conversation}
+          readonly={readonly}
+          range={{
+            min: 10,
+            max: 2000000,
+            step: 1000,
+          }}
+          className="mt-1 w-full justify-between"
+          inputClassName="w-1/3"
+          showDefault={false}
+        />
         <HoverCard openDelay={300}>
           <HoverCardTrigger className="grid w-full items-center gap-2">
             <div className="flex justify-between">
@@ -11,8 +11,9 @@ import {
   SelectDropDown,
   HoverCardTrigger,
 } from '~/components/ui';
-import OptionHover from './OptionHover';
 import { cn, defaultTextProps, optionText, removeFocusOutlines } from '~/utils';
+import { DynamicInputNumber } from '~/components/SidePanel/Parameters';
+import OptionHover from './OptionHover';
 import { useLocalize } from '~/hooks';
 import { ESide } from '~/common';
 

@@ -103,6 +104,28 @@ export default function Settings({ conversation, setOption, models, readonly }:
         </div>
       </div>
       <div className="col-span-5 flex flex-col items-center justify-start gap-6 px-3 sm:col-span-2">
+        <DynamicInputNumber
+          columnSpan={2}
+          settingKey="maxContextTokens"
+          setOption={setOption}
+          label="com_endpoint_context_tokens"
+          labelCode={true}
+          description="com_endpoint_context_info"
+          descriptionCode={true}
+          placeholder="com_nav_theme_system"
+          placeholderCode={true}
+          descriptionSide="right"
+          conversation={conversation}
+          readonly={readonly}
+          range={{
+            min: 10,
+            max: 2000000,
+            step: 1000,
+          }}
+          className="mt-1 w-full justify-between"
+          inputClassName="w-1/3"
+          showDefault={false}
+        />
         <HoverCard openDelay={300}>
           <HoverCardTrigger className="grid w-full items-center gap-2">
             <div className="flex justify-between">
@@ -18,7 +18,7 @@ import {
   HoverCardTrigger,
 } from '~/components/ui';
 import { cn, defaultTextProps, optionText, removeFocusOutlines } from '~/utils';
-import { DynamicTags } from '~/components/SidePanel/Parameters';
+import { DynamicTags, DynamicInputNumber } from '~/components/SidePanel/Parameters';
 import { useLocalize, useDebouncedInput } from '~/hooks';
 import type { TModelSelectProps } from '~/common';
 import OptionHover from './OptionHover';

@@ -154,6 +154,50 @@ export default function Settings({ conversation, setOption, models, readonly }:
         </div>
       </div>
       <div className="col-span-5 flex flex-col items-center justify-start gap-6 px-3 sm:col-span-2">
+        <DynamicInputNumber
+          columnSpan={2}
+          settingKey="maxContextTokens"
+          setOption={setOption}
+          label="com_endpoint_context_tokens"
+          labelCode={true}
+          description="com_endpoint_context_info"
+          descriptionCode={true}
+          placeholder="com_nav_theme_system"
+          placeholderCode={true}
+          descriptionSide="right"
+          conversation={conversation}
+          readonly={readonly}
+          range={{
+            min: 10,
+            max: 2000000,
+            step: 1000,
+          }}
+          className="mt-1 w-full justify-between"
+          inputClassName="w-1/3"
+          showDefault={false}
+        />
+        <DynamicInputNumber
+          columnSpan={2}
+          settingKey="max_tokens"
+          setOption={setOption}
+          label="com_endpoint_max_output_tokens"
+          labelCode={true}
+          description="com_endpoint_openai_max_tokens"
+          descriptionCode={true}
+          placeholder="com_nav_theme_system"
+          placeholderCode={true}
+          descriptionSide="top"
+          conversation={conversation}
+          readonly={readonly}
+          range={{
+            min: 10,
+            max: 2000000,
+            step: 1000,
+          }}
+          className="mt-1 w-full justify-between"
+          inputClassName="w-1/3"
+          showDefault={false}
+        />
         <HoverCard openDelay={300}>
           <HoverCardTrigger className="grid w-full items-center gap-2">
             <div className="flex justify-between">
@@ -14,9 +14,16 @@ import {
   HoverCardTrigger,
   MultiSelectDropDown,
 } from '~/components/ui';
-import { cn, defaultTextProps, optionText, removeFocusOutlines } from '~/utils';
+import {
+  cn,
+  defaultTextProps,
+  optionText,
+  removeFocusOutlines,
+  processPlugins,
+  selectPlugins,
+} from '~/utils';
+import { DynamicInputNumber } from '~/components/SidePanel/Parameters';
 import { useLocalize, useDebouncedInput } from '~/hooks';
-import { processPlugins, selectPlugins } from '~/utils';
 import OptionHover from './OptionHover';
 import { ESide } from '~/common';
 import store from '~/store';

@@ -170,6 +177,28 @@ export default function Settings({
           containerClassName="flex w-full resize-none border border-transparent"
           labelClassName="dark:text-white"
         />
+        <DynamicInputNumber
+          columnSpan={2}
+          settingKey="maxContextTokens"
+          setOption={setOption}
+          label="com_endpoint_context_tokens"
+          labelCode={true}
+          description="com_endpoint_context_info"
+          descriptionCode={true}
+          placeholder="com_nav_theme_system"
+          placeholderCode={true}
+          descriptionSide="right"
+          conversation={conversation}
+          readonly={readonly}
+          range={{
+            min: 10,
+            max: 2000000,
+            step: 1000,
+          }}
+          className="mt-1 w-full justify-between"
+          inputClassName="w-1/3"
+          showDefault={false}
+        />
         <HoverCard openDelay={300}>
           <HoverCardTrigger className="grid w-full items-center gap-2">
             <div className="flex justify-between">
client/src/components/Nav/MobileNav.tsx (new file, 73 lines)

@@ -0,0 +1,73 @@
+import React from 'react';
+import { useRecoilValue } from 'recoil';
+import type { Dispatch, SetStateAction } from 'react';
+import { useLocalize, useNewConvo } from '~/hooks';
+import store from '~/store';
+
+export default function MobileNav({
+  setNavVisible,
+}: {
+  setNavVisible: Dispatch<SetStateAction<boolean>>;
+}) {
+  const localize = useLocalize();
+  const { newConversation } = useNewConvo(0);
+  const conversation = useRecoilValue(store.conversationByIndex(0));
+  const { title = 'New Chat' } = conversation || {};
+
+  return (
+    <div className="border-token-border-medium bg-token-main-surface-primary sticky top-0 z-10 flex min-h-[40px] items-center justify-center border-b bg-white pl-1 dark:bg-gray-800 dark:text-white md:hidden md:hidden">
+      <button
+        type="button"
+        data-testid="mobile-header-new-chat-button"
+        className="inline-flex h-10 w-10 items-center justify-center rounded-md hover:text-gray-800 focus:outline-none focus:ring-2 focus:ring-inset focus:ring-white active:opacity-50 dark:hover:text-white"
+        onClick={() =>
+          setNavVisible((prev) => {
+            localStorage.setItem('navVisible', JSON.stringify(!prev));
+            return !prev;
+          })
+        }
+      >
+        <span className="sr-only">{localize('com_nav_open_sidebar')}</span>
+        <svg
+          width="24"
+          height="24"
+          viewBox="0 0 24 24"
+          fill="none"
+          xmlns="http://www.w3.org/2000/svg"
+          className="icon-md"
+        >
+          <path
+            fillRule="evenodd"
+            clipRule="evenodd"
+            d="M3 8C3 7.44772 3.44772 7 4 7H20C20.5523 7 21 7.44772 21 8C21 8.55228 20.5523 9 20 9H4C3.44772 9 3 8.55228 3 8ZM3 16C3 15.4477 3.44772 15 4 15H14C14.5523 15 15 15.4477 15 16C15 16.5523 14.5523 17 14 17H4C3.44772 17 3 16.5523 3 16Z"
+            fill="currentColor"
+          />
+        </svg>
+      </button>
+      <h1 className="flex-1 overflow-hidden text-ellipsis whitespace-nowrap text-center text-sm font-normal">
+        {title || localize('com_ui_new_chat')}
+      </h1>
+      <button
+        type="button"
+        className="inline-flex h-10 w-10 items-center justify-center rounded-md hover:text-gray-800 focus:outline-none focus:ring-2 focus:ring-inset focus:ring-white active:opacity-50 dark:hover:text-white"
+        onClick={() => newConversation()}
+      >
+        <svg
+          width="24"
+          height="24"
+          viewBox="0 0 24 24"
+          fill="none"
+          xmlns="http://www.w3.org/2000/svg"
+          className="icon-md"
+        >
+          <path
+            fillRule="evenodd"
+            clipRule="evenodd"
+            d="M16.7929 2.79289C18.0118 1.57394 19.9882 1.57394 21.2071 2.79289C22.4261 4.01184 22.4261 5.98815 21.2071 7.20711L12.7071 15.7071C12.5196 15.8946 12.2652 16 12 16H9C8.44772 16 8 15.5523 8 15V12C8 11.7348 8.10536 11.4804 8.29289 11.2929L16.7929 2.79289ZM19.7929 4.20711C19.355 3.7692 18.645 3.7692 18.2071 4.2071L10 12.4142V14H11.5858L19.7929 5.79289C20.2308 5.35499 20.2308 4.64501 19.7929 4.20711ZM6 5C5.44772 5 5 5.44771 5 6V18C5 18.5523 5.44772 19 6 19H18C18.5523 19 19 18.5523 19 18V14C19 13.4477 19.4477 13 20 13C20.5523 13 21 13.4477 21 14V18C21 19.6569 19.6569 21 18 21H6C4.34315 21 3 19.6569 3 18V6C3 4.34314 4.34315 3 6 3H10C10.5523 3 11 3.44771 11 4C11 4.55228 10.5523 5 10 5H6Z"
+            fill="currentColor"
+          />
+        </svg>
+      </button>
+    </div>
+  );
+}
@@ -2,6 +2,7 @@ export * from './ExportConversation';
 export * from './SettingsTabs/';
 export { default as ClearConvos } from './ClearConvos';
 export { default as Logout } from './Logout';
+export { default as MobileNav } from './MobileNav';
 export { default as Nav } from './Nav';
 export { default as NavLink } from './NavLink';
 export { default as NavLinks } from './NavLinks';
@@ -0,0 +1,108 @@
+import { OptionTypes } from 'librechat-data-provider';
+import type { DynamicSettingProps } from 'librechat-data-provider';
+import type { ValueType } from '@rc-component/mini-decimal';
+import { Label, HoverCard, InputNumber, HoverCardTrigger } from '~/components/ui';
+import { useLocalize, useDebouncedInput, useParameterEffects } from '~/hooks';
+import { cn, defaultTextProps, optionText } from '~/utils';
+import { ESide } from '~/common';
+import { useChatContext } from '~/Providers';
+import OptionHover from './OptionHover';
+
+function DynamicInputNumber({
+  label,
+  settingKey,
+  defaultValue,
+  description,
+  columnSpan,
+  setOption,
+  optionType,
+  readonly = false,
+  showDefault = true,
+  labelCode,
+  descriptionCode,
+  placeholderCode,
+  placeholder,
+  conversation,
+  range,
+  className = '',
+  inputClassName = '',
+}: DynamicSettingProps) {
+  const localize = useLocalize();
+  const { preset } = useChatContext();
+
+  const [setInputValue, inputValue] = useDebouncedInput<ValueType | null>({
+    optionKey: optionType !== OptionTypes.Custom ? settingKey : undefined,
+    initialValue:
+      optionType !== OptionTypes.Custom
+        ? (conversation?.[settingKey] as number)
+        : (defaultValue as number),
+    setter: () => ({}),
+    setOption,
+  });
+
+  useParameterEffects({
+    preset,
+    settingKey,
+    defaultValue: typeof defaultValue === 'undefined' ? '' : defaultValue,
+    conversation,
+    inputValue,
+    setInputValue,
+  });
+
+  return (
+    <div
+      className={cn(
+        'flex flex-col items-center justify-start gap-6',
+        columnSpan ? `col-span-${columnSpan}` : 'col-span-full',
+        className,
+      )}
+    >
+      <HoverCard openDelay={300}>
+        <HoverCardTrigger className="grid w-full items-center gap-2">
+          <div className="flex justify-between">
+            <Label
+              htmlFor={`${settingKey}-dynamic-setting`}
+              className="text-left text-sm font-medium"
+            >
+              {labelCode ? localize(label ?? '') || label : label ?? settingKey}{' '}
+              {showDefault && (
+                <small className="opacity-40">
+                  ({localize('com_endpoint_default')}: {defaultValue})
+                </small>
+              )}
+            </Label>
+            <InputNumber
+              id={`${settingKey}-dynamic-setting-input-number`}
+              disabled={readonly}
+              value={inputValue}
+              onChange={setInputValue}
+              min={range?.min}
+              max={range?.max}
+              step={range?.step}
+              placeholder={
+                placeholderCode ? localize(placeholder ?? '') || placeholder : placeholder
+              }
+              controls={false}
+              className={cn(
+                defaultTextProps,
+                cn(
+                  optionText,
+                  'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
+                ),
+                inputClassName,
+              )}
+            />
+          </div>
+        </HoverCardTrigger>
+        {description && (
+          <OptionHover
+            description={descriptionCode ? localize(description) || description : description}
+            side={ESide.Left}
+          />
+        )}
+      </HoverCard>
+    </div>
+  );
+}
+
+export default DynamicInputNumber;
@@ -1,3 +1,4 @@
+export { default as DynamicInputNumber } from './DynamicInputNumber';
 export { default as DynamicDropdown } from './DynamicDropdown';
 export { default as DynamicCheckbox } from './DynamicCheckbox';
 export { default as DynamicTextarea } from './DynamicTextarea';
@@ -280,6 +280,9 @@ export default {
   com_endpoint_tone_style: 'Tone Style',
   com_endpoint_token_count: 'Token count',
   com_endpoint_output: 'Output',
+  com_endpoint_context_tokens: 'Max Context Tokens',
+  com_endpoint_context_info: `The maximum number of tokens that can be used for context. Use this for control of how many tokens are sent per request.
+  If unspecified, will use system defaults based on known models' context size. Setting higher values may result in errors and/or higher token cost.`,
   com_endpoint_google_temp:
     'Higher values = more random, while lower values = more focused and deterministic. We recommend altering this or Top P but not both.',
   com_endpoint_google_topp:

@@ -305,6 +308,9 @@ export default {
   com_endpoint_max_output_tokens: 'Max Output Tokens',
   com_endpoint_stop: 'Stop Sequences',
   com_endpoint_stop_placeholder: 'Separate values by pressing `Enter`',
+  com_endpoint_openai_max_tokens: `Optional \`max_tokens\` field, representing the maximum number of tokens that can be generated in the chat completion.
+
+  The total length of input tokens and generated tokens is limited by the models context length. You may experience errors if this number exceeds the max context tokens.`,
   com_endpoint_openai_temp:
     'Higher values = more random, while lower values = more focused and deterministic. We recommend altering this or Top P but not both.',
   com_endpoint_openai_max:
@@ -5,7 +5,7 @@ import { useGetSearchEnabledQuery } from 'librechat-data-provider/react-query';
 import type { ContextType } from '~/common';
 import { useAuthContext, useAssistantsMap, useFileMap } from '~/hooks';
 import { AssistantsMapContext, FileMapContext } from '~/Providers';
-import { Nav } from '~/components/Nav';
+import { Nav, MobileNav } from '~/components/Nav';
 import store from '~/store';
 
 export default function Root() {

@@ -45,6 +45,7 @@ export default function Root() {
       <div className="relative z-0 flex h-full w-full overflow-hidden">
         <Nav navVisible={navVisible} setNavVisible={setNavVisible} />
         <div className="relative flex h-full max-w-full flex-1 flex-col overflow-hidden">
+          <MobileNav setNavVisible={setNavVisible} />
           <Outlet context={{ navVisible, setNavVisible } satisfies ContextType} />
         </div>
       </div>
@@ -1,6 +1,6 @@
 {
   "name": "librechat-data-provider",
-  "version": "0.6.0",
+  "version": "0.6.1",
   "description": "data services for librechat apps",
   "main": "dist/index.js",
   "module": "dist/index.es.js",
@@ -72,6 +72,8 @@ export type DynamicSettingProps = Partial<SettingDefinition> & {
   setOption: TSetOption;
   conversation: TConversation | TPreset | null;
   defaultValue?: number | boolean | string | string[];
+  className?: string;
+  inputClassName?: string;
 };
 
 const requiredSettingFields = ['key', 'type', 'component'];

@@ -508,6 +510,7 @@ export const generateOpenAISchema = (customOpenAI: OpenAISettings) => {
       frequency_penalty: true,
       resendFiles: true,
       imageDetail: true,
+      maxContextTokens: true,
     })
     .transform((obj) => ({
       ...obj,

@@ -521,6 +524,7 @@
       resendFiles:
         typeof obj.resendFiles === 'boolean' ? obj.resendFiles : defaults.resendFiles.default,
       imageDetail: obj.imageDetail ?? defaults.imageDetail.default,
+      maxContextTokens: obj.maxContextTokens ?? undefined,
     }))
     .catch(() => ({
       model: defaults.model.default,

@@ -532,6 +536,7 @@
       frequency_penalty: defaults.frequency_penalty.default,
       resendFiles: defaults.resendFiles.default,
       imageDetail: defaults.imageDetail.default,
+      maxContextTokens: undefined,
     }));
 };
 

@@ -547,6 +552,7 @@ export const generateGoogleSchema = (customGoogle: GoogleSettings) => {
       maxOutputTokens: true,
       topP: true,
       topK: true,
+      maxContextTokens: true,
     })
     .transform((obj) => {
       const isGemini = obj?.model?.toLowerCase()?.includes('gemini');

@@ -571,6 +577,7 @@
         maxOutputTokens,
         topP: obj.topP ?? defaults.topP.default,
         topK: obj.topK ?? defaults.topK.default,
+        maxContextTokens: obj.maxContextTokens ?? undefined,
       };
     })
     .catch(() => ({

@@ -582,5 +589,6 @@
       maxOutputTokens: defaults.maxOutputTokens.default,
       topP: defaults.topP.default,
       topK: defaults.topK.default,
+      maxContextTokens: undefined,
     }));
 };
@@ -105,6 +105,12 @@ export const openAISettings = {
   resendFiles: {
     default: true,
   },
+  maxContextTokens: {
+    default: undefined,
+  },
+  max_tokens: {
+    default: undefined,
+  },
   imageDetail: {
     default: ImageDetail.auto,
   },

@@ -309,6 +315,8 @@ export const tConversationSchema = z.object({
   maxOutputTokens: z.number().optional(),
   agentOptions: tAgentOptionsSchema.nullable().optional(),
   file_ids: z.array(z.string()).optional(),
+  maxContextTokens: z.number().optional(),
+  max_tokens: z.number().optional(),
   /** @deprecated */
   resendImages: z.boolean().optional(),
   /* vision */

@@ -382,6 +390,8 @@ export const openAISchema = tConversationSchema
     iconURL: true,
     greeting: true,
     spec: true,
+    maxContextTokens: true,
+    max_tokens: true,
   })
   .transform((obj) => ({
     ...obj,

@@ -399,6 +409,8 @@ export const openAISchema = tConversationSchema
     iconURL: obj.iconURL ?? undefined,
     greeting: obj.greeting ?? undefined,
     spec: obj.spec ?? undefined,
+    maxContextTokens: obj.maxContextTokens ?? undefined,
+    max_tokens: obj.max_tokens ?? undefined,
   }))
   .catch(() => ({
     model: openAISettings.model.default,

@@ -414,6 +426,8 @@ export const openAISchema = tConversationSchema
     iconURL: undefined,
     greeting: undefined,
     spec: undefined,
+    maxContextTokens: undefined,
+    max_tokens: undefined,
   }));
 
 export const googleSchema = tConversationSchema

@@ -429,6 +443,7 @@ export const googleSchema = tConversationSchema
     iconURL: true,
     greeting: true,
     spec: true,
+    maxContextTokens: true,
   })
   .transform((obj) => {
     const isGemini = obj?.model?.toLowerCase()?.includes('gemini');

@@ -456,6 +471,7 @@ export const googleSchema = tConversationSchema
       iconURL: obj.iconURL ?? undefined,
       greeting: obj.greeting ?? undefined,
       spec: obj.spec ?? undefined,
+      maxContextTokens: obj.maxContextTokens ?? undefined,
     };
   })
   .catch(() => ({

@@ -470,6 +486,7 @@ export const googleSchema = tConversationSchema
     iconURL: undefined,
     greeting: undefined,
     spec: undefined,
+    maxContextTokens: undefined,
   }));
 
 export const bingAISchema = tConversationSchema

@@ -520,6 +537,7 @@ export const anthropicSchema = tConversationSchema
     iconURL: true,
     greeting: true,
     spec: true,
+    maxContextTokens: true,
   })
   .transform((obj) => ({
     ...obj,

@@ -534,6 +552,7 @@ export const anthropicSchema = tConversationSchema
     iconURL: obj.iconURL ?? undefined,
     greeting: obj.greeting ?? undefined,
     spec: obj.spec ?? undefined,
+    maxContextTokens: obj.maxContextTokens ?? undefined,
   }))
   .catch(() => ({
     model: 'claude-1',

@@ -547,6 +566,7 @@ export const anthropicSchema = tConversationSchema
     iconURL: undefined,
     greeting: undefined,
     spec: undefined,
+    maxContextTokens: undefined,
   }));
 
 export const chatGPTBrowserSchema = tConversationSchema

@@ -576,6 +596,7 @@ export const gptPluginsSchema = tConversationSchema
     iconURL: true,
     greeting: true,
     spec: true,
+    maxContextTokens: true,
   })
   .transform((obj) => ({
     ...obj,

@@ -596,6 +617,7 @@ export const gptPluginsSchema = tConversationSchema
     iconURL: obj.iconURL ?? undefined,
     greeting: obj.greeting ?? undefined,
     spec: obj.spec ?? undefined,
+    maxContextTokens: obj.maxContextTokens ?? undefined,
   }))
   .catch(() => ({
     model: 'gpt-3.5-turbo',

@@ -615,6 +637,7 @@ export const gptPluginsSchema = tConversationSchema
     iconURL: undefined,
     greeting: undefined,
     spec: undefined,
+    maxContextTokens: undefined,
   }));
 
 export function removeNullishValues<T extends object>(obj: T): T {

@@ -688,6 +711,8 @@ export const compactOpenAISchema = tConversationSchema
     iconURL: true,
     greeting: true,
     spec: true,
+    maxContextTokens: true,
+    max_tokens: true,
   })
   .transform((obj: Partial<TConversation>) => {
     const newObj: Partial<TConversation> = { ...obj };

@@ -727,6 +752,7 @@ export const compactGoogleSchema = tConversationSchema
     iconURL: true,
     greeting: true,
     spec: true,
+    maxContextTokens: true,
   })
   .transform((obj) => {
     const newObj: Partial<TConversation> = { ...obj };

@@ -760,6 +786,7 @@ export const compactAnthropicSchema = tConversationSchema
     iconURL: true,
     greeting: true,
     spec: true,
+    maxContextTokens: true,
   })
   .transform((obj) => {
     const newObj: Partial<TConversation> = { ...obj };

@@ -807,6 +834,7 @@ export const compactPluginsSchema = tConversationSchema
     iconURL: true,
     greeting: true,
     spec: true,
+    maxContextTokens: true,
   })
   .transform((obj) => {
     const newObj: Partial<TConversation> = { ...obj };
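Throughout the data-provider schemas above, maxContextTokens follows one zod pattern: picked into each endpoint schema, normalized with a nullish coalesce to undefined in the transform, and reset to undefined in the catch fallback, so an unset value never becomes 0 or null. A self-contained sketch of that pattern (assumes zod v3; the object here is illustrative, not the real tConversationSchema):

import { z } from 'zod';

// Illustrative schema only.
const exampleSchema = z
  .object({
    model: z.string().optional(),
    maxContextTokens: z.number().optional(),
  })
  .transform((obj) => ({
    ...obj,
    maxContextTokens: obj.maxContextTokens ?? undefined,
  }))
  .catch(() => ({
    model: 'gpt-3.5-turbo',
    maxContextTokens: undefined,
  }));

console.log(exampleSchema.parse({ model: 'gpt-4', maxContextTokens: 8191 }));
// -> { model: 'gpt-4', maxContextTokens: 8191 }
console.log(exampleSchema.parse({ model: 'gpt-4' }));
// -> { model: 'gpt-4', maxContextTokens: undefined }
console.log(exampleSchema.parse({ maxContextTokens: 'not-a-number' }));
// invalid input falls through to the catch defaults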
@@ -17,6 +17,7 @@ export type TEndpointOption = {
   endpointType?: EModelEndpoint;
   modelDisplayLabel?: string;
   resendFiles?: boolean;
+  maxContextTokens?: number;
   imageDetail?: ImageDetail;
   model?: string | null;
   promptPrefix?: string;