diff --git a/api/app/clients/AnthropicClient.js b/api/app/clients/AnthropicClient.js
index 662db65e4a..483427e46e 100644
--- a/api/app/clients/AnthropicClient.js
+++ b/api/app/clients/AnthropicClient.js
@@ -83,11 +83,13 @@ class AnthropicClient extends BaseClient {
       this.options = options;
     }

-    const modelOptions = this.options.modelOptions || {};
-    this.modelOptions = {
-      ...modelOptions,
-      model: modelOptions.model || anthropicSettings.model.default,
-    };
+    this.modelOptions = Object.assign(
+      {
+        model: anthropicSettings.model.default,
+      },
+      this.modelOptions,
+      this.options.modelOptions,
+    );

     const modelMatch = matchModelName(this.modelOptions.model, EModelEndpoint.anthropic);
     this.isClaude3 = modelMatch.startsWith('claude-3');
diff --git a/api/app/clients/GoogleClient.js b/api/app/clients/GoogleClient.js
index 2dd921c3ce..0eeb4127d0 100644
--- a/api/app/clients/GoogleClient.js
+++ b/api/app/clients/GoogleClient.js
@@ -120,19 +120,7 @@ class GoogleClient extends BaseClient {
       .filter((ex) => ex)
       .filter((obj) => obj.input.content !== '' && obj.output.content !== '');

-    const modelOptions = this.options.modelOptions || {};
-    this.modelOptions = {
-      ...modelOptions,
-      // set some good defaults (check for undefined in some cases because they may be 0)
-      model: modelOptions.model || settings.model.default,
-      temperature:
-        typeof modelOptions.temperature === 'undefined'
-          ? settings.temperature.default
-          : modelOptions.temperature,
-      topP: typeof modelOptions.topP === 'undefined' ? settings.topP.default : modelOptions.topP,
-      topK: typeof modelOptions.topK === 'undefined' ? settings.topK.default : modelOptions.topK,
-      // stop: modelOptions.stop // no stop method for now
-    };
+    this.modelOptions = this.options.modelOptions || {};

     this.options.attachments?.then((attachments) => this.checkVisionRequest(attachments));

@@ -808,7 +796,7 @@ class GoogleClient extends BaseClient {
     });

     reply = titleResponse.content;
-
+    // TODO: RECORD TOKEN USAGE
     return reply;
   }
 }
diff --git a/api/app/clients/OpenAIClient.js b/api/app/clients/OpenAIClient.js
index a7f97e5a3c..efb09a6ed3 100644
--- a/api/app/clients/OpenAIClient.js
+++ b/api/app/clients/OpenAIClient.js
@@ -6,6 +6,7 @@ const {
   ImageDetail,
   EModelEndpoint,
   resolveHeaders,
+  openAISettings,
   ImageDetailCost,
   CohereConstants,
   getResponseSender,
@@ -85,26 +86,13 @@ class OpenAIClient extends BaseClient {
       this.apiKey = this.options.openaiApiKey;
     }

-    const modelOptions = this.options.modelOptions || {};
-
-    if (!this.modelOptions) {
-      this.modelOptions = {
-        ...modelOptions,
-        model: modelOptions.model || 'gpt-3.5-turbo',
-        temperature:
-          typeof modelOptions.temperature === 'undefined' ? 0.8 : modelOptions.temperature,
-        top_p: typeof modelOptions.top_p === 'undefined' ? 1 : modelOptions.top_p,
-        presence_penalty:
-          typeof modelOptions.presence_penalty === 'undefined' ? 1 : modelOptions.presence_penalty,
-        stop: modelOptions.stop,
-      };
-    } else {
-      // Update the modelOptions if it already exists
-      this.modelOptions = {
-        ...this.modelOptions,
-        ...modelOptions,
-      };
-    }
+    this.modelOptions = Object.assign(
+      {
+        model: openAISettings.model.default,
+      },
+      this.modelOptions,
+      this.options.modelOptions,
+    );

     this.defaultVisionModel = this.options.visionModel ?? 'gpt-4-vision-preview';
     if (typeof this.options.attachments?.then === 'function') {
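
Both clients now normalize modelOptions with the same Object.assign merge, where later sources take precedence. A minimal sketch of the precedence order (the option values below are illustrative, not taken from the PR):

// Object.assign copies left to right, so later sources overwrite earlier ones:
// built-in default < any existing this.modelOptions < per-request options.
const defaults = { model: 'default-model' };              // e.g. anthropicSettings.model.default
const existing = { model: 'claude-2', temperature: 0.5 }; // prior this.modelOptions, if any
const incoming = { temperature: 0.7 };                    // this.options.modelOptions

Object.assign({}, defaults, existing, incoming);
// => { model: 'claude-2', temperature: 0.7 }
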
diff --git a/api/server/middleware/buildEndpointOption.js b/api/server/middleware/buildEndpointOption.js
index ddaaa35a32..376daa2ac4 100644
--- a/api/server/middleware/buildEndpointOption.js
+++ b/api/server/middleware/buildEndpointOption.js
@@ -1,4 +1,4 @@
-const { parseConvo, EModelEndpoint } = require('librechat-data-provider');
+const { parseCompactConvo, EModelEndpoint } = require('librechat-data-provider');
 const { getModelsConfig } = require('~/server/controllers/ModelController');
 const azureAssistants = require('~/server/services/Endpoints/azureAssistants');
 const assistants = require('~/server/services/Endpoints/assistants');
@@ -24,7 +24,7 @@ const buildFunction = {
 async function buildEndpointOption(req, res, next) {
   const { endpoint, endpointType } = req.body;
-  const parsedBody = parseConvo({ endpoint, endpointType, conversation: req.body });
+  const parsedBody = parseCompactConvo({ endpoint, endpointType, conversation: req.body });

   if (req.app.locals.modelSpecs?.list && req.app.locals.modelSpecs?.enforce) {
     /** @type {{ list: TModelSpec[] }}*/
diff --git a/api/server/services/Endpoints/anthropic/addTitle.js b/api/server/services/Endpoints/anthropic/addTitle.js
index 290183f1ea..b69c04de68 100644
--- a/api/server/services/Endpoints/anthropic/addTitle.js
+++ b/api/server/services/Endpoints/anthropic/addTitle.js
@@ -21,7 +21,11 @@ const addTitle = async (req, { text, response, client }) => {
   const titleCache = getLogStores(CacheKeys.GEN_TITLE);
   const key = `${req.user.id}-${response.conversationId}`;

-  const title = await client.titleConvo({ text, responseText: response?.text });
+  const title = await client.titleConvo({
+    text,
+    responseText: response?.text,
+    conversationId: response.conversationId,
+  });
   await titleCache.set(key, title, 120000);
   await saveConvo(
     req,
diff --git a/api/server/services/Endpoints/anthropic/buildOptions.js b/api/server/services/Endpoints/anthropic/buildOptions.js
index 677eabc6ae..ea667be2db 100644
--- a/api/server/services/Endpoints/anthropic/buildOptions.js
+++ b/api/server/services/Endpoints/anthropic/buildOptions.js
@@ -1,15 +1,18 @@
+const { removeNullishValues } = require('librechat-data-provider');
+
 const buildOptions = (endpoint, parsedBody) => {
   const {
     modelLabel,
     promptPrefix,
     maxContextTokens,
-    resendFiles,
+    resendFiles = true,
     iconURL,
     greeting,
     spec,
-    ...rest
+    ...modelOptions
   } = parsedBody;
-  const endpointOption = {
+
+  const endpointOption = removeNullishValues({
     endpoint,
     modelLabel,
     promptPrefix,
@@ -18,10 +21,8 @@ const buildOptions = (endpoint, parsedBody) => {
     greeting,
     spec,
     maxContextTokens,
-    modelOptions: {
-      ...rest,
-    },
-  };
+    modelOptions,
+  });

   return endpointOption;
 };
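
The buildOptions modules now pass their result through removeNullishValues from librechat-data-provider. Its implementation isn't shown in this diff; judging by the name and how it is used, a sketch of the assumed behavior:

// Assumed behavior: drop keys whose values are null or undefined,
// so endpointOption only carries parameters the request actually set.
function removeNullishValues(obj) {
  return Object.fromEntries(
    Object.entries(obj).filter(([, value]) => value != null),
  );
}

removeNullishValues({ endpoint: 'anthropic', promptPrefix: undefined, spec: null, resendFiles: true });
// => { endpoint: 'anthropic', resendFiles: true }
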
diff --git a/api/server/services/Endpoints/assistants/buildOptions.js b/api/server/services/Endpoints/assistants/buildOptions.js
index 047663c4e5..49d8771ff9 100644
--- a/api/server/services/Endpoints/assistants/buildOptions.js
+++ b/api/server/services/Endpoints/assistants/buildOptions.js
@@ -1,17 +1,17 @@
+const { removeNullishValues } = require('librechat-data-provider');
+
 const buildOptions = (endpoint, parsedBody) => {
   // eslint-disable-next-line no-unused-vars
-  const { promptPrefix, assistant_id, iconURL, greeting, spec, ...rest } = parsedBody;
-  const endpointOption = {
+  const { promptPrefix, assistant_id, iconURL, greeting, spec, ...modelOptions } = parsedBody;
+  const endpointOption = removeNullishValues({
     endpoint,
     promptPrefix,
     assistant_id,
     iconURL,
     greeting,
     spec,
-    modelOptions: {
-      ...rest,
-    },
-  };
+    modelOptions,
+  });

   return endpointOption;
 };
diff --git a/api/server/services/Endpoints/azureAssistants/buildOptions.js b/api/server/services/Endpoints/azureAssistants/buildOptions.js
index 047663c4e5..49d8771ff9 100644
--- a/api/server/services/Endpoints/azureAssistants/buildOptions.js
+++ b/api/server/services/Endpoints/azureAssistants/buildOptions.js
@@ -1,17 +1,17 @@
+const { removeNullishValues } = require('librechat-data-provider');
+
 const buildOptions = (endpoint, parsedBody) => {
   // eslint-disable-next-line no-unused-vars
-  const { promptPrefix, assistant_id, iconURL, greeting, spec, ...rest } = parsedBody;
-  const endpointOption = {
+  const { promptPrefix, assistant_id, iconURL, greeting, spec, ...modelOptions } = parsedBody;
+  const endpointOption = removeNullishValues({
     endpoint,
     promptPrefix,
     assistant_id,
     iconURL,
     greeting,
     spec,
-    modelOptions: {
-      ...rest,
-    },
-  };
+    modelOptions,
+  });

   return endpointOption;
 };
diff --git a/api/server/services/Endpoints/custom/buildOptions.js b/api/server/services/Endpoints/custom/buildOptions.js
index 83f8d387dd..bfec2983de 100644
--- a/api/server/services/Endpoints/custom/buildOptions.js
+++ b/api/server/services/Endpoints/custom/buildOptions.js
@@ -1,16 +1,18 @@
+const { removeNullishValues } = require('librechat-data-provider');
+
 const buildOptions = (endpoint, parsedBody, endpointType) => {
   const {
     chatGptLabel,
     promptPrefix,
     maxContextTokens,
-    resendFiles,
+    resendFiles = true,
     imageDetail,
     iconURL,
     greeting,
     spec,
-    ...rest
+    ...modelOptions
   } = parsedBody;
-  const endpointOption = {
+  const endpointOption = removeNullishValues({
     endpoint,
     endpointType,
     chatGptLabel,
@@ -21,10 +23,8 @@ const buildOptions = (endpoint, parsedBody, endpointType) => {
     greeting,
     spec,
     maxContextTokens,
-    modelOptions: {
-      ...rest,
-    },
-  };
+    modelOptions,
+  });

   return endpointOption;
 };
diff --git a/api/server/services/Endpoints/google/addTitle.js b/api/server/services/Endpoints/google/addTitle.js
index c6eabd6036..14eafe841d 100644
--- a/api/server/services/Endpoints/google/addTitle.js
+++ b/api/server/services/Endpoints/google/addTitle.js
@@ -47,7 +47,11 @@ const addTitle = async (req, { text, response, client }) => {
   const titleCache = getLogStores(CacheKeys.GEN_TITLE);
   const key = `${req.user.id}-${response.conversationId}`;

-  const title = await titleClient.titleConvo({ text, responseText: response?.text });
+  const title = await titleClient.titleConvo({
+    text,
+    responseText: response?.text,
+    conversationId: response.conversationId,
+  });
   await titleCache.set(key, title, 120000);
   await saveConvo(
     req,
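
Each addTitle service makes the same change: titleConvo now also receives the conversationId. The TODO added in GoogleClient ("RECORD TOKEN USAGE") suggests the id is there so title-generation tokens can be attributed to the conversation, though the consumer isn't shown in this diff. The shared call-site pattern, annotated:

// Pattern common to the anthropic, google, and openAI services:
const titleCache = getLogStores(CacheKeys.GEN_TITLE);
const key = `${req.user.id}-${response.conversationId}`; // one pending title per user+convo

const title = await client.titleConvo({
  text,                                     // the user's message
  responseText: response?.text,             // the model's reply
  conversationId: response.conversationId,  // new in this PR
});
await titleCache.set(key, title, 120000);   // cache for 120s while the convo is saved
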
diff --git a/api/server/services/Endpoints/google/buildOptions.js b/api/server/services/Endpoints/google/buildOptions.js
index 0d26e23c33..11fb1a64fe 100644
--- a/api/server/services/Endpoints/google/buildOptions.js
+++ b/api/server/services/Endpoints/google/buildOptions.js
@@ -1,17 +1,27 @@
+const { removeNullishValues } = require('librechat-data-provider');
+
 const buildOptions = (endpoint, parsedBody) => {
-  const { examples, modelLabel, promptPrefix, iconURL, greeting, spec, ...rest } = parsedBody;
-  const endpointOption = {
+  const {
     examples,
-    endpoint,
     modelLabel,
+    resendFiles = true,
     promptPrefix,
     iconURL,
     greeting,
     spec,
-    modelOptions: {
-      ...rest,
-    },
-  };
+    ...modelOptions
+  } = parsedBody;
+  const endpointOption = removeNullishValues({
+    examples,
+    endpoint,
+    modelLabel,
+    resendFiles,
+    promptPrefix,
+    iconURL,
+    greeting,
+    spec,
+    modelOptions,
+  });

   return endpointOption;
 };
diff --git a/api/server/services/Endpoints/gptPlugins/buildOptions.js b/api/server/services/Endpoints/gptPlugins/buildOptions.js
index ec098e9e3b..642feda089 100644
--- a/api/server/services/Endpoints/gptPlugins/buildOptions.js
+++ b/api/server/services/Endpoints/gptPlugins/buildOptions.js
@@ -1,3 +1,5 @@
+const { removeNullishValues } = require('librechat-data-provider');
+
 const buildOptions = (endpoint, parsedBody) => {
   const {
     chatGptLabel,
@@ -10,7 +12,7 @@ const buildOptions = (endpoint, parsedBody) => {
     maxContextTokens,
     ...modelOptions
   } = parsedBody;
-  const endpointOption = {
+  const endpointOption = removeNullishValues({
     endpoint,
     tools:
       tools
@@ -24,7 +26,7 @@ const buildOptions = (endpoint, parsedBody) => {
     spec,
     maxContextTokens,
     modelOptions,
-  };
+  });

   return endpointOption;
 };
diff --git a/api/server/services/Endpoints/openAI/addTitle.js b/api/server/services/Endpoints/openAI/addTitle.js
index 968c747ff1..af886dd22d 100644
--- a/api/server/services/Endpoints/openAI/addTitle.js
+++ b/api/server/services/Endpoints/openAI/addTitle.js
@@ -21,7 +21,11 @@ const addTitle = async (req, { text, response, client }) => {
   const titleCache = getLogStores(CacheKeys.GEN_TITLE);
   const key = `${req.user.id}-${response.conversationId}`;

-  const title = await client.titleConvo({ text, responseText: response?.text });
+  const title = await client.titleConvo({
+    text,
+    responseText: response?.text,
+    conversationId: response.conversationId,
+  });
   await titleCache.set(key, title, 120000);
   await saveConvo(
     req,
diff --git a/api/server/services/Endpoints/openAI/buildOptions.js b/api/server/services/Endpoints/openAI/buildOptions.js
index 1a6ebea4b6..f4ee0c5808 100644
--- a/api/server/services/Endpoints/openAI/buildOptions.js
+++ b/api/server/services/Endpoints/openAI/buildOptions.js
@@ -1,16 +1,18 @@
+const { removeNullishValues } = require('librechat-data-provider');
+
 const buildOptions = (endpoint, parsedBody) => {
   const {
     chatGptLabel,
     promptPrefix,
     maxContextTokens,
-    resendFiles,
+    resendFiles = true,
     imageDetail,
     iconURL,
     greeting,
     spec,
-    ...rest
+    ...modelOptions
  } = parsedBody;
-  const endpointOption = {
+  const endpointOption = removeNullishValues({
     endpoint,
     chatGptLabel,
     promptPrefix,
@@ -20,10 +22,8 @@ const buildOptions = (endpoint, parsedBody) => {
     greeting,
     spec,
     maxContextTokens,
-    modelOptions: {
-      ...rest,
-    },
-  };
+    modelOptions,
+  });

   return endpointOption;
 };
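
Combined with the destructuring defaults (resendFiles = true fires only when the field is undefined), the builders now emit a sparse endpointOption. A worked example against the openAI builder above, with a hypothetical request body and assuming the removeNullishValues behavior sketched earlier:

const parsedBody = {
  model: 'gpt-4',          // illustrative values
  temperature: 0.5,
  spec: null,
  promptPrefix: undefined,
};

buildOptions('openAI', parsedBody);
// resendFiles was undefined, so the default kicks in; spec and promptPrefix
// are nullish and get stripped; unnamed keys fall through to modelOptions:
// => {
//      endpoint: 'openAI',
//      resendFiles: true,
//      modelOptions: { model: 'gpt-4', temperature: 0.5 },
//    }
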
diff --git a/client/src/components/Chat/Messages/Content/Markdown.tsx b/client/src/components/Chat/Messages/Content/Markdown.tsx
index bc6dc5741e..07c64d6443 100644
--- a/client/src/components/Chat/Messages/Content/Markdown.tsx
+++ b/client/src/components/Chat/Messages/Content/Markdown.tsx
@@ -144,7 +144,7 @@ const Markdown = memo(({ content = '', isEdited, showCursor, isLatestMessage }:
   return (
-
+
   );
diff --git a/client/src/components/Endpoints/Settings/OpenAI.tsx b/client/src/components/Endpoints/Settings/OpenAI.tsx
index 0670e2d723..a95e7823c6 100644
--- a/client/src/components/Endpoints/Settings/OpenAI.tsx
+++ b/client/src/components/Endpoints/Settings/OpenAI.tsx
@@ -1,10 +1,10 @@
 import { useMemo } from 'react';
 import TextareaAutosize from 'react-textarea-autosize';
 import {
+  openAISettings,
   EModelEndpoint,
-  ImageDetail,
-  imageDetailNumeric,
   imageDetailValue,
+  imageDetailNumeric,
 } from 'librechat-data-provider';
 import type { TModelSelectProps, OnInputNumberChange } from '~/common';
 import {
@@ -240,7 +240,12 @@ export default function Settings({ conversation, setOption, models, readonly }:
            onValueChange={(value) => setTemperature(value[0])}
-            doubleClickHandler={() => setTemperature(1)}
-            max={2}
-            min={0}
-            step={0.01}
+            doubleClickHandler={() => setTemperature(openAISettings.temperature.default)}
+            max={openAISettings.temperature.max}
+            min={openAISettings.temperature.min}
+            step={openAISettings.temperature.step}
             className="flex h-4 w-full"
           />
@@ -280,16 +285,18 @@ export default function Settings({ conversation, setOption, models, readonly }:
            onChange={(value) => setTopP(Number(value))}
-            max={1}
-            min={0}
-            step={0.01}
+            max={openAISettings.top_p.max}
+            min={openAISettings.top_p.min}
+            step={openAISettings.top_p.step}
             controls={false}
             className={cn(
               defaultTextProps,
@@ -302,12 +309,12 @@ export default function Settings({ conversation, setOption, models, readonly }:
            onValueChange={(value) => setTopP(value[0])}
-            doubleClickHandler={() => setTopP(1)}
-            max={1}
-            min={0}
-            step={0.01}
+            doubleClickHandler={() => setTopP(openAISettings.top_p.default)}
+            max={openAISettings.top_p.max}
+            min={openAISettings.top_p.min}
+            step={openAISettings.top_p.step}
             className="flex h-4 w-full"
           />
@@ -319,16 +326,23 @@ export default function Settings({ conversation, setOption, models, readonly }:
            onChange={(value) => setFreqP(Number(value))}
-            max={2}
-            min={-2}
-            step={0.01}
+            max={openAISettings.frequency_penalty.max}
+            min={openAISettings.frequency_penalty.min}
+            step={openAISettings.frequency_penalty.step}
             controls={false}
             className={cn(
               defaultTextProps,
@@ -341,12 +355,12 @@ export default function Settings({ conversation, setOption, models, readonly }:
            onValueChange={(value) => setFreqP(value[0])}
-            doubleClickHandler={() => setFreqP(0)}
-            max={2}
-            min={-2}
-            step={0.01}
+            doubleClickHandler={() => setFreqP(openAISettings.frequency_penalty.default)}
+            max={openAISettings.frequency_penalty.max}
+            min={openAISettings.frequency_penalty.min}
+            step={openAISettings.frequency_penalty.step}
             className="flex h-4 w-full"
           />
@@ -358,16 +372,23 @@ export default function Settings({ conversation, setOption, models, readonly }:
            onChange={(value) => setPresP(Number(value))}
-            max={2}
-            min={-2}
-            step={0.01}
+            max={openAISettings.presence_penalty.max}
+            min={openAISettings.presence_penalty.min}
+            step={openAISettings.presence_penalty.step}
             controls={false}
             className={cn(
               defaultTextProps,
@@ -380,12 +401,12 @@ export default function Settings({ conversation, setOption, models, readonly }:
            onValueChange={(value) => setPresP(value[0])}
-            doubleClickHandler={() => setPresP(0)}
-            max={2}
-            min={-2}
-            step={0.01}
+            doubleClickHandler={() => setPresP(openAISettings.presence_penalty.default)}
+            max={openAISettings.presence_penalty.max}
+            min={openAISettings.presence_penalty.min}
+            step={openAISettings.presence_penalty.step}
             className="flex h-4 w-full"
           />
@@ -408,7 +429,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
            onCheckedChange={(checked) => setResendFiles(checked)}
             disabled={readonly}
             className="flex"
@@ -436,13 +457,14 @@ export default function Settings({ conversation, setOption, models, readonly }:
             id="image-detail-slider"
             disabled={readonly}
             value={[
-              imageDetailNumeric[imageDetail ?? ''] ?? imageDetailNumeric[ImageDetail.auto],
+              imageDetailNumeric[imageDetail ?? ''] ??
+                imageDetailNumeric[openAISettings.imageDetail.default],
             ]}
             onValueChange={(value) => setImageDetail(imageDetailValue[value[0]])}
-            doubleClickHandler={() => setImageDetail(ImageDetail.auto)}
-            max={2}
-            min={0}
-            step={1}
+            doubleClickHandler={() => setImageDetail(openAISettings.imageDetail.default)}
+            max={openAISettings.imageDetail.max}
+            min={openAISettings.imageDetail.min}
+            step={openAISettings.imageDetail.step}
           />
diff --git a/package-lock.json b/package-lock.json
index 7f873295a7..7110c93f78 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -31493,7 +31493,7 @@
     },
     "packages/data-provider": {
       "name": "librechat-data-provider",
-      "version": "0.7.412",
+      "version": "0.7.413",
       "license": "ISC",
       "dependencies": {
         "@types/js-yaml": "^4.0.9",
diff --git a/packages/data-provider/package.json b/packages/data-provider/package.json
index 43495cffae..986c8e8f20 100644
--- a/packages/data-provider/package.json
+++ b/packages/data-provider/package.json
@@ -1,6 +1,6 @@
 {
   "name": "librechat-data-provider",
-  "version": "0.7.412",
+  "version": "0.7.413",
   "description": "data services for librechat apps",
   "main": "dist/index.js",
   "module": "dist/index.es.js",
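
The schemas.ts hunks below complete the picture: openAISettings becomes the single source of truth for the defaults and slider bounds that OpenAI.tsx previously hard-coded. Only imageDetail's min/max/step are shown being added in this diff; inferring the rest from the literals they replace (old doubleClickHandler resets and the old compactOpenAISchema comparisons), the relevant entries presumably read:

// Inferred shape, not shown in the diff — matches the replaced literals:
export const openAISettings = {
  temperature: { min: 0, max: 2, step: 0.01, default: 1 },
  top_p: { min: 0, max: 1, step: 0.01, default: 1 },
  presence_penalty: { min: -2, max: 2, step: 0.01, default: 0 },
  frequency_penalty: { min: -2, max: 2, step: 0.01, default: 0 },
  resendFiles: { default: true },
  imageDetail: { default: ImageDetail.auto, min: 0, max: 2, step: 1 },
};
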
diff --git a/packages/data-provider/src/schemas.ts b/packages/data-provider/src/schemas.ts
index fc871a1808..76aac7e046 100644
--- a/packages/data-provider/src/schemas.ts
+++ b/packages/data-provider/src/schemas.ts
@@ -82,7 +82,7 @@ export const ImageVisionTool: FunctionTool = {
 };

 export const isImageVisionTool = (tool: FunctionTool | FunctionToolCall) =>
-  tool.type === 'function' && tool.function?.name === ImageVisionTool?.function?.name;
+  tool.type === 'function' && tool.function?.name === ImageVisionTool.function?.name;

 export const openAISettings = {
   model: {
@@ -123,6 +123,9 @@ export const openAISettings = {
   },
   imageDetail: {
     default: ImageDetail.auto,
+    min: 0,
+    max: 2,
+    step: 1,
   },
 };

@@ -840,22 +843,22 @@ export const compactOpenAISchema = tConversationSchema
   })
   .transform((obj: Partial<TConversation>) => {
     const newObj: Partial<TConversation> = { ...obj };
-    if (newObj.temperature === 1) {
+    if (newObj.temperature === openAISettings.temperature.default) {
       delete newObj.temperature;
     }
-    if (newObj.top_p === 1) {
+    if (newObj.top_p === openAISettings.top_p.default) {
       delete newObj.top_p;
     }
-    if (newObj.presence_penalty === 0) {
+    if (newObj.presence_penalty === openAISettings.presence_penalty.default) {
       delete newObj.presence_penalty;
     }
-    if (newObj.frequency_penalty === 0) {
+    if (newObj.frequency_penalty === openAISettings.frequency_penalty.default) {
       delete newObj.frequency_penalty;
     }
-    if (newObj.resendFiles === true) {
+    if (newObj.resendFiles === openAISettings.resendFiles.default) {
       delete newObj.resendFiles;
     }
-    if (newObj.imageDetail === ImageDetail.auto) {
+    if (newObj.imageDetail === openAISettings.imageDetail.default) {
       delete newObj.imageDetail;
     }
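
This transform gives "compact" conversations their meaning: parameters that equal their openAISettings defaults are deleted before the conversation is serialized, and the clients reintroduce defaults on the way back in via the Object.assign merges above. A sketch of the intended round trip (values from this diff; assuming the schema otherwise passes these fields through):

// Dropping defaults on the way out:
compactOpenAISchema.parse({ model: 'gpt-4', temperature: 1, top_p: 0.9, resendFiles: true });
// => { model: 'gpt-4', top_p: 0.9 }
//    temperature === 1 and resendFiles === true match their defaults, so both are removed

// Restoring defaults on the way back in (OpenAIClient constructor):
// this.modelOptions = Object.assign({ model: openAISettings.model.default }, this.modelOptions, parsed);
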