From c8baceac76d35c60c7aad617830add86f342aa5f Mon Sep 17 00:00:00 2001
From: Danny Avila
Date: Fri, 3 May 2024 12:49:26 -0400
Subject: [PATCH] 🐛 fix: Prevent Empty File Uploads & Assistants Fixes (#2611)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore: update default models for openai/assistants

* fix: allow fetching of assistants models

* change default model order; ensure assistant_id is defined when intended

* fix: prevent empty files from being uploaded
---
 api/server/services/Files/process.js      |  4 ++++
 api/server/services/ModelService.js       | 10 +++++-----
 client/src/hooks/Files/useFileHandling.ts |  4 ++++
 client/src/utils/buildDefaultConvo.ts     |  5 +++++
 packages/data-provider/package.json       |  2 +-
 packages/data-provider/src/config.ts      |  6 +++++-
 6 files changed, 24 insertions(+), 7 deletions(-)

diff --git a/api/server/services/Files/process.js b/api/server/services/Files/process.js
index efbcd63bff..7f91d481ae 100644
--- a/api/server/services/Files/process.js
+++ b/api/server/services/Files/process.js
@@ -514,6 +514,10 @@ function filterFile({ req, file, image }) {
     throw new Error('No file_id provided');
   }
 
+  if (file.size === 0) {
+    throw new Error('Empty file uploaded');
+  }
+
   /* parse to validate api call, throws error on fail */
   isUUID.parse(file_id);
 
diff --git a/api/server/services/ModelService.js b/api/server/services/ModelService.js
index 2145953bbe..540e7240a4 100644
--- a/api/server/services/ModelService.js
+++ b/api/server/services/ModelService.js
@@ -141,6 +141,7 @@ const fetchModels = async ({
  * @param {object} opts - The options for fetching the models.
  * @param {string} opts.user - The user ID to send to the API.
  * @param {boolean} [opts.azure=false] - Whether to fetch models from Azure.
+ * @param {boolean} [opts.assistants=false] - Whether to fetch models for Assistants.
  * @param {boolean} [opts.plugins=false] - Whether to fetch models from the plugins.
  * @param {string[]} [_models=[]] - The models to use as a fallback.
  */
@@ -150,7 +151,10 @@ const fetchOpenAIModels = async (opts, _models = []) => {
   const openaiBaseURL = 'https://api.openai.com/v1';
   let baseURL = openaiBaseURL;
   let reverseProxyUrl = process.env.OPENAI_REVERSE_PROXY;
-  if (opts.azure) {
+
+  if (opts.assistants && process.env.ASSISTANTS_BASE_URL) {
+    reverseProxyUrl = process.env.ASSISTANTS_BASE_URL;
+  } else if (opts.azure) {
     return models;
     // const azure = getAzureCredentials();
     // baseURL = (genAzureChatCompletion(azure))
@@ -245,10 +249,6 @@ const getOpenAIModels = async (opts) => {
     return models;
   }
 
-  if (opts.assistants) {
-    return models;
-  }
-
   return await fetchOpenAIModels(opts, models);
 };
 
diff --git a/client/src/hooks/Files/useFileHandling.ts b/client/src/hooks/Files/useFileHandling.ts
index b1187bdd08..ea7a745c9e 100644
--- a/client/src/hooks/Files/useFileHandling.ts
+++ b/client/src/hooks/Files/useFileHandling.ts
@@ -163,6 +163,10 @@ const useFileHandling = (params?: UseFileHandling) => {
   const validateFiles = (fileList: File[]) => {
     const existingFiles = Array.from(files.values());
     const incomingTotalSize = fileList.reduce((total, file) => total + file.size, 0);
+    if (incomingTotalSize === 0) {
+      setError('Empty files are not allowed.');
+      return false;
+    }
     const currentTotalSize = existingFiles.reduce((total, file) => total + file.size, 0);
 
     if (fileList.length + files.size > fileLimit) {
diff --git a/client/src/utils/buildDefaultConvo.ts b/client/src/utils/buildDefaultConvo.ts
index 93edcbf8d2..c8b00b18f9 100644
--- a/client/src/utils/buildDefaultConvo.ts
+++ b/client/src/utils/buildDefaultConvo.ts
@@ -64,6 +64,11 @@ const buildDefaultConvo = ({
     endpoint,
   };
 
+  // Ensures assistant_id is always defined
+  if (endpoint === EModelEndpoint.assistants && !defaultConvo.assistant_id && convo.assistant_id) {
+    defaultConvo.assistant_id = convo.assistant_id;
+  }
+
   defaultConvo.tools = lastConversationSetup?.tools ?? lastSelectedTools ?? defaultConvo.tools;
   defaultConvo.jailbreak = jailbreak ?? defaultConvo.jailbreak;
   defaultConvo.toneStyle = toneStyle ?? defaultConvo.toneStyle;
diff --git a/packages/data-provider/package.json b/packages/data-provider/package.json
index 2bf5b6b9e7..8a1be2f304 100644
--- a/packages/data-provider/package.json
+++ b/packages/data-provider/package.json
@@ -1,6 +1,6 @@
 {
   "name": "librechat-data-provider",
-  "version": "0.5.9",
+  "version": "0.6.0",
   "description": "data services for librechat apps",
   "main": "dist/index.js",
   "module": "dist/index.es.js",
diff --git a/packages/data-provider/src/config.ts b/packages/data-provider/src/config.ts
index cba900353e..f7884f889e 100644
--- a/packages/data-provider/src/config.ts
+++ b/packages/data-provider/src/config.ts
@@ -345,14 +345,16 @@ export const alternateName = {
 
 export const defaultModels = {
   [EModelEndpoint.assistants]: [
+    'gpt-3.5-turbo',
     'gpt-3.5-turbo-0125',
+    'gpt-4-turbo',
+    'gpt-4-turbo-2024-04-09',
     'gpt-4-0125-preview',
     'gpt-4-turbo-preview',
     'gpt-4-1106-preview',
     'gpt-3.5-turbo-1106',
     'gpt-3.5-turbo-16k-0613',
     'gpt-3.5-turbo-16k',
-    'gpt-3.5-turbo',
     'gpt-4',
     'gpt-4-0314',
     'gpt-4-32k-0314',
@@ -387,6 +389,8 @@
   ],
   [EModelEndpoint.openAI]: [
     'gpt-3.5-turbo-0125',
+    'gpt-4-turbo',
+    'gpt-4-turbo-2024-04-09',
     'gpt-3.5-turbo-16k-0613',
     'gpt-3.5-turbo-16k',
     'gpt-4-turbo-preview',
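
As a quick illustration of the core guard this patch introduces (the zero-byte check added to filterFile on the server, mirrored by validateFiles on the client), here is a minimal standalone TypeScript sketch; the UploadCandidate type and assertNonEmptyUpload helper are illustrative names only and do not exist in the LibreChat codebase:

    // Illustrative sketch only: mirrors the server-side check added to filterFile,
    // which rejects a zero-byte upload before any further validation runs.
    type UploadCandidate = {
      size: number;          // byte length reported for the uploaded file
      originalname: string;  // display name, used here only for readability
    };

    function assertNonEmptyUpload(file: UploadCandidate): void {
      if (file.size === 0) {
        // Same condition as the patch: an empty file is refused outright.
        throw new Error('Empty file uploaded');
      }
    }

    // Usage: passes for a non-empty file, throws for a zero-byte one.
    assertNonEmptyUpload({ size: 1024, originalname: 'notes.txt' });
    try {
      assertNonEmptyUpload({ size: 0, originalname: 'empty.txt' });
    } catch (err) {
      console.error((err as Error).message); // "Empty file uploaded"
    }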