From 0e05ff484fe5a55af36ca4cad803ae15413478fd Mon Sep 17 00:00:00 2001
From: Danny Avila
Date: Tue, 28 Oct 2025 09:36:03 -0400
Subject: [PATCH] =?UTF-8?q?=F0=9F=94=84=20refactor:=20OAI=20Image=20Edit?=
 =?UTF-8?q?=20Proxy,=20Speech=20Settings=20Handling,=20Import=20Query=20Da?=
 =?UTF-8?q?ta=20Usage=20(#10281)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore: correct startupConfig usage in ImportConversations component

* refactor: properly process configured speechToText and textToSpeech settings in getCustomConfigSpeech

* refactor: proxy configuration by utilizing HttpsProxyAgent for OpenAI Image Edits
---
 .../tools/structured/OpenAIImageTools.js      | 12 ++--------
 .../Files/Audio/getCustomConfigSpeech.js      | 24 ++++++++++++-------
 .../SettingsTabs/Data/ImportConversations.tsx | 11 ++++-----
 3 files changed, 23 insertions(+), 24 deletions(-)

diff --git a/api/app/clients/tools/structured/OpenAIImageTools.js b/api/app/clients/tools/structured/OpenAIImageTools.js
index 9a2a047bb1..35eeb32ffe 100644
--- a/api/app/clients/tools/structured/OpenAIImageTools.js
+++ b/api/app/clients/tools/structured/OpenAIImageTools.js
@@ -5,6 +5,7 @@ const FormData = require('form-data');
 const { ProxyAgent } = require('undici');
 const { tool } = require('@langchain/core/tools');
 const { logger } = require('@librechat/data-schemas');
+const { HttpsProxyAgent } = require('https-proxy-agent');
 const { logAxiosError, oaiToolkit } = require('@librechat/api');
 const { ContentTypes, EImageOutputType } = require('librechat-data-provider');
 const { getStrategyFunctions } = require('~/server/services/Files/strategies');
@@ -348,16 +349,7 @@ Error Message: ${error.message}`);
     };
 
     if (process.env.PROXY) {
-      try {
-        const url = new URL(process.env.PROXY);
-        axiosConfig.proxy = {
-          host: url.hostname.replace(/^\[|\]$/g, ''),
-          port: url.port ? parseInt(url.port, 10) : undefined,
-          protocol: url.protocol.replace(':', ''),
-        };
-      } catch (error) {
-        logger.error('Error parsing proxy URL:', error);
-      }
+      axiosConfig.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
     }
 
     if (process.env.IMAGE_GEN_OAI_AZURE_API_VERSION && process.env.IMAGE_GEN_OAI_BASEURL) {
diff --git a/api/server/services/Files/Audio/getCustomConfigSpeech.js b/api/server/services/Files/Audio/getCustomConfigSpeech.js
index b4bc8f704f..d0d0b51ac2 100644
--- a/api/server/services/Files/Audio/getCustomConfigSpeech.js
+++ b/api/server/services/Files/Audio/getCustomConfigSpeech.js
@@ -42,18 +42,26 @@ async function getCustomConfigSpeech(req, res) {
       settings.advancedMode = speechTab.advancedMode;
     }
 
-    if (speechTab.speechToText) {
-      for (const key in speechTab.speechToText) {
-        if (speechTab.speechToText[key] !== undefined) {
-          settings[key] = speechTab.speechToText[key];
+    if (speechTab.speechToText !== undefined) {
+      if (typeof speechTab.speechToText === 'boolean') {
+        settings.speechToText = speechTab.speechToText;
+      } else {
+        for (const key in speechTab.speechToText) {
+          if (speechTab.speechToText[key] !== undefined) {
+            settings[key] = speechTab.speechToText[key];
+          }
         }
       }
     }
 
-    if (speechTab.textToSpeech) {
-      for (const key in speechTab.textToSpeech) {
-        if (speechTab.textToSpeech[key] !== undefined) {
-          settings[key] = speechTab.textToSpeech[key];
+    if (speechTab.textToSpeech !== undefined) {
+      if (typeof speechTab.textToSpeech === 'boolean') {
+        settings.textToSpeech = speechTab.textToSpeech;
+      } else {
+        for (const key in speechTab.textToSpeech) {
+          if (speechTab.textToSpeech[key] !== undefined) {
+            settings[key] = speechTab.textToSpeech[key];
+          }
         }
       }
     }
diff --git a/client/src/components/Nav/SettingsTabs/Data/ImportConversations.tsx b/client/src/components/Nav/SettingsTabs/Data/ImportConversations.tsx
index 2d06b74392..816c5a2deb 100644
--- a/client/src/components/Nav/SettingsTabs/Data/ImportConversations.tsx
+++ b/client/src/components/Nav/SettingsTabs/Data/ImportConversations.tsx
@@ -9,12 +9,10 @@ import { useLocalize } from '~/hooks';
 import { cn, logger } from '~/utils';
 
 function ImportConversations() {
-  const queryClient = useQueryClient();
-  const startupConfig = queryClient.getQueryData([QueryKeys.startupConfig]);
   const localize = useLocalize();
-  const fileInputRef = useRef(null);
+  const queryClient = useQueryClient();
   const { showToast } = useToastContext();
-
+  const fileInputRef = useRef(null);
   const [isUploading, setIsUploading] = useState(false);
 
   const handleSuccess = useCallback(() => {
@@ -53,7 +51,8 @@ function ImportConversations() {
   const handleFileUpload = useCallback(
     async (file: File) => {
       try {
-        const maxFileSize = (startupConfig as any)?.conversationImportMaxFileSize;
+        const startupConfig = queryClient.getQueryData([QueryKeys.startupConfig]);
+        const maxFileSize = startupConfig?.conversationImportMaxFileSize;
         if (maxFileSize && file.size > maxFileSize) {
           const size = (maxFileSize / (1024 * 1024)).toFixed(2);
           showToast({
@@ -76,7 +75,7 @@
         });
       }
     },
-    [uploadFile, showToast, localize, startupConfig],
+    [uploadFile, showToast, localize, queryClient],
   );
 
   const handleFileChange = useCallback(
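
Illustrative sketch (not part of the patch): how the image-edit request picks up the proxy after this change. The helper name, form fields, and model value below are invented for the example; only the PROXY branch mirrors the patched code in OpenAIImageTools.js.

const axios = require('axios');
const FormData = require('form-data');
const { HttpsProxyAgent } = require('https-proxy-agent');

// Hypothetical helper for illustration; the real request is built inside OpenAIImageTools.js.
async function postImageEdit(imageBuffer, prompt, apiKey) {
  const form = new FormData();
  form.append('model', 'gpt-image-1'); // example model value
  form.append('prompt', prompt);
  form.append('image', imageBuffer, { filename: 'image.png' });

  const axiosConfig = {
    headers: { ...form.getHeaders(), Authorization: `Bearer ${apiKey}` },
  };

  if (process.env.PROXY) {
    // HttpsProxyAgent parses the proxy URL itself (credentials, ports, bracketed IPv6 hosts),
    // replacing the hand-rolled host/port/protocol object and its try/catch.
    axiosConfig.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
  }

  return axios.post('https://api.openai.com/v1/images/edits', form, axiosConfig);
}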
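
Also illustrative (sample values invented): the speech tab's speechToText / textToSpeech entries may be either a boolean toggle or an object of provider settings. The old for...in loop iterates nothing over a bare boolean, so a plain true/false was silently dropped; the new branching copies it through, while object values are still flattened key by key.

// Sketch of the flattening behaviour added in getCustomConfigSpeech.js (keys are made up).
const speechTab = {
  speechToText: true, // boolean toggle form
  textToSpeech: { automaticPlayback: false }, // object-of-settings form
};

const settings = {};
for (const feature of ['speechToText', 'textToSpeech']) {
  const value = speechTab[feature];
  if (value === undefined) {
    continue;
  }
  if (typeof value === 'boolean') {
    settings[feature] = value; // previously dropped
  } else {
    for (const key in value) {
      if (value[key] !== undefined) {
        settings[key] = value[key];
      }
    }
  }
}
// settings -> { speechToText: true, automaticPlayback: false }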