From 89fb9c7e1cea622e13120739ce34b01b02118a70 Mon Sep 17 00:00:00 2001
From: Danny Avila
Date: Mon, 18 Aug 2025 13:27:56 -0400
Subject: [PATCH] refactor: consolidate endpoint loading logic into loadEndpoints function

---
 api/server/services/AppService.js      | 57 ++--------------------
 api/server/services/start/endpoints.js | 66 ++++++++++++++++++++++++++
 2 files changed, 70 insertions(+), 53 deletions(-)
 create mode 100644 api/server/services/start/endpoints.js

diff --git a/api/server/services/AppService.js b/api/server/services/AppService.js
index 881f6dc9ca..f06311f76a 100644
--- a/api/server/services/AppService.js
+++ b/api/server/services/AppService.js
@@ -12,23 +12,21 @@ const {
 } = require('librechat-data-provider');
 const {
   checkWebSearchConfig,
-  checkAzureVariables,
   checkVariables,
   checkHealth,
   checkConfig,
 } = require('./start/checks');
 const { ensureDefaultCategories, seedDefaultRoles, initializeRoles } = require('~/models');
-const { azureAssistantsDefaults, assistantsConfigSetup } = require('./start/assistants');
 const { setCachedTools, setAppConfig, loadCustomConfig } = require('./Config');
 const { initializeAzureBlobService } = require('./Files/Azure/initialize');
 const { initializeFirebase } = require('./Files/Firebase/initialize');
 const handleRateLimits = require('./Config/handleRateLimits');
 const { loadDefaultInterface } = require('./start/interface');
 const { loadTurnstileConfig } = require('./start/turnstile');
-const { azureConfigSetup } = require('./start/azureOpenAI');
 const { processModelSpecs } = require('./start/modelSpecs');
 const { initializeS3 } = require('./Files/S3/initialize');
 const { loadAndFormatTools } = require('./ToolService');
+const { loadEndpoints } = require('./start/endpoints');
 const paths = require('~/config/paths');
 
 /**
@@ -121,61 +119,14 @@ const AppService = async () => {
   checkConfig(config);
 
   handleRateLimits(config?.rateLimits);
-
-  const endpointLocals = {};
-  const endpoints = config?.endpoints;
-
-  if (endpoints?.[EModelEndpoint.azureOpenAI]) {
-    endpointLocals[EModelEndpoint.azureOpenAI] = azureConfigSetup(config);
-    checkAzureVariables();
-  }
-
-  if (endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) {
-    endpointLocals[EModelEndpoint.azureAssistants] = azureAssistantsDefaults();
-  }
-
-  if (endpoints?.[EModelEndpoint.azureAssistants]) {
-    endpointLocals[EModelEndpoint.azureAssistants] = assistantsConfigSetup(
-      config,
-      EModelEndpoint.azureAssistants,
-      endpointLocals[EModelEndpoint.azureAssistants],
-    );
-  }
-
-  if (endpoints?.[EModelEndpoint.assistants]) {
-    endpointLocals[EModelEndpoint.assistants] = assistantsConfigSetup(
-      config,
-      EModelEndpoint.assistants,
-      endpointLocals[EModelEndpoint.assistants],
-    );
-  }
-
-  endpointLocals[EModelEndpoint.agents] = agentsConfigSetup(config, agentsDefaults);
-
-  const endpointKeys = [
-    EModelEndpoint.openAI,
-    EModelEndpoint.google,
-    EModelEndpoint.bedrock,
-    EModelEndpoint.anthropic,
-    EModelEndpoint.gptPlugins,
-  ];
-
-  endpointKeys.forEach((key) => {
-    if (endpoints?.[key]) {
-      endpointLocals[key] = endpoints[key];
-    }
-  });
-
-  if (endpoints?.all) {
-    endpointLocals.all = endpoints.all;
-  }
+  const loadedEndpoints = loadEndpoints(config, agentsDefaults);
 
   const appConfig = {
     ...defaultConfig,
     fileConfig: config?.fileConfig,
     secureImageLinks: config?.secureImageLinks,
-    modelSpecs: processModelSpecs(endpoints, config.modelSpecs, interfaceConfig),
-    ...endpointLocals,
+    modelSpecs: processModelSpecs(config?.endpoints, config.modelSpecs, interfaceConfig),
+    ...loadedEndpoints,
   };
 
   await setAppConfig(appConfig);
diff --git a/api/server/services/start/endpoints.js b/api/server/services/start/endpoints.js
new file mode 100644
index 0000000000..4aaf53305f
--- /dev/null
+++ b/api/server/services/start/endpoints.js
@@ -0,0 +1,66 @@
+const { agentsConfigSetup } = require('@librechat/api');
+const { EModelEndpoint } = require('librechat-data-provider');
+const { azureAssistantsDefaults, assistantsConfigSetup } = require('./assistants');
+const { azureConfigSetup } = require('./azureOpenAI');
+const { checkAzureVariables } = require('./checks');
+
+/**
+ * Loads custom config endpoints
+ * @param {TCustomConfig} [config]
+ * @param {TCustomConfig['endpoints']['agents']} [agentsDefaults]
+ */
+const loadEndpoints = (config, agentsDefaults) => {
+  const loadedEndpoints = {};
+  const endpoints = config?.endpoints;
+
+  if (endpoints?.[EModelEndpoint.azureOpenAI]) {
+    loadedEndpoints[EModelEndpoint.azureOpenAI] = azureConfigSetup(config);
+    checkAzureVariables();
+  }
+
+  if (endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) {
+    loadedEndpoints[EModelEndpoint.azureAssistants] = azureAssistantsDefaults();
+  }
+
+  if (endpoints?.[EModelEndpoint.azureAssistants]) {
+    loadedEndpoints[EModelEndpoint.azureAssistants] = assistantsConfigSetup(
+      config,
+      EModelEndpoint.azureAssistants,
+      loadedEndpoints[EModelEndpoint.azureAssistants],
+    );
+  }
+
+  if (endpoints?.[EModelEndpoint.assistants]) {
+    loadedEndpoints[EModelEndpoint.assistants] = assistantsConfigSetup(
+      config,
+      EModelEndpoint.assistants,
+      loadedEndpoints[EModelEndpoint.assistants],
+    );
+  }
+
+  loadedEndpoints[EModelEndpoint.agents] = agentsConfigSetup(config, agentsDefaults);
+
+  const endpointKeys = [
+    EModelEndpoint.openAI,
+    EModelEndpoint.google,
+    EModelEndpoint.bedrock,
+    EModelEndpoint.anthropic,
+    EModelEndpoint.gptPlugins,
+  ];
+
+  endpointKeys.forEach((key) => {
+    if (endpoints?.[key]) {
+      loadedEndpoints[key] = endpoints[key];
+    }
+  });
+
+  if (endpoints?.all) {
+    loadedEndpoints.all = endpoints.all;
+  }
+
+  return loadedEndpoints;
+};
+
+module.exports = {
+  loadEndpoints,
+};
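
Usage sketch (not part of the patch): after this refactor, AppService only calls the new helper and spreads its result into appConfig. The snippet below is a minimal, illustrative call in isolation; the `config` literal and the `agentsDefaults` value are made-up stand-ins for what loadCustomConfig() and the agents defaults normally supply, and the endpoint keys shown are examples rather than values taken from the diff.

  const { loadEndpoints } = require('./start/endpoints');

  // Stand-in config object; in AppService this comes from loadCustomConfig().
  const config = {
    endpoints: {
      openAI: { titleModel: 'gpt-4o-mini' }, // example values, not from the patch
      all: { streamRate: 25 },
    },
  };
  const agentsDefaults = {}; // placeholder; AppService passes the real agents defaults

  // Returns a map keyed by EModelEndpoint values (agents is always populated),
  // which AppService spreads into appConfig.
  const loadedEndpoints = loadEndpoints(config, agentsDefaults);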