From 5eef6ea9e80fbd1232d24973a1616bb6401943d6 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Mon, 18 Aug 2025 16:58:05 -0400 Subject: [PATCH] refactor: implement custom endpoints configuration and streamline endpoint loading logic --- .../services/Config/getEndpointsConfig.js | 26 +++++-- .../services/Config/loadAsyncEndpoints.js | 10 ++- .../services/Config/loadConfigEndpoints.js | 71 ------------------- .../services/Config/loadDefaultEConfig.js | 6 +- api/server/services/start/endpoints.js | 2 +- packages/api/src/endpoints/custom/config.ts | 56 +++++++++++++++ packages/api/src/endpoints/custom/index.ts | 1 + packages/api/src/endpoints/index.ts | 1 + packages/api/src/types/config.ts | 5 +- packages/api/src/types/endpoints.ts | 3 + packages/api/src/types/index.ts | 1 + packages/data-provider/src/config.ts | 6 +- 12 files changed, 100 insertions(+), 88 deletions(-) delete mode 100644 api/server/services/Config/loadConfigEndpoints.js create mode 100644 packages/api/src/endpoints/custom/config.ts create mode 100644 packages/api/src/endpoints/custom/index.ts create mode 100644 packages/api/src/types/endpoints.ts diff --git a/api/server/services/Config/getEndpointsConfig.js b/api/server/services/Config/getEndpointsConfig.js index c08ceb8a7d..8c6ff1f5fd 100644 --- a/api/server/services/Config/getEndpointsConfig.js +++ b/api/server/services/Config/getEndpointsConfig.js @@ -1,3 +1,4 @@ +const { loadCustomEndpointsConfig } = require('@librechat/api'); const { CacheKeys, EModelEndpoint, @@ -6,7 +7,6 @@ const { defaultAgentCapabilities, } = require('librechat-data-provider'); const loadDefaultEndpointsConfig = require('./loadDefaultEConfig'); -const loadConfigEndpoints = require('./loadConfigEndpoints'); const getLogStores = require('~/cache/getLogStores'); const { getAppConfig } = require('./app'); @@ -22,12 +22,30 @@ async function getEndpointsConfig(req) { return cachedEndpointsConfig; } - const defaultEndpointsConfig = await loadDefaultEndpointsConfig(req); - const customConfigEndpoints = await loadConfigEndpoints(req); const appConfig = await getAppConfig({ role: req.user?.role }); + const defaultEndpointsConfig = await loadDefaultEndpointsConfig(appConfig); + const customEndpointsConfig = loadCustomEndpointsConfig(appConfig?.endpoints?.custom); /** @type {TEndpointsConfig} */ - const mergedConfig = { ...defaultEndpointsConfig, ...customConfigEndpoints }; + const mergedConfig = { + ...defaultEndpointsConfig, + ...customEndpointsConfig, + }; + + if (appConfig.endpoints?.[EModelEndpoint.azureOpenAI]) { + /** @type {Omit} */ + mergedConfig[EModelEndpoint.azureOpenAI] = { + userProvide: false, + }; + } + + if (appConfig.endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) { + /** @type {Omit} */ + mergedConfig[EModelEndpoint.azureAssistants] = { + userProvide: false, + }; + } + if ( mergedConfig[EModelEndpoint.assistants] && appConfig?.endpoints?.[EModelEndpoint.assistants] diff --git a/api/server/services/Config/loadAsyncEndpoints.js b/api/server/services/Config/loadAsyncEndpoints.js index 69958c2c61..48b42131e0 100644 --- a/api/server/services/Config/loadAsyncEndpoints.js +++ b/api/server/services/Config/loadAsyncEndpoints.js @@ -1,18 +1,16 @@ const path = require('path'); const { logger } = require('@librechat/data-schemas'); -const { loadServiceKey, isUserProvided } = require('@librechat/api'); const { EModelEndpoint } = require('librechat-data-provider'); +const { loadServiceKey, isUserProvided } = require('@librechat/api'); const { config } = require('./EndpointService'); -const { 
getAppConfig } = require('./app'); const { openAIApiKey, azureOpenAIApiKey, useAzurePlugins, userProvidedOpenAI, googleKey } = config; /** * Load async endpoints and return a configuration object - * @param {Express.Request} req - The request object + * @param {AppConfig} [appConfig] - The app configuration object */ -async function loadAsyncEndpoints(req) { - const appConfig = await getAppConfig({ role: req.user?.role }); +async function loadAsyncEndpoints(appConfig) { let serviceKey, googleUserProvides; /** Check if GOOGLE_KEY is provided at all(including 'user_provided') */ @@ -36,7 +34,7 @@ async function loadAsyncEndpoints(req) { const google = serviceKey || isGoogleKeyProvided ? { userProvide: googleUserProvides } : false; - const useAzure = appConfig.endpoints?.[EModelEndpoint.azureOpenAI]?.plugins; + const useAzure = !!appConfig?.endpoints?.[EModelEndpoint.azureOpenAI]?.plugins; const gptPlugins = useAzure || openAIApiKey || azureOpenAIApiKey ? { diff --git a/api/server/services/Config/loadConfigEndpoints.js b/api/server/services/Config/loadConfigEndpoints.js deleted file mode 100644 index b69ebec25b..0000000000 --- a/api/server/services/Config/loadConfigEndpoints.js +++ /dev/null @@ -1,71 +0,0 @@ -const { isUserProvided, normalizeEndpointName } = require('@librechat/api'); -const { EModelEndpoint, extractEnvVariable } = require('librechat-data-provider'); -const { getAppConfig } = require('./app'); - -/** - * Load config endpoints from the cached configuration object - * @param {Express.Request} req - The request object - * @returns {Promise} A promise that resolves to an object containing the endpoints configuration - */ -async function loadConfigEndpoints(req) { - const appConfig = await getAppConfig({ role: req.user?.role }); - if (!appConfig) { - return {}; - } - - const endpointsConfig = {}; - - if (Array.isArray(appConfig.endpoints?.[EModelEndpoint.custom])) { - const customEndpoints = appConfig.endpoints[EModelEndpoint.custom].filter( - (endpoint) => - endpoint.baseURL && - endpoint.apiKey && - endpoint.name && - endpoint.models && - (endpoint.models.fetch || endpoint.models.default), - ); - - for (let i = 0; i < customEndpoints.length; i++) { - const endpoint = customEndpoints[i]; - const { - baseURL, - apiKey, - name: configName, - iconURL, - modelDisplayLabel, - customParams, - } = endpoint; - const name = normalizeEndpointName(configName); - - const resolvedApiKey = extractEnvVariable(apiKey); - const resolvedBaseURL = extractEnvVariable(baseURL); - - endpointsConfig[name] = { - type: EModelEndpoint.custom, - userProvide: isUserProvided(resolvedApiKey), - userProvideURL: isUserProvided(resolvedBaseURL), - modelDisplayLabel, - iconURL, - customParams, - }; - } - } - - if (appConfig.endpoints?.[EModelEndpoint.azureOpenAI]) { - /** @type {Omit} */ - endpointsConfig[EModelEndpoint.azureOpenAI] = { - userProvide: false, - }; - } - - if (appConfig.endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) { - /** @type {Omit} */ - endpointsConfig[EModelEndpoint.azureAssistants] = { - userProvide: false, - }; - } - - return endpointsConfig; -} - -module.exports = loadConfigEndpoints; diff --git a/api/server/services/Config/loadDefaultEConfig.js b/api/server/services/Config/loadDefaultEConfig.js index a9602bac2d..f3c12a4933 100644 --- a/api/server/services/Config/loadDefaultEConfig.js +++ b/api/server/services/Config/loadDefaultEConfig.js @@ -4,11 +4,11 @@ const { config } = require('./EndpointService'); /** * Load async endpoints and return a configuration object - * @param 
{Express.Request} req - The request object + * @param {AppConfig} appConfig - The app configuration object * @returns {Promise>} An object whose keys are endpoint names and values are objects that contain the endpoint configuration and an order. */ -async function loadDefaultEndpointsConfig(req) { - const { google, gptPlugins } = await loadAsyncEndpoints(req); +async function loadDefaultEndpointsConfig(appConfig) { + const { google, gptPlugins } = await loadAsyncEndpoints(appConfig); const { assistants, azureAssistants, azureOpenAI, chatGPTBrowser } = config; const enabledEndpoints = getEnabledEndpoints(); diff --git a/api/server/services/start/endpoints.js b/api/server/services/start/endpoints.js index 4aaf53305f..c37aafd2b6 100644 --- a/api/server/services/start/endpoints.js +++ b/api/server/services/start/endpoints.js @@ -43,9 +43,9 @@ const loadEndpoints = (config, agentsDefaults) => { const endpointKeys = [ EModelEndpoint.openAI, EModelEndpoint.google, + EModelEndpoint.custom, EModelEndpoint.bedrock, EModelEndpoint.anthropic, - EModelEndpoint.gptPlugins, ]; endpointKeys.forEach((key) => { diff --git a/packages/api/src/endpoints/custom/config.ts b/packages/api/src/endpoints/custom/config.ts new file mode 100644 index 0000000000..220eb43509 --- /dev/null +++ b/packages/api/src/endpoints/custom/config.ts @@ -0,0 +1,56 @@ +import { EModelEndpoint, extractEnvVariable } from 'librechat-data-provider'; +import type { TCustomEndpoints, TEndpoint, TConfig } from 'librechat-data-provider'; +import type { TCustomEndpointsConfig } from '~/types/endpoints'; +import { isUserProvided, normalizeEndpointName } from '~/utils'; + +/** + * Load config endpoints from the cached configuration object + * @param customEndpointsConfig - The configuration object + */ +export function loadCustomEndpointsConfig( + customEndpoints?: TCustomEndpoints, +): TCustomEndpointsConfig | undefined { + if (!customEndpoints) { + return; + } + + const customEndpointsConfig: TCustomEndpointsConfig = {}; + + if (Array.isArray(customEndpoints)) { + const filteredEndpoints = customEndpoints.filter( + (endpoint) => + endpoint.baseURL && + endpoint.apiKey && + endpoint.name && + endpoint.models && + (endpoint.models.fetch || endpoint.models.default), + ); + + for (let i = 0; i < filteredEndpoints.length; i++) { + const endpoint = filteredEndpoints[i] as TEndpoint; + const { + baseURL, + apiKey, + name: configName, + iconURL, + modelDisplayLabel, + customParams, + } = endpoint; + const name = normalizeEndpointName(configName); + + const resolvedApiKey = extractEnvVariable(apiKey ?? ''); + const resolvedBaseURL = extractEnvVariable(baseURL ?? 
''); + + customEndpointsConfig[name] = { + type: EModelEndpoint.custom, + userProvide: isUserProvided(resolvedApiKey), + userProvideURL: isUserProvided(resolvedBaseURL), + customParams: customParams as TConfig['customParams'], + modelDisplayLabel, + iconURL, + }; + } + } + + return customEndpointsConfig; +} diff --git a/packages/api/src/endpoints/custom/index.ts b/packages/api/src/endpoints/custom/index.ts new file mode 100644 index 0000000000..f03c2281a9 --- /dev/null +++ b/packages/api/src/endpoints/custom/index.ts @@ -0,0 +1 @@ +export * from './config'; diff --git a/packages/api/src/endpoints/index.ts b/packages/api/src/endpoints/index.ts index e12780d876..7b98ffcb6b 100644 --- a/packages/api/src/endpoints/index.ts +++ b/packages/api/src/endpoints/index.ts @@ -1,2 +1,3 @@ +export * from './custom'; export * from './google'; export * from './openai'; diff --git a/packages/api/src/types/config.ts b/packages/api/src/types/config.ts index ea21052ba6..7089327bb7 100644 --- a/packages/api/src/types/config.ts +++ b/packages/api/src/types/config.ts @@ -6,6 +6,7 @@ import type { TMemoryConfig, EModelEndpoint, TAgentsEndpoint, + TCustomEndpoints, TAssistantEndpoint, } from 'librechat-data-provider'; @@ -78,9 +79,9 @@ export interface AppConfig { azureAssistants?: TAssistantEndpoint; /** Agents endpoint configuration */ [EModelEndpoint.agents]?: TAgentsEndpoint; + /** Custom endpoints configuration */ + [EModelEndpoint.custom]?: TCustomEndpoints; /** Global endpoint configuration */ all?: TEndpoint; - /** Any additional endpoint configurations */ - [key: string]: unknown; }; } diff --git a/packages/api/src/types/endpoints.ts b/packages/api/src/types/endpoints.ts new file mode 100644 index 0000000000..c16691518e --- /dev/null +++ b/packages/api/src/types/endpoints.ts @@ -0,0 +1,3 @@ +import type { TConfig } from 'librechat-data-provider'; + +export type TCustomEndpointsConfig = Partial<{ [key: string]: Omit }>; diff --git a/packages/api/src/types/index.ts b/packages/api/src/types/index.ts index 11cac41332..052e20ed22 100644 --- a/packages/api/src/types/index.ts +++ b/packages/api/src/types/index.ts @@ -1,6 +1,7 @@ export * from './config'; export * from './azure'; export * from './balance'; +export * from './endpoints'; export * from './events'; export * from './error'; export * from './google'; diff --git a/packages/data-provider/src/config.ts b/packages/data-provider/src/config.ts index 0aa7c93bd4..610e4e3489 100644 --- a/packages/data-provider/src/config.ts +++ b/packages/data-provider/src/config.ts @@ -300,6 +300,7 @@ export const endpointSchema = baseEndpointSchema.merge( }), summarize: z.boolean().optional(), summaryModel: z.string().optional(), + iconURL: z.string().optional(), forcePrompt: z.boolean().optional(), modelDisplayLabel: z.string().optional(), headers: z.record(z.any()).optional(), @@ -789,6 +790,8 @@ export const memorySchema = z.object({ export type TMemoryConfig = z.infer; +const customEndpointsSchema = z.array(endpointSchema.partial()).optional(); + export const configSchema = z.object({ version: z.string(), cache: z.boolean().default(true), @@ -837,7 +840,7 @@ export const configSchema = z.object({ [EModelEndpoint.azureAssistants]: assistantEndpointSchema.optional(), [EModelEndpoint.assistants]: assistantEndpointSchema.optional(), [EModelEndpoint.agents]: agentsEndpointSchema.optional(), - [EModelEndpoint.custom]: z.array(endpointSchema.partial()).optional(), + [EModelEndpoint.custom]: customEndpointsSchema.optional(), [EModelEndpoint.bedrock]: 
baseEndpointSchema.optional(),
 })
 .strict()
@@ -850,6 +853,7 @@ export const configSchema = z.object({
 export const getConfigDefaults = () => getSchemaDefaults(configSchema);
 
 export type TCustomConfig = z.infer<typeof configSchema>;
+export type TCustomEndpoints = z.infer<typeof customEndpointsSchema>;
 
 export type TProviderSchema =
 | z.infer
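
Note (not part of the patch): a minimal consumption sketch for the new loadCustomEndpointsConfig helper, mirroring how getEndpointsConfig.js calls it above. The provider name, baseURL, and EXAMPLE_API_KEY below are illustrative placeholders, not values defined anywhere in this change.

// custom-endpoints-example.ts — sketch only; assumes '@librechat/api' and
// 'librechat-data-provider' are available as in the monorepo workspace.
import { loadCustomEndpointsConfig } from '@librechat/api';
import type { TCustomEndpoints } from 'librechat-data-provider';

// Shape of `endpoints.custom` after librechat.yaml is parsed against
// customEndpointsSchema (z.array(endpointSchema.partial())).
const customEndpoints: TCustomEndpoints = [
  {
    name: 'Example Provider', // run through normalizeEndpointName()
    apiKey: '${EXAMPLE_API_KEY}', // env references resolved via extractEnvVariable()
    baseURL: 'https://api.example.com/v1', // placeholder URL
    models: { default: ['example-model'], fetch: false },
    modelDisplayLabel: 'Example',
  },
  {
    // Missing apiKey/baseURL, so the helper's filter drops this entry.
    name: 'Incomplete Provider',
    models: { default: ['other-model'] },
  },
];

const endpointsConfig = loadCustomEndpointsConfig(customEndpoints);
// Expected shape (roughly):
// {
//   'Example Provider': {
//     type: 'custom',
//     userProvide: false,     // true only when apiKey resolves to 'user_provided'
//     userProvideURL: false,  // true only when baseURL resolves to 'user_provided'
//     modelDisplayLabel: 'Example',
//     iconURL: undefined,
//     customParams: undefined,
//   },
// }
console.log(endpointsConfig);

In getEndpointsConfig.js the returned object is spread over defaultEndpointsConfig, and the azureOpenAI/azureAssistants entries that previously lived in loadConfigEndpoints.js are now applied directly in that merge.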