🪦 refactor: Remove Legacy Code (#10533)

* 🗑️ chore: Remove unused Legacy Provider clients and related helpers

* Deleted OpenAIClient and GoogleClient files along with their associated tests.
* Removed references to these clients in the clients index file.
* Cleaned up typedefs by removing the OpenAISpecClient export.
* Updated chat controllers to use the OpenAI SDK directly instead of the removed client classes.
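
For context on the controller change above, here is a minimal sketch of calling the official `openai` SDK directly from an Express-style handler; the handler shape, request fields, and streaming wiring are illustrative assumptions, not the project's actual code:

```js
// Hypothetical controller sketch: talks to the `openai` SDK directly
// rather than going through a removed OpenAIClient wrapper class.
const OpenAI = require('openai');

async function chatCompletion(req, res) {
  const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

  // Stream tokens back to the browser as server-sent events (illustrative).
  const stream = await client.chat.completions.create({
    model: req.body.model ?? 'gpt-4o-mini',
    messages: req.body.messages,
    stream: true,
  });

  res.setHeader('Content-Type', 'text/event-stream');
  for await (const chunk of stream) {
    const token = chunk.choices?.[0]?.delta?.content ?? '';
    if (token) {
      res.write(`data: ${JSON.stringify({ token })}\n\n`);
    }
  }
  res.end();
}

module.exports = { chatCompletion };
```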

* chore/remove-openapi-specs

* 🗑️ chore: Remove unused mergeSort and misc utility functions

* Deleted mergeSort.js and misc.js files as they are no longer needed.
* Removed references to cleanUpPrimaryKeyValue in messages.js and adjusted related logic.
* Updated mongoMeili.ts to eliminate local implementations of removed functions.
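
One reason a hand-rolled mergeSort can be dropped: `Array.prototype.sort` is required to be stable since ES2019, so ordering records by a key no longer needs a custom stable sort. A minimal sketch (the `createdAt` field and sample data are assumed for illustration):

```js
const messages = [
  { text: 'b', createdAt: '2025-11-25T10:00:00Z' },
  { text: 'a', createdAt: '2025-11-25T09:00:00Z' },
];

// Built-in sort is spec-guaranteed stable in modern Node.js, so equal keys
// keep their relative order without a custom mergeSort helper.
const sorted = [...messages].sort(
  (a, b) => new Date(a.createdAt) - new Date(b.createdAt),
);
console.log(sorted.map((m) => m.text)); // ['a', 'b']
```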

* chore: remove legacy endpoints

* chore: remove all plugins endpoint related code

* chore: remove unused prompt handling code and clean up imports

* Deleted handleInputs.js and instructions.js files as they are no longer needed.
* Removed references to these files in the prompts index.js.
* Updated docker-compose.yml to simplify reverse proxy configuration.

* chore: remove unused LightningIcon import from Icons.tsx

* chore: clean up translation.json by removing deprecated and unused keys

* chore: update Jest configuration and remove unused mock file

    * Simplified the setupFiles array in jest.config.js by removing the fetchEventSource mock.
    * Deleted the fetchEventSource.js mock file as it is no longer needed.
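
A sketch of the kind of Jest change described, with placeholder file names since the real setupFiles entries are not shown in this summary:

```js
// jest.config.js (illustrative): the fetchEventSource mock entry is removed
// from setupFiles; the remaining path is a placeholder.
module.exports = {
  // before: setupFiles: ['./test/jestSetup.js', './test/__mocks__/fetchEventSource.js'],
  setupFiles: ['./test/jestSetup.js'],
};
```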

* fix: simplify endpoint type check in Landing and ConversationStarters components

    * Updated the endpoint type check to use strict equality for better clarity and performance.
    * Ensured consistency in the handling of the azureOpenAI endpoint across both components.
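
A sketch of the strict-equality pattern described, assuming the components compare an `endpoint` value against `EModelEndpoint.azureOpenAI` from `librechat-data-provider`; the surrounding names are illustrative:

```js
const { EModelEndpoint } = require('librechat-data-provider');

// Strict equality states the intent directly and avoids looser checks
// (string includes, loose ==) when gating azureOpenAI-specific behavior.
function isAzureOpenAI(endpoint) {
  return endpoint === EModelEndpoint.azureOpenAI;
}
```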

* chore: remove unused dependencies from package.json and package-lock.json

* chore: remove legacy EditController, associated routes and imports

* chore: update banResponse logic to refine request handling for banned users

* chore: remove unused validateEndpoint middleware and its references

* chore: remove unused 'res' parameter from initializeClient in multiple endpoint files

* chore: remove unused 'isSmallScreen' prop from BookmarkNav and NewChat components; clean up imports in ArchivedChatsTable and useSetIndexOptions hooks; enhance localization in PromptVersions

* chore: remove unused import of Constants and TMessage from MobileNav; retain only necessary QueryKeys import

* chore: remove unused TResPlugin type and related references; clean up imports in types and schemas
Danny Avila · 2025-11-25 15:20:07 -05:00 · parent b6dcefc53a · commit 656e1abaea
161 changed files with 256 additions and 10513 deletions


@@ -8,8 +8,6 @@ const {
   ASSISTANTS_API_KEY: assistantsApiKey,
   AZURE_API_KEY: azureOpenAIApiKey,
   ANTHROPIC_API_KEY: anthropicApiKey,
-  CHATGPT_TOKEN: chatGPTToken,
-  PLUGINS_USE_AZURE,
   GOOGLE_KEY: googleKey,
   OPENAI_REVERSE_PROXY,
   AZURE_OPENAI_BASEURL,
@@ -17,21 +15,15 @@ const {
   AZURE_ASSISTANTS_BASE_URL,
 } = process.env ?? {};
 
-const useAzurePlugins = !!PLUGINS_USE_AZURE;
-const userProvidedOpenAI = useAzurePlugins
-  ? isUserProvided(azureOpenAIApiKey)
-  : isUserProvided(openAIApiKey);
+const userProvidedOpenAI = isUserProvided(openAIApiKey);
 
 module.exports = {
   config: {
+    googleKey,
     openAIApiKey,
     azureOpenAIApiKey,
-    useAzurePlugins,
     userProvidedOpenAI,
-    googleKey,
     [EModelEndpoint.anthropic]: generateConfig(anthropicApiKey),
-    [EModelEndpoint.chatGPTBrowser]: generateConfig(chatGPTToken),
     [EModelEndpoint.openAI]: generateConfig(openAIApiKey, OPENAI_REVERSE_PROXY),
     [EModelEndpoint.azureOpenAI]: generateConfig(azureOpenAIApiKey, AZURE_OPENAI_BASEURL),
     [EModelEndpoint.assistants]: generateConfig(
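
The net effect of the diff above: with the plugins endpoint gone, `userProvidedOpenAI` no longer branches on `PLUGINS_USE_AZURE` and depends only on `OPENAI_API_KEY`. As a rough sketch of the helper's role (the real `isUserProvided` lives in `@librechat/api` and may differ in detail), the check treats a sentinel value as "the user supplies this key at runtime":

```js
// Illustrative approximation: an env var set to the 'user_provided' sentinel
// means the credential is collected from the user instead of the environment.
const isUserProvided = (value) => value === 'user_provided';

const userProvidedOpenAI = isUserProvided(process.env.OPENAI_API_KEY);
console.log(userProvidedOpenAI); // true only when OPENAI_API_KEY=user_provided
```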


@@ -1,17 +1,11 @@
 const path = require('path');
 const { logger } = require('@librechat/data-schemas');
 const { EModelEndpoint } = require('librechat-data-provider');
 const { loadServiceKey, isUserProvided } = require('@librechat/api');
 const { config } = require('./EndpointService');
-const { openAIApiKey, azureOpenAIApiKey, useAzurePlugins, userProvidedOpenAI, googleKey } = config;
 
 /**
  * Load async endpoints and return a configuration object
- * @param {AppConfig} [appConfig] - The app configuration object
  */
-async function loadAsyncEndpoints(appConfig) {
+async function loadAsyncEndpoints() {
   let serviceKey, googleUserProvides;
+  const { googleKey } = config;
 
   /** Check if GOOGLE_KEY is provided at all(including 'user_provided') */
   const isGoogleKeyProvided = googleKey && googleKey.trim() !== '';
@@ -34,21 +28,7 @@ async function loadAsyncEndpoints(appConfig) {
   const google = serviceKey || isGoogleKeyProvided ? { userProvide: googleUserProvides } : false;
 
-  const useAzure = !!appConfig?.endpoints?.[EModelEndpoint.azureOpenAI]?.plugins;
-  const gptPlugins =
-    useAzure || openAIApiKey || azureOpenAIApiKey
-      ? {
-          availableAgents: ['classic', 'functions'],
-          userProvide: useAzure ? false : userProvidedOpenAI,
-          userProvideURL: useAzure
-            ? false
-            : config[EModelEndpoint.openAI]?.userProvideURL ||
-              config[EModelEndpoint.azureOpenAI]?.userProvideURL,
-          azure: useAzurePlugins || useAzure,
-        }
-      : false;
-
-  return { google, gptPlugins };
+  return { google };
 }
 
 module.exports = loadAsyncEndpoints;
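
After this change `loadAsyncEndpoints` only resolves the Google endpoint; a hedged usage sketch of the simplified call site (require path assumed):

```js
const loadAsyncEndpoints = require('./loadAsyncEndpoints'); // path assumed

async function printEndpoints() {
  // No appConfig argument is needed anymore; only the Google endpoint
  // configuration (or `false`) is returned.
  const { google } = await loadAsyncEndpoints();
  console.log(google); // e.g. { userProvide: true } or false
}

printEndpoints();
```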


@@ -25,10 +25,6 @@ async function loadConfigModels(req) {
     modelsConfig[EModelEndpoint.azureOpenAI] = modelNames;
   }
 
-  if (modelNames && azureConfig && azureConfig.plugins) {
-    modelsConfig[EModelEndpoint.gptPlugins] = modelNames;
-  }
-
   if (azureConfig?.assistants && azureConfig.assistantModels) {
     modelsConfig[EModelEndpoint.azureAssistants] = azureConfig.assistantModels;
   }


@@ -8,8 +8,8 @@ const { config } = require('./EndpointService');
  * @returns {Promise<Object.<string, EndpointWithOrder>>} An object whose keys are endpoint names and values are objects that contain the endpoint configuration and an order.
  */
 async function loadDefaultEndpointsConfig(appConfig) {
-  const { google, gptPlugins } = await loadAsyncEndpoints(appConfig);
-  const { assistants, azureAssistants, azureOpenAI, chatGPTBrowser } = config;
+  const { google } = await loadAsyncEndpoints(appConfig);
+  const { assistants, azureAssistants, azureOpenAI } = config;
 
   const enabledEndpoints = getEnabledEndpoints();
@@ -20,8 +20,6 @@ async function loadDefaultEndpointsConfig(appConfig) {
     [EModelEndpoint.azureAssistants]: azureAssistants,
     [EModelEndpoint.azureOpenAI]: azureOpenAI,
     [EModelEndpoint.google]: google,
-    [EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
-    [EModelEndpoint.gptPlugins]: gptPlugins,
     [EModelEndpoint.anthropic]: config[EModelEndpoint.anthropic],
     [EModelEndpoint.bedrock]: config[EModelEndpoint.bedrock],
   };