🪦 refactor: Remove Legacy Code (#10533)

* 🗑️ chore: Remove unused Legacy Provider clients and related helpers

    * Deleted OpenAIClient and GoogleClient files along with their associated tests.
    * Removed references to these clients in the clients index file.
    * Cleaned up typedefs by removing the OpenAISpecClient export.
    * Updated chat controllers to use the OpenAI SDK directly instead of the removed client classes (see the sketch below).
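
As an illustration of the last point, calling the OpenAI SDK directly from a controller removes the need for a bespoke client class. This is a minimal sketch, not the actual controller code: the model name, environment variables, and streaming callback are assumptions; only the `openai` Node SDK calls themselves are real API.

```ts
import OpenAI from 'openai';

// Minimal sketch: stream a chat completion straight from the OpenAI SDK
// instead of routing through a removed client wrapper class.
async function streamChatCompletion(
  messages: OpenAI.Chat.ChatCompletionMessageParam[],
  onToken: (token: string) => void,
): Promise<void> {
  const openai = new OpenAI({
    apiKey: process.env.OPENAI_API_KEY,
    // Optional custom base URL / reverse proxy, if one is configured (assumed env var name).
    baseURL: process.env.OPENAI_REVERSE_PROXY,
  });

  const stream = await openai.chat.completions.create({
    model: 'gpt-4o-mini',
    messages,
    stream: true,
  });

  for await (const chunk of stream) {
    const token = chunk.choices[0]?.delta?.content ?? '';
    if (token) {
      onToken(token);
    }
  }
}
```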

* chore/remove-openapi-specs

* 🗑️ chore: Remove unused mergeSort and misc utility functions

    * Deleted mergeSort.js and misc.js files as they are no longer needed.
    * Removed references to cleanUpPrimaryKeyValue in messages.js and adjusted related logic.
    * Updated mongoMeili.ts to eliminate local implementations of removed functions (see the note below).
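
On the mergeSort removal: `Array.prototype.sort` has been guaranteed stable since ES2019, so the built-in comparator covers the same need as a hand-rolled stable merge sort. The snippet below is only an illustrative sketch under that assumption, not the code that replaced the utility; the `MessageLike` shape is hypothetical.

```ts
// Illustrative only: a stable sort by creation time using the built-in
// Array.prototype.sort (stable per ES2019), which is why a custom
// mergeSort utility is redundant.
interface MessageLike {
  messageId: string;
  createdAt: string | Date;
}

function sortByCreatedAt<T extends MessageLike>(messages: T[]): T[] {
  return [...messages].sort(
    (a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime(),
  );
}
```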

* chore: remove legacy endpoints

* chore: remove all plugins endpoint related code

* chore: remove unused prompt handling code and clean up imports

    * Deleted handleInputs.js and instructions.js files as they are no longer needed.
    * Removed references to these files in the prompts index.js.
    * Updated docker-compose.yml to simplify reverse proxy configuration.

* chore: remove unused LightningIcon import from Icons.tsx

* chore: clean up translation.json by removing deprecated and unused keys

* chore: update Jest configuration and remove unused mock file

    * Simplified the setupFiles array in jest.config.js by removing the fetchEventSource mock.
    * Deleted the fetchEventSource.js mock file as it is no longer needed.

* fix: simplify endpoint type check in Landing and ConversationStarters components

    * Updated the endpoint type check to use strict equality for better clarity and performance (see the sketch below).
    * Ensured consistency in the handling of the azureOpenAI endpoint across both components.
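
A minimal sketch of the kind of check described above; the helper name and surrounding code are hypothetical, and only the strict-equality comparison against `EModelEndpoint.azureOpenAI` reflects the change.

```ts
import { EModelEndpoint } from 'librechat-data-provider';

// Hypothetical helper: compare the endpoint value with strict equality
// rather than a looser membership or string check.
function isAzureOpenAI(endpoint?: EModelEndpoint | string | null): boolean {
  return endpoint === EModelEndpoint.azureOpenAI;
}
```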

* chore: remove unused dependencies from package.json and package-lock.json

* chore: remove legacy EditController, associated routes and imports

* chore: update banResponse logic to refine request handling for banned users

* chore: remove unused validateEndpoint middleware and its references

* chore: remove unused 'res' parameter from initializeClient in multiple endpoint files
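
A sketch of the kind of signature change this refers to; the options shape below is an assumption about the general form of `initializeClient`, not the exact LibreChat signature.

```ts
import type { Request } from 'express';

// Sketch: the unused `res` parameter is dropped from the options object,
// leaving only what the initializer actually reads.
type InitializeClientOptions = {
  req: Request;
  endpointOption: Record<string, unknown>; // placeholder for the real endpoint option type
};

async function initializeClient({ req, endpointOption }: InitializeClientOptions) {
  // Placeholder body: the real initializer would construct and return a
  // provider client from req and endpointOption.
  return { endpoint: req.path, options: endpointOption };
}
```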

* chore: remove unused 'isSmallScreen' prop from BookmarkNav and NewChat components; clean up imports in ArchivedChatsTable and useSetIndexOptions hooks; enhance localization in PromptVersions

* chore: remove unused import of Constants and TMessage from MobileNav; retain only necessary QueryKeys import

* chore: remove unused TResPlugin type and related references; clean up imports in types and schemas
Danny Avila 2025-11-25 15:20:07 -05:00
parent b6dcefc53a
commit 656e1abaea
161 changed files with 256 additions and 10513 deletions

View file

@@ -860,7 +860,6 @@ export const configSchema = z.object({
[EModelEndpoint.openAI]: baseEndpointSchema.optional(),
[EModelEndpoint.google]: baseEndpointSchema.optional(),
[EModelEndpoint.anthropic]: baseEndpointSchema.optional(),
[EModelEndpoint.gptPlugins]: baseEndpointSchema.optional(),
[EModelEndpoint.azureOpenAI]: azureEndpointSchema.optional(),
[EModelEndpoint.azureAssistants]: assistantEndpointSchema.optional(),
[EModelEndpoint.assistants]: assistantEndpointSchema.optional(),
@@ -936,8 +935,6 @@ export const defaultEndpoints: EModelEndpoint[] = [
EModelEndpoint.azureAssistants,
EModelEndpoint.azureOpenAI,
EModelEndpoint.agents,
EModelEndpoint.chatGPTBrowser,
EModelEndpoint.gptPlugins,
EModelEndpoint.google,
EModelEndpoint.anthropic,
EModelEndpoint.custom,
@@ -950,8 +947,6 @@ export const alternateName = {
[EModelEndpoint.agents]: 'My Agents',
[EModelEndpoint.azureAssistants]: 'Azure Assistants',
[EModelEndpoint.azureOpenAI]: 'Azure OpenAI',
[EModelEndpoint.chatGPTBrowser]: 'ChatGPT',
[EModelEndpoint.gptPlugins]: 'Plugins',
[EModelEndpoint.google]: 'Google',
[EModelEndpoint.anthropic]: 'Anthropic',
[EModelEndpoint.custom]: 'Custom',
@@ -1098,9 +1093,7 @@ export const initialModelsConfig: TModelsConfig = {
[EModelEndpoint.openAI]: openAIModels,
[EModelEndpoint.assistants]: openAIModels.filter(fitlerAssistantModels),
[EModelEndpoint.agents]: openAIModels, // TODO: Add agent models (agentsModels)
[EModelEndpoint.gptPlugins]: openAIModels,
[EModelEndpoint.azureOpenAI]: openAIModels,
[EModelEndpoint.chatGPTBrowser]: ['text-davinci-002-render-sha'],
[EModelEndpoint.google]: defaultModels[EModelEndpoint.google],
[EModelEndpoint.anthropic]: defaultModels[EModelEndpoint.anthropic],
[EModelEndpoint.bedrock]: defaultModels[EModelEndpoint.bedrock],
@@ -1113,7 +1106,6 @@ export const EndpointURLs = {
} as const;
export const modularEndpoints = new Set<EModelEndpoint | string>([
EModelEndpoint.gptPlugins,
EModelEndpoint.anthropic,
EModelEndpoint.google,
EModelEndpoint.openAI,
@@ -1127,7 +1119,6 @@ export const supportsBalanceCheck = {
[EModelEndpoint.custom]: true,
[EModelEndpoint.openAI]: true,
[EModelEndpoint.anthropic]: true,
[EModelEndpoint.gptPlugins]: true,
[EModelEndpoint.assistants]: true,
[EModelEndpoint.agents]: true,
[EModelEndpoint.azureAssistants]: true,
@@ -1243,10 +1234,6 @@ export enum CacheKeys {
* Key for the roles cache.
*/
ROLES = 'ROLES',
/**
* Key for the plugins cache.
*/
PLUGINS = 'PLUGINS',
/**
* Key for the title generation cache.
*/

View file

@@ -10,11 +10,9 @@ import {
EModelEndpoint,
anthropicSchema,
assistantSchema,
gptPluginsSchema,
// agentsSchema,
compactAgentsSchema,
compactGoogleSchema,
compactPluginsSchema,
compactAssistantSchema,
} from './schemas';
import { bedrockInputSchema } from './bedrock';
@@ -24,12 +22,11 @@ type EndpointSchema =
| typeof openAISchema
| typeof googleSchema
| typeof anthropicSchema
| typeof gptPluginsSchema
| typeof assistantSchema
| typeof compactAgentsSchema
| typeof bedrockInputSchema;
export type EndpointSchemaKey = Exclude<EModelEndpoint, EModelEndpoint.chatGPTBrowser>;
export type EndpointSchemaKey = EModelEndpoint;
const endpointSchemas: Record<EndpointSchemaKey, EndpointSchema> = {
[EModelEndpoint.openAI]: openAISchema,
@@ -37,7 +34,6 @@ const endpointSchemas: Record<EndpointSchemaKey, EndpointSchema> = {
[EModelEndpoint.custom]: openAISchema,
[EModelEndpoint.google]: googleSchema,
[EModelEndpoint.anthropic]: anthropicSchema,
[EModelEndpoint.gptPlugins]: gptPluginsSchema,
[EModelEndpoint.assistants]: assistantSchema,
[EModelEndpoint.azureAssistants]: assistantSchema,
[EModelEndpoint.agents]: compactAgentsSchema,
@@ -57,8 +53,6 @@ export function getEnabledEndpoints() {
EModelEndpoint.azureAssistants,
EModelEndpoint.azureOpenAI,
EModelEndpoint.google,
EModelEndpoint.chatGPTBrowser,
EModelEndpoint.gptPlugins,
EModelEndpoint.anthropic,
EModelEndpoint.bedrock,
];
@@ -143,7 +137,6 @@ export function getNonEmptyValue(possibleValues: string[]) {
export type TPossibleValues = {
models: string[];
secondaryModels?: string[];
};
export const parseConvo = ({
@@ -172,16 +165,12 @@
// }
const convo = schema?.parse(conversation) as s.TConversation | undefined;
const { models, secondaryModels } = possibleValues ?? {};
const { models } = possibleValues ?? {};
if (models && convo) {
convo.model = getFirstDefinedValue(models) ?? convo.model;
}
if (secondaryModels && convo?.agentOptions) {
convo.agentOptions.model = getFirstDefinedValue(secondaryModels) ?? convo.agentOptions.model;
}
return convo;
};
@@ -225,13 +214,7 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
const chatGptLabel = _cgl ?? '';
const modelLabel = _ml ?? '';
if (
[
EModelEndpoint.openAI,
EModelEndpoint.bedrock,
EModelEndpoint.gptPlugins,
EModelEndpoint.azureOpenAI,
EModelEndpoint.chatGPTBrowser,
].includes(endpoint)
[EModelEndpoint.openAI, EModelEndpoint.bedrock, EModelEndpoint.azureOpenAI].includes(endpoint)
) {
if (chatGptLabel) {
return chatGptLabel;
@@ -247,7 +230,7 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
const gptVersion = extractGPTVersion(model);
return gptVersion || 'GPT';
}
return (alternateName[endpoint] as string | undefined) ?? 'ChatGPT';
return (alternateName[endpoint] as string | undefined) ?? 'AI';
}
if (endpoint === EModelEndpoint.anthropic) {
@@ -298,8 +281,7 @@ type CompactEndpointSchema =
| typeof compactAgentsSchema
| typeof compactGoogleSchema
| typeof anthropicSchema
| typeof bedrockInputSchema
| typeof compactPluginsSchema;
| typeof bedrockInputSchema;
const compactEndpointSchemas: Record<EndpointSchemaKey, CompactEndpointSchema> = {
[EModelEndpoint.openAI]: openAISchema,
@@ -311,7 +293,6 @@ const compactEndpointSchemas: Record<EndpointSchemaKey, CompactEndpointSchema> =
[EModelEndpoint.google]: compactGoogleSchema,
[EModelEndpoint.bedrock]: bedrockInputSchema,
[EModelEndpoint.anthropic]: anthropicSchema,
[EModelEndpoint.gptPlugins]: compactPluginsSchema,
};
export const parseCompactConvo = ({
@@ -348,17 +329,12 @@ export const parseCompactConvo = ({
const { iconURL: _clientIconURL, ...conversationWithoutIconURL } = conversation;
const convo = schema.parse(conversationWithoutIconURL) as s.TConversation | null;
// const { models, secondaryModels } = possibleValues ?? {};
const { models } = possibleValues ?? {};
if (models && convo) {
convo.model = getFirstDefinedValue(models) ?? convo.model;
}
// if (secondaryModels && convo.agentOptions) {
// convo.agentOptionmodel = getFirstDefinedValue(secondaryModels) ?? convo.agentOptionmodel;
// }
return convo;
};

View file

@@ -25,10 +25,6 @@ export enum EModelEndpoint {
agents = 'agents',
custom = 'custom',
bedrock = 'bedrock',
/** @deprecated */
chatGPTBrowser = 'chatGPTBrowser',
/** @deprecated */
gptPlugins = 'gptPlugins',
}
/** Mirrors `@librechat/agents` providers */
@@ -529,16 +525,6 @@ export type TInput = {
inputStr: string;
};
export type TResPlugin = {
plugin: string;
input: string;
thought: string;
loading?: boolean;
outputs?: string;
latest?: string;
inputs?: TInput[];
};
export const tExampleSchema = z.object({
input: z.object({
content: z.string(),
@@ -550,39 +536,6 @@ export const tExampleSchema = z.object({
export type TExample = z.infer<typeof tExampleSchema>;
export enum EAgent {
functions = 'functions',
classic = 'classic',
}
export const agentOptionSettings = {
model: {
default: 'gpt-4o-mini',
},
temperature: {
min: 0,
max: 1,
step: 0.01,
default: 0,
},
agent: {
default: EAgent.functions,
options: [EAgent.functions, EAgent.classic],
},
skipCompletion: {
default: true,
},
};
export const eAgentOptionsSchema = z.nativeEnum(EAgent);
export const tAgentOptionsSchema = z.object({
agent: z.string().default(EAgent.functions),
skipCompletion: z.boolean().default(agentOptionSettings.skipCompletion.default),
model: z.string(),
temperature: z.number().default(agentOptionSettings.temperature.default),
});
export const tMessageSchema = z.object({
messageId: z.string(),
endpoint: z.string().optional(),
@@ -659,8 +612,6 @@ export type TAttachment =
export type TMessage = z.input<typeof tMessageSchema> & {
children?: TMessage[];
plugin?: TResPlugin | null;
plugins?: TResPlugin[];
content?: TMessageContentParts[];
files?: Partial<TFile>[];
depth?: number;
@@ -775,8 +726,6 @@ export const tConversationSchema = z.object({
fileTokenLimit: coerceNumber.optional(),
/** @deprecated */
resendImages: z.boolean().optional(),
/** @deprecated */
agentOptions: tAgentOptionsSchema.nullable().optional(),
/** @deprecated Prefer `modelLabel` over `chatGptLabel` */
chatGptLabel: z.string().nullable().optional(),
});
@@ -982,75 +931,6 @@ export const googleGenConfigSchema = z
.strip()
.optional();
const gptPluginsBaseSchema = tConversationSchema.pick({
model: true,
modelLabel: true,
chatGptLabel: true,
promptPrefix: true,
temperature: true,
artifacts: true,
top_p: true,
presence_penalty: true,
frequency_penalty: true,
tools: true,
agentOptions: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
});
export const gptPluginsSchema = gptPluginsBaseSchema
.transform((obj) => {
const result = {
...obj,
model: obj.model ?? 'gpt-3.5-turbo',
chatGptLabel: obj.chatGptLabel ?? obj.modelLabel ?? null,
promptPrefix: obj.promptPrefix ?? null,
temperature: obj.temperature ?? 0.8,
top_p: obj.top_p ?? 1,
presence_penalty: obj.presence_penalty ?? 0,
frequency_penalty: obj.frequency_penalty ?? 0,
tools: obj.tools ?? [],
agentOptions: obj.agentOptions ?? {
agent: EAgent.functions,
skipCompletion: true,
model: 'gpt-3.5-turbo',
temperature: 0,
},
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
maxContextTokens: obj.maxContextTokens ?? undefined,
};
if (obj.modelLabel != null && obj.modelLabel !== '') {
result.modelLabel = null;
}
return result;
})
.catch(() => ({
model: 'gpt-3.5-turbo',
chatGptLabel: null,
promptPrefix: null,
temperature: 0.8,
top_p: 1,
presence_penalty: 0,
frequency_penalty: 0,
tools: [],
agentOptions: {
agent: EAgent.functions,
skipCompletion: true,
model: 'gpt-3.5-turbo',
temperature: 0,
},
iconURL: undefined,
greeting: undefined,
spec: undefined,
maxContextTokens: undefined,
}));
export function removeNullishValues<T extends Record<string, unknown>>(
obj: T,
removeEmptyStrings?: boolean,
@@ -1251,48 +1131,6 @@ export const anthropicSchema = anthropicBaseSchema
.transform((obj) => removeNullishValues(obj))
.catch(() => ({}));
export const compactPluginsSchema = gptPluginsBaseSchema
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };
if (newObj.modelLabel === null) {
delete newObj.modelLabel;
}
if (newObj.chatGptLabel === null) {
delete newObj.chatGptLabel;
}
if (newObj.promptPrefix === null) {
delete newObj.promptPrefix;
}
if (newObj.temperature === 0.8) {
delete newObj.temperature;
}
if (newObj.top_p === 1) {
delete newObj.top_p;
}
if (newObj.presence_penalty === 0) {
delete newObj.presence_penalty;
}
if (newObj.frequency_penalty === 0) {
delete newObj.frequency_penalty;
}
if (newObj.tools?.length === 0) {
delete newObj.tools;
}
if (
newObj.agentOptions &&
newObj.agentOptions.agent === EAgent.functions &&
newObj.agentOptions.skipCompletion === true &&
newObj.agentOptions.model === 'gpt-3.5-turbo' &&
newObj.agentOptions.temperature === 0
) {
delete newObj.agentOptions;
}
return removeNullishValues(newObj);
})
.catch(() => ({}));
export const tBannerSchema = z.object({
bannerId: z.string(),
message: z.string(),

View file

@@ -1,13 +1,12 @@
import type { InfiniteData } from '@tanstack/react-query';
import type {
TBanner,
TMessage,
TResPlugin,
TSharedLink,
TConversation,
EModelEndpoint,
TConversationTag,
EModelEndpoint,
TConversation,
TSharedLink,
TAttachment,
TMessage,
TBanner,
} from './schemas';
import type { SettingDefinition } from './generate';
import type { TMinimalFeedback } from './feedback';
@@ -125,8 +124,6 @@ export type TEditedContent =
};
export type TSubmission = {
plugin?: TResPlugin;
plugins?: TResPlugin[];
userMessage: TMessage;
isEdited?: boolean;
isContinued?: boolean;