Mirror of https://github.com/danny-avila/LibreChat.git (synced 2026-02-13 21:14:24 +01:00)
🧵 refactor: Migrate Endpoint Initialization to TypeScript (#10794)
* refactor: move endpoint initialization methods to typescript
* refactor: move agent init to packages/api
  - Introduced `initialize.ts` for agent initialization, including file processing and tool loading.
  - Updated `resources.ts` to allow optional appConfig parameter.
  - Enhanced endpoint configuration handling in various initialization files to support model parameters.
  - Added new artifacts and prompts for React component generation.
  - Refactored existing code to improve type safety and maintainability.
* refactor: streamline endpoint initialization and enhance type safety
  - Updated initialization functions across various endpoints to use a consistent request structure, replacing `unknown` types with `ServerResponse`.
  - Simplified request handling by directly extracting keys from the request body.
  - Improved type safety by ensuring user IDs are safely accessed with optional chaining.
  - Removed unnecessary parameters and streamlined model options handling for better clarity and maintainability.
* refactor: moved ModelService and extractBaseURL to packages/api
  - Added comprehensive tests for the models fetching functionality, covering scenarios for OpenAI, Anthropic, Google, and Ollama models.
  - Updated existing endpoint index to include the new models module.
  - Enhanced utility functions for URL extraction and model data processing.
  - Improved type safety and error handling across the models fetching logic.
* refactor: consolidate utility functions and remove unused files
  - Merged `deriveBaseURL` and `extractBaseURL` into the `@librechat/api` module for better organization.
  - Removed redundant utility files and their associated tests to streamline the codebase.
  - Updated imports across various client files to utilize the new consolidated functions.
  - Enhanced overall maintainability by reducing the number of utility modules.
* refactor: replace ModelService references with direct imports from @librechat/api and remove ModelService file
* refactor: move encrypt/decrypt methods and key db methods to data-schemas, use `getProviderConfig` from `@librechat/api`
* chore: remove unused 'res' from options in AgentClient
* refactor: file model imports and methods
  - Updated imports in various controllers and services to use the unified file model from '~/models' instead of '~/models/File'.
  - Consolidated file-related methods into a new file methods module in the data-schemas package.
  - Added comprehensive tests for file methods including creation, retrieval, updating, and deletion.
  - Enhanced the initializeAgent function to accept dependency injection for file-related methods.
  - Improved error handling and logging in file methods.
* refactor: streamline database method references in agent initialization
* refactor: enhance file method tests and update type references to IMongoFile
* refactor: consolidate database method imports in agent client and initialization
* chore: remove redundant import of initializeAgent from @librechat/api
* refactor: move checkUserKeyExpiry utility to @librechat/api and update references across endpoints
* refactor: move updateUserPlugins logic to user.ts and simplify UserController
* refactor: update imports for user key management and remove UserService
* refactor: remove unused Anthropics and Bedrock endpoint files and clean up imports
* refactor: consolidate and update encryption imports across various files to use @librechat/data-schemas
* chore: update file model mock to use unified import from '~/models'
* chore: import order
* refactor: remove migrated-to-TS agent.js file and its associated logic from the endpoints
* chore: add reusable function to extract imports from source code in unused-packages workflow
* chore: enhance unused-packages workflow to include @librechat/api dependencies and improve dependency extraction
* chore: improve dependency extraction in unused-packages workflow with enhanced error handling and debugging output
* chore: add detailed debugging output to unused-packages workflow for better visibility into unused dependencies and exclusion lists
* chore: refine subpath handling in unused-packages workflow to correctly process scoped and non-scoped package imports
* chore: clean up unused debug output in unused-packages workflow and reorganize type imports in initialize.ts
parent 1a11b64266
commit 04a4a2aa44
103 changed files with 4135 additions and 2647 deletions
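
The diff below (one of the 103 changed files) shows the pattern the migration applies to each endpoint initializer: the arrow-function export becomes a plain `async function`, the endpoint name and model parameters arrive as arguments instead of being re-read from `req.body`, and database access goes through an injected `db` object rather than callback parameters like `getUserKeyValues`. The following sketch is only an illustration of that call shape: `BaseInitializeParams`, `InitializeResultBase`, and `db.getUserKeyValues` appear in the diff, but the field lists, the `initializeExampleEndpoint` name, and the caller wiring at the end are assumptions, not code from the PR.

// A minimal, self-contained sketch of the initializer shape after the migration.
// The interfaces below are simplified stand-ins for the real definitions in ~/types.
interface UserKeyValues {
  apiKey?: string;
  baseURL?: string;
}

/** Database helpers are injected by the caller instead of imported by the initializer. */
interface InjectedDbMethods {
  getUserKeyValues: (params: { userId: string; name: string }) => Promise<UserKeyValues>;
}

interface BaseInitializeParams {
  req: { user?: { id: string }; body: { key?: string } };
  endpoint: string;
  model_parameters?: Record<string, unknown>;
  db: InjectedDbMethods;
}

interface InitializeResultBase {
  llmConfig: Record<string, unknown>;
  useLegacyContent?: boolean;
}

/** Hypothetical initializer illustrating the migrated call shape. */
async function initializeExampleEndpoint({
  req,
  endpoint,
  model_parameters,
  db,
}: BaseInitializeParams): Promise<InitializeResultBase> {
  // The model now comes from the injected model_parameters, not from req.body.
  const modelName = model_parameters?.model as string | undefined;

  // A user-provided key, if present, is resolved through the injected db helper.
  const { key: expiresAt } = req.body;
  let userValues: UserKeyValues | undefined;
  if (expiresAt) {
    userValues = await db.getUserKeyValues({ userId: req.user?.id ?? '', name: endpoint });
  }

  return {
    llmConfig: { model: modelName, user: req.user?.id, apiKey: userValues?.apiKey },
  };
}

// Caller side: the server supplies its own database methods.
void initializeExampleEndpoint({
  req: { user: { id: 'user-123' }, body: {} },
  endpoint: 'openAI',
  model_parameters: { model: 'gpt-4o-mini' },
  db: { getUserKeyValues: async () => ({ apiKey: 'sk-example' }) },
});

In the actual diff that follows, note that the removed signature received `getUserKeyValues` and `checkUserKeyExpiry` as callback parameters, while the new version imports `checkUserKeyExpiry` from the package's own utils and reaches the database only through the injected `db` object.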
@@ -1,13 +1,11 @@
 import { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } from 'librechat-data-provider';
 import type {
-  InitializeOpenAIOptionsParams,
+  BaseInitializeParams,
+  InitializeResultBase,
   OpenAIConfigOptions,
-  LLMConfigResult,
   UserKeyValues,
 } from '~/types';
-import { getAzureCredentials } from '~/utils/azure';
-import { isUserProvided } from '~/utils/common';
-import { resolveHeaders } from '~/utils/env';
+import { getAzureCredentials, resolveHeaders, isUserProvided, checkUserKeyExpiry } from '~/utils';
 import { getOpenAIConfig } from './config';
 
 /**
@@ -18,25 +16,18 @@ import { getOpenAIConfig } from './config';
  * @returns Promise resolving to OpenAI configuration options
  * @throws Error if API key is missing or user key has expired
  */
-export const initializeOpenAI = async ({
+export async function initializeOpenAI({
   req,
-  appConfig,
-  overrideModel,
-  endpointOption,
-  overrideEndpoint,
-  getUserKeyValues,
-  checkUserKeyExpiry,
-}: InitializeOpenAIOptionsParams): Promise<LLMConfigResult> => {
+  endpoint,
+  model_parameters,
+  db,
+}: BaseInitializeParams): Promise<InitializeResultBase> {
+  const appConfig = req.config;
   const { PROXY, OPENAI_API_KEY, AZURE_API_KEY, OPENAI_REVERSE_PROXY, AZURE_OPENAI_BASEURL } =
     process.env;
 
   const { key: expiresAt } = req.body;
-  const modelName = overrideModel ?? req.body.model;
-  const endpoint = overrideEndpoint ?? req.body.endpoint;
-
-  if (!endpoint) {
-    throw new Error('Endpoint is required');
-  }
+  const modelName = model_parameters?.model as string | undefined;
 
   const credentials = {
     [EModelEndpoint.openAI]: OPENAI_API_KEY,
@@ -54,7 +45,7 @@ export const initializeOpenAI = async ({
   let userValues: UserKeyValues | null = null;
   if (expiresAt && (userProvidesKey || userProvidesURL)) {
     checkUserKeyExpiry(expiresAt, endpoint);
-    userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
+    userValues = await db.getUserKeyValues({ userId: req.user?.id ?? '', name: endpoint });
   }
 
   let apiKey = userProvidesKey
@@ -71,7 +62,8 @@ export const initializeOpenAI = async ({
   };
 
   const isAzureOpenAI = endpoint === EModelEndpoint.azureOpenAI;
-  const azureConfig = isAzureOpenAI && appConfig.endpoints?.[EModelEndpoint.azureOpenAI];
+  const azureConfig = isAzureOpenAI && appConfig?.endpoints?.[EModelEndpoint.azureOpenAI];
+  let isServerless = false;
 
   if (isAzureOpenAI && azureConfig) {
     const { modelGroupMap, groupMap } = azureConfig;
@@ -85,6 +77,7 @@ export const initializeOpenAI = async ({
       modelGroupMap,
       groupMap,
     });
+    isServerless = serverless === true;
 
     clientOptions.reverseProxyUrl = configBaseURL ?? clientOptions.reverseProxyUrl;
     clientOptions.headers = resolveHeaders({
@@ -99,9 +92,9 @@ export const initializeOpenAI = async ({
     }
 
     apiKey = azureOptions.azureOpenAIApiKey;
-    clientOptions.azure = !serverless ? azureOptions : undefined;
+    clientOptions.azure = !isServerless ? azureOptions : undefined;
 
-    if (serverless === true) {
+    if (isServerless) {
      clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
        ? { 'api-version': azureOptions.azureOpenAIApiVersion }
        : undefined;
@@ -130,9 +123,9 @@ export const initializeOpenAI = async ({
   }
 
   const modelOptions = {
-    ...endpointOption.model_parameters,
+    ...(model_parameters ?? {}),
     model: modelName,
-    user: req.user.id,
+    user: req.user?.id,
   };
 
   const finalClientOptions: OpenAIConfigOptions = {
@@ -142,8 +135,13 @@ export const initializeOpenAI = async ({
 
   const options = getOpenAIConfig(apiKey, finalClientOptions, endpoint);
 
-  const openAIConfig = appConfig.endpoints?.[EModelEndpoint.openAI];
-  const allConfig = appConfig.endpoints?.all;
+  /** Set useLegacyContent for Azure serverless deployments */
+  if (isServerless) {
+    (options as InitializeResultBase).useLegacyContent = true;
+  }
+
+  const openAIConfig = appConfig?.endpoints?.[EModelEndpoint.openAI];
+  const allConfig = appConfig?.endpoints?.all;
   const azureRate = modelName?.includes('gpt-4') ? 30 : 17;
 
   let streamRate: number | undefined;
@@ -163,4 +161,4 @@ export const initializeOpenAI = async ({
   }
 
   return options;
-};
+}