Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-28 06:08:50 +01:00)

Merge branch 'main' into feature/entra-id-azure-integration

Commit be58d8e4f0: 244 changed files with 6722 additions and 3399 deletions
packages/api/package.json

@@ -1,6 +1,6 @@
{
  "name": "@librechat/api",
-  "version": "1.4.0",
+  "version": "1.4.1",
  "type": "commonjs",
  "description": "MCP services for LibreChat",
  "main": "dist/index.js",
@@ -18,8 +18,9 @@
    "build:dev": "npm run clean && NODE_ENV=development rollup -c --bundleConfigAsCjs",
    "build:watch": "NODE_ENV=development rollup -c -w --bundleConfigAsCjs",
    "build:watch:prod": "rollup -c -w --bundleConfigAsCjs",
-    "test": "jest --coverage --watch",
-    "test:ci": "jest --coverage --ci",
+    "test": "jest --coverage --watch --testPathIgnorePatterns=\"\\.integration\\.\"",
+    "test:ci": "jest --coverage --ci --testPathIgnorePatterns=\"\\.integration\\.\"",
+    "test:cache:integration": "jest --testPathPattern=\"src/cache/.*\\.integration\\.spec\\.ts$\" --coverage=false",
    "verify": "npm run test:ci",
    "b:clean": "bun run rimraf dist",
    "b:build": "bun run b:clean && bun run rollup -c --silent --bundleConfigAsCjs",
@@ -52,6 +53,7 @@
    "@types/bun": "^1.2.15",
    "@types/diff": "^6.0.0",
    "@types/express": "^5.0.0",
+    "@types/express-session": "^1.18.2",
    "@types/jest": "^29.5.2",
    "@types/jsonwebtoken": "^9.0.0",
    "@types/multer": "^1.4.13",
@@ -61,7 +63,7 @@
    "jest": "^29.5.0",
    "jest-junit": "^16.0.0",
    "librechat-data-provider": "*",
-    "mongoose": "^8.12.1",
+    "mongodb": "^6.14.2",
    "rimraf": "^5.0.1",
    "rollup": "^4.22.4",
    "rollup-plugin-peer-deps-external": "^2.2.4",
@@ -72,20 +74,33 @@
    "registry": "https://registry.npmjs.org/"
  },
  "peerDependencies": {
    "@aws-sdk/client-s3": "^3.758.0",
    "@azure/identity": "^4.7.0",
    "@azure/search-documents": "^12.0.0",
    "@azure/storage-blob": "^12.27.0",
    "@keyv/redis": "^4.3.3",
    "@langchain/core": "^0.3.62",
-    "@librechat/agents": "^2.4.81",
+    "@librechat/agents": "^2.4.85",
    "@librechat/data-schemas": "*",
    "@modelcontextprotocol/sdk": "^1.17.1",
    "axios": "^1.12.1",
    "connect-redis": "^8.1.0",
    "diff": "^7.0.0",
    "eventsource": "^3.0.2",
    "express": "^4.21.2",
    "express-session": "^1.18.2",
    "firebase": "^11.0.2",
    "form-data": "^4.0.4",
    "ioredis": "^5.3.2",
    "js-yaml": "^4.1.0",
    "jsonwebtoken": "^9.0.0",
    "keyv": "^5.3.2",
    "keyv-file": "^5.1.2",
    "librechat-data-provider": "*",
    "memorystore": "^1.6.7",
    "mongoose": "^8.12.1",
    "node-fetch": "2.7.0",
    "rate-limit-redis": "^4.2.0",
    "tiktoken": "^1.0.15",
    "undici": "^7.10.0",
    "zod": "^3.22.4"
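The reworked test scripts split unit and integration runs purely by filename: anything matching `.integration.` is skipped by `test` and `test:ci` and selected only by `test:cache:integration`. A hypothetical spec name illustrating the convention these jest patterns assume:

```ts
// src/cache/redisCache.integration.spec.ts (hypothetical file, for illustration only)
// `--testPathIgnorePatterns="\.integration\."` keeps it out of `test` and `test:ci`;
// `--testPathPattern="src/cache/.*\.integration\.spec\.ts$"` is what selects it.
describe('redis cache (integration)', () => {
  it('round-trips a value through a live backend', async () => {
    // Requires external infrastructure, which is why it is excluded from unit runs.
  });
});
```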
@@ -18,6 +18,7 @@ const plugins = [
  peerDepsExternal(),
  resolve({
    preferBuiltins: true,
+    skipSelf: true,
  }),
  replace({
    __IS_DEV__: isDevelopment,
@@ -1,24 +0,0 @@
-import { EModelEndpoint, agentsEndpointSchema } from 'librechat-data-provider';
-import type { TCustomConfig, TAgentsEndpoint } from 'librechat-data-provider';
-
-/**
- * Sets up the Agents configuration from the config (`librechat.yaml`) file.
- * If no agents config is defined, uses the provided defaults or parses empty object.
- *
- * @param config - The loaded custom configuration.
- * @param [defaultConfig] - Default configuration from getConfigDefaults.
- * @returns The Agents endpoint configuration.
- */
-export function agentsConfigSetup(
-  config: TCustomConfig,
-  defaultConfig: Partial<TAgentsEndpoint>,
-): Partial<TAgentsEndpoint> {
-  const agentsConfig = config?.endpoints?.[EModelEndpoint.agents];
-
-  if (!agentsConfig) {
-    return defaultConfig || agentsEndpointSchema.parse({});
-  }
-
-  const parsedConfig = agentsEndpointSchema.parse(agentsConfig);
-  return parsedConfig;
-}
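The deleted helper's fallback pattern (validate the YAML `agents` block when present, otherwise fall back to schema defaults) survives in the new `AppService` specs below, which assert `disableBuilder: false` and `defaultAgentCapabilities` when no agents config is given. A minimal sketch of the same Zod-default pattern, assuming `agentsEndpointSchema` as exported by `librechat-data-provider`:

```ts
import { EModelEndpoint, agentsEndpointSchema } from 'librechat-data-provider';
import type { TCustomConfig } from 'librechat-data-provider';

// Parsing an empty object yields the schema's declared defaults
// (e.g. disableBuilder: false and the default capability list).
const agentDefaults = agentsEndpointSchema.parse({});

// When a YAML block exists, parsing it validates the fields and fails
// fast on malformed config instead of propagating bad values.
function resolveAgentsConfig(config: TCustomConfig) {
  const block = config?.endpoints?.[EModelEndpoint.agents];
  return block ? agentsEndpointSchema.parse(block) : agentDefaults;
}
```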
@@ -1,4 +1,3 @@
export * from './config';
export * from './memory';
export * from './migration';
-export * from './legacy';
@@ -2,10 +2,9 @@ import { primeResources } from './resources';
import { logger } from '@librechat/data-schemas';
import { EModelEndpoint, EToolResources, AgentCapabilities } from 'librechat-data-provider';
import type { TAgentsEndpoint, TFile } from 'librechat-data-provider';
+import type { IUser, AppConfig } from '@librechat/data-schemas';
import type { Request as ServerRequest } from 'express';
-import type { IUser } from '@librechat/data-schemas';
import type { TGetFiles } from './resources';
-import type { AppConfig } from '~/types';

// Mock logger
jest.mock('@librechat/data-schemas', () => ({
@@ -1,10 +1,9 @@
import { logger } from '@librechat/data-schemas';
import { EModelEndpoint, EToolResources, AgentCapabilities } from 'librechat-data-provider';
import type { AgentToolResources, TFile, AgentBaseResource } from 'librechat-data-provider';
+import type { IMongoFile, AppConfig, IUser } from '@librechat/data-schemas';
import type { FilterQuery, QueryOptions, ProjectionType } from 'mongoose';
-import type { IMongoFile, IUser } from '@librechat/data-schemas';
import type { Request as ServerRequest } from 'express';
-import type { AppConfig } from '~/types/';

/**
 * Function type for retrieving files from the database
packages/api/src/app/AppService.interface.spec.ts (new file, 157 lines)

@@ -0,0 +1,157 @@
jest.mock('@librechat/data-schemas', () => ({
  ...jest.requireActual('@librechat/data-schemas'),
  logger: {
    info: jest.fn(),
    warn: jest.fn(),
    error: jest.fn(),
  },
}));

import { AppService } from '@librechat/data-schemas';

describe('AppService interface configuration', () => {
  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('should set prompts to true when config specifies prompts as true', async () => {
    const config = {
      interface: {
        prompts: true,
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        interfaceConfig: expect.objectContaining({
          prompts: true,
        }),
      }),
    );
  });

  it('should set prompts and bookmarks to false when config specifies them as false', async () => {
    const config = {
      interface: {
        prompts: false,
        bookmarks: false,
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        interfaceConfig: expect.objectContaining({
          prompts: false,
          bookmarks: false,
        }),
      }),
    );
  });

  it('should not set prompts and bookmarks when not provided in config', async () => {
    const config = {};

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        interfaceConfig: expect.anything(),
      }),
    );

    // Verify that prompts and bookmarks are undefined when not provided
    expect(result.interfaceConfig?.prompts).toBeUndefined();
    expect(result.interfaceConfig?.bookmarks).toBeUndefined();
  });

  it('should set prompts and bookmarks to different values when specified differently in config', async () => {
    const config = {
      interface: {
        prompts: true,
        bookmarks: false,
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        interfaceConfig: expect.objectContaining({
          prompts: true,
          bookmarks: false,
        }),
      }),
    );
  });

  it('should correctly configure peoplePicker permissions including roles', async () => {
    const config = {
      interface: {
        peoplePicker: {
          users: true,
          groups: true,
          roles: true,
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        interfaceConfig: expect.objectContaining({
          peoplePicker: expect.objectContaining({
            users: true,
            groups: true,
            roles: true,
          }),
        }),
      }),
    );
  });

  it('should handle mixed peoplePicker permissions', async () => {
    const config = {
      interface: {
        peoplePicker: {
          users: true,
          groups: false,
          roles: true,
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        interfaceConfig: expect.objectContaining({
          peoplePicker: expect.objectContaining({
            users: true,
            groups: false,
            roles: true,
          }),
        }),
      }),
    );
  });

  it('should not set peoplePicker when not provided in config', async () => {
    const config = {};

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        interfaceConfig: expect.anything(),
      }),
    );

    // Verify that peoplePicker is undefined when not provided
    expect(result.interfaceConfig?.peoplePicker).toBeUndefined();
  });
});
packages/api/src/app/AppService.spec.ts (new file, 814 lines)

@@ -0,0 +1,814 @@
import {
  OCRStrategy,
  FileSources,
  EModelEndpoint,
  EImageOutputType,
  AgentCapabilities,
  defaultSocialLogins,
  validateAzureGroups,
  defaultAgentCapabilities,
} from 'librechat-data-provider';
import type { TCustomConfig } from 'librechat-data-provider';

jest.mock('@librechat/data-schemas', () => ({
  ...jest.requireActual('@librechat/data-schemas'),
  logger: {
    info: jest.fn(),
    warn: jest.fn(),
    error: jest.fn(),
    debug: jest.fn(),
  },
}));

import { AppService } from '@librechat/data-schemas';

const azureGroups = [
  {
    group: 'librechat-westus',
    apiKey: '${WESTUS_API_KEY}',
    instanceName: 'librechat-westus',
    version: '2023-12-01-preview',
    models: {
      'gpt-4-vision-preview': {
        deploymentName: 'gpt-4-vision-preview',
        version: '2024-02-15-preview',
      },
      'gpt-3.5-turbo': {
        deploymentName: 'gpt-35-turbo',
      },
      'gpt-3.5-turbo-1106': {
        deploymentName: 'gpt-35-turbo-1106',
      },
      'gpt-4': {
        deploymentName: 'gpt-4',
      },
      'gpt-4-1106-preview': {
        deploymentName: 'gpt-4-1106-preview',
      },
    },
  },
  {
    group: 'librechat-eastus',
    apiKey: '${EASTUS_API_KEY}',
    instanceName: 'librechat-eastus',
    deploymentName: 'gpt-4-turbo',
    version: '2024-02-15-preview',
    models: {
      'gpt-4-turbo': true,
    },
  } as const,
];

describe('AppService', () => {
  const mockSystemTools = {
    ExampleTool: {
      type: 'function',
      function: {
        description: 'Example tool function',
        name: 'exampleFunction',
        parameters: {
          type: 'object',
          properties: {
            param1: { type: 'string', description: 'An example parameter' },
          },
          required: ['param1'],
        },
      },
    },
  };

  beforeEach(() => {
    process.env.CDN_PROVIDER = undefined;
    jest.clearAllMocks();
  });

  it('should correctly assign process.env and initialize app config based on custom config', async () => {
    const config: Partial<TCustomConfig> = {
      registration: { socialLogins: ['testLogin'] },
      fileStrategy: 'testStrategy' as FileSources,
      balance: {
        enabled: true,
      },
    };

    const result = await AppService({ config, systemTools: mockSystemTools });

    expect(process.env.CDN_PROVIDER).toEqual('testStrategy');

    expect(result).toEqual(
      expect.objectContaining({
        config: expect.objectContaining({
          fileStrategy: 'testStrategy',
        }),
        registration: expect.objectContaining({
          socialLogins: ['testLogin'],
        }),
        fileStrategy: 'testStrategy',
        interfaceConfig: expect.objectContaining({
          endpointsMenu: true,
          modelSelect: true,
          parameters: true,
          sidePanel: true,
          presets: true,
        }),
        mcpConfig: null,
        imageOutputType: expect.any(String),
        fileConfig: undefined,
        secureImageLinks: undefined,
        balance: { enabled: true },
        filteredTools: undefined,
        includedTools: undefined,
        webSearch: expect.objectContaining({
          safeSearch: 1,
          jinaApiKey: '${JINA_API_KEY}',
          jinaApiUrl: '${JINA_API_URL}',
          cohereApiKey: '${COHERE_API_KEY}',
          serperApiKey: '${SERPER_API_KEY}',
          searxngApiKey: '${SEARXNG_API_KEY}',
          firecrawlApiKey: '${FIRECRAWL_API_KEY}',
          firecrawlApiUrl: '${FIRECRAWL_API_URL}',
          searxngInstanceUrl: '${SEARXNG_INSTANCE_URL}',
        }),
        memory: undefined,
        endpoints: expect.objectContaining({
          agents: expect.objectContaining({
            disableBuilder: false,
            capabilities: expect.arrayContaining([...defaultAgentCapabilities]),
            maxCitations: 30,
            maxCitationsPerFile: 7,
            minRelevanceScore: 0.45,
          }),
        }),
      }),
    );
  });

  it('should change the `imageOutputType` based on config value', async () => {
    const config = {
      version: '0.10.0',
      imageOutputType: EImageOutputType.WEBP,
    };

    const result = await AppService({ config });
    expect(result).toEqual(
      expect.objectContaining({
        imageOutputType: EImageOutputType.WEBP,
      }),
    );
  });

  it('should default to `PNG` `imageOutputType` with no provided type', async () => {
    const config = {
      version: '0.10.0',
    };

    const result = await AppService({ config });
    expect(result).toEqual(
      expect.objectContaining({
        imageOutputType: EImageOutputType.PNG,
      }),
    );
  });

  it('should default to `PNG` `imageOutputType` with no provided config', async () => {
    const config = {};

    const result = await AppService({ config });
    expect(result).toEqual(
      expect.objectContaining({
        imageOutputType: EImageOutputType.PNG,
      }),
    );
  });

  it('should load and format tools accurately with defined structure', async () => {
    const config = {};

    const result = await AppService({ config, systemTools: mockSystemTools });

    // Verify tools are included in the returned config
    expect(result.availableTools).toBeDefined();
    expect(result.availableTools?.ExampleTool).toEqual({
      type: 'function',
      function: {
        description: 'Example tool function',
        name: 'exampleFunction',
        parameters: {
          type: 'object',
          properties: {
            param1: { type: 'string', description: 'An example parameter' },
          },
          required: ['param1'],
        },
      },
    });
  });

  it('should correctly configure Assistants endpoint based on custom config', async () => {
    const config: Partial<TCustomConfig> = {
      endpoints: {
        [EModelEndpoint.assistants]: {
          disableBuilder: true,
          pollIntervalMs: 5000,
          timeoutMs: 30000,
          supportedIds: ['id1', 'id2'],
          privateAssistants: false,
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          [EModelEndpoint.assistants]: expect.objectContaining({
            disableBuilder: true,
            pollIntervalMs: 5000,
            timeoutMs: 30000,
            supportedIds: expect.arrayContaining(['id1', 'id2']),
            privateAssistants: false,
          }),
        }),
      }),
    );
  });

  it('should correctly configure Agents endpoint based on custom config', async () => {
    const config: Partial<TCustomConfig> = {
      endpoints: {
        [EModelEndpoint.agents]: {
          disableBuilder: true,
          recursionLimit: 10,
          maxRecursionLimit: 20,
          allowedProviders: ['openai', 'anthropic'],
          capabilities: [AgentCapabilities.tools, AgentCapabilities.actions],
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          [EModelEndpoint.agents]: expect.objectContaining({
            disableBuilder: true,
            recursionLimit: 10,
            maxRecursionLimit: 20,
            allowedProviders: expect.arrayContaining(['openai', 'anthropic']),
            capabilities: expect.arrayContaining([
              AgentCapabilities.tools,
              AgentCapabilities.actions,
            ]),
          }),
        }),
      }),
    );
  });

  it('should configure Agents endpoint with defaults when no config is provided', async () => {
    const config = {};

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          [EModelEndpoint.agents]: expect.objectContaining({
            disableBuilder: false,
            capabilities: expect.arrayContaining([...defaultAgentCapabilities]),
          }),
        }),
      }),
    );
  });

  it('should configure Agents endpoint with defaults when endpoints exist but agents is not defined', async () => {
    const config = {
      endpoints: {
        [EModelEndpoint.openAI]: {
          titleConvo: true,
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          [EModelEndpoint.agents]: expect.objectContaining({
            disableBuilder: false,
            capabilities: expect.arrayContaining([...defaultAgentCapabilities]),
          }),
          [EModelEndpoint.openAI]: expect.objectContaining({
            titleConvo: true,
          }),
        }),
      }),
    );
  });

  it('should correctly configure minimum Azure OpenAI Assistant values', async () => {
    const assistantGroups = [azureGroups[0], { ...azureGroups[1], assistants: true }];
    const config = {
      endpoints: {
        [EModelEndpoint.azureOpenAI]: {
          groups: assistantGroups,
          assistants: true,
        },
      },
    };

    process.env.WESTUS_API_KEY = 'westus-key';
    process.env.EASTUS_API_KEY = 'eastus-key';

    const result = await AppService({ config });
    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          [EModelEndpoint.azureAssistants]: expect.objectContaining({
            capabilities: expect.arrayContaining([
              expect.any(String),
              expect.any(String),
              expect.any(String),
            ]),
          }),
        }),
      }),
    );
  });

  it('should correctly configure Azure OpenAI endpoint based on custom config', async () => {
    const config: Partial<TCustomConfig> = {
      endpoints: {
        [EModelEndpoint.azureOpenAI]: {
          groups: azureGroups,
        },
      },
    };

    process.env.WESTUS_API_KEY = 'westus-key';
    process.env.EASTUS_API_KEY = 'eastus-key';

    const result = await AppService({ config });

    const { modelNames, modelGroupMap, groupMap } = validateAzureGroups(azureGroups);
    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          [EModelEndpoint.azureOpenAI]: expect.objectContaining({
            modelNames,
            modelGroupMap,
            groupMap,
          }),
        }),
      }),
    );
  });

  it('should not modify FILE_UPLOAD environment variables without rate limits', async () => {
    // Setup initial environment variables
    process.env.FILE_UPLOAD_IP_MAX = '10';
    process.env.FILE_UPLOAD_IP_WINDOW = '15';
    process.env.FILE_UPLOAD_USER_MAX = '5';
    process.env.FILE_UPLOAD_USER_WINDOW = '20';

    const initialEnv = { ...process.env };
    const config = {};

    await AppService({ config });

    // Expect environment variables to remain unchanged
    expect(process.env.FILE_UPLOAD_IP_MAX).toEqual(initialEnv.FILE_UPLOAD_IP_MAX);
    expect(process.env.FILE_UPLOAD_IP_WINDOW).toEqual(initialEnv.FILE_UPLOAD_IP_WINDOW);
    expect(process.env.FILE_UPLOAD_USER_MAX).toEqual(initialEnv.FILE_UPLOAD_USER_MAX);
    expect(process.env.FILE_UPLOAD_USER_WINDOW).toEqual(initialEnv.FILE_UPLOAD_USER_WINDOW);
  });

  it('should fallback to default FILE_UPLOAD environment variables when rate limits are unspecified', async () => {
    // Setup initial environment variables to non-default values
    process.env.FILE_UPLOAD_IP_MAX = 'initialMax';
    process.env.FILE_UPLOAD_IP_WINDOW = 'initialWindow';
    process.env.FILE_UPLOAD_USER_MAX = 'initialUserMax';
    process.env.FILE_UPLOAD_USER_WINDOW = 'initialUserWindow';
    const config = {};

    await AppService({ config });

    // Verify that process.env falls back to the initial values
    expect(process.env.FILE_UPLOAD_IP_MAX).toEqual('initialMax');
    expect(process.env.FILE_UPLOAD_IP_WINDOW).toEqual('initialWindow');
    expect(process.env.FILE_UPLOAD_USER_MAX).toEqual('initialUserMax');
    expect(process.env.FILE_UPLOAD_USER_WINDOW).toEqual('initialUserWindow');
  });

  it('should not modify IMPORT environment variables without rate limits', async () => {
    // Setup initial environment variables
    process.env.IMPORT_IP_MAX = '10';
    process.env.IMPORT_IP_WINDOW = '15';
    process.env.IMPORT_USER_MAX = '5';
    process.env.IMPORT_USER_WINDOW = '20';

    const initialEnv = { ...process.env };
    const config = {};

    await AppService({ config });

    // Expect environment variables to remain unchanged
    expect(process.env.IMPORT_IP_MAX).toEqual(initialEnv.IMPORT_IP_MAX);
    expect(process.env.IMPORT_IP_WINDOW).toEqual(initialEnv.IMPORT_IP_WINDOW);
    expect(process.env.IMPORT_USER_MAX).toEqual(initialEnv.IMPORT_USER_MAX);
    expect(process.env.IMPORT_USER_WINDOW).toEqual(initialEnv.IMPORT_USER_WINDOW);
  });

  it('should fallback to default IMPORT environment variables when rate limits are unspecified', async () => {
    // Setup initial environment variables to non-default values
    process.env.IMPORT_IP_MAX = 'initialMax';
    process.env.IMPORT_IP_WINDOW = 'initialWindow';
    process.env.IMPORT_USER_MAX = 'initialUserMax';
    process.env.IMPORT_USER_WINDOW = 'initialUserWindow';
    const config = {};

    await AppService({ config });

    // Verify that process.env falls back to the initial values
    expect(process.env.IMPORT_IP_MAX).toEqual('initialMax');
    expect(process.env.IMPORT_IP_WINDOW).toEqual('initialWindow');
    expect(process.env.IMPORT_USER_MAX).toEqual('initialUserMax');
    expect(process.env.IMPORT_USER_WINDOW).toEqual('initialUserWindow');
  });

  it('should correctly configure endpoint with titlePrompt, titleMethod, and titlePromptTemplate', async () => {
    const config: Partial<TCustomConfig> = {
      endpoints: {
        [EModelEndpoint.openAI]: {
          titleConvo: true,
          titleModel: 'gpt-3.5-turbo',
          titleMethod: 'structured',
          titlePrompt: 'Custom title prompt for conversation',
          titlePromptTemplate: 'Summarize this conversation: {{conversation}}',
        },
        [EModelEndpoint.assistants]: {
          titleMethod: 'functions',
          titlePrompt: 'Generate a title for this assistant conversation',
          titlePromptTemplate: 'Assistant conversation template: {{messages}}',
        },
        [EModelEndpoint.azureOpenAI]: {
          groups: azureGroups,
          titleConvo: true,
          titleMethod: 'completion',
          titleModel: 'gpt-4',
          titlePrompt: 'Azure title prompt',
          titlePromptTemplate: 'Azure conversation: {{context}}',
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          // Check OpenAI endpoint configuration
          [EModelEndpoint.openAI]: expect.objectContaining({
            titleConvo: true,
            titleModel: 'gpt-3.5-turbo',
            titleMethod: 'structured',
            titlePrompt: 'Custom title prompt for conversation',
            titlePromptTemplate: 'Summarize this conversation: {{conversation}}',
          }),
          // Check Assistants endpoint configuration
          [EModelEndpoint.assistants]: expect.objectContaining({
            titleMethod: 'functions',
            titlePrompt: 'Generate a title for this assistant conversation',
            titlePromptTemplate: 'Assistant conversation template: {{messages}}',
          }),
          // Check Azure OpenAI endpoint configuration
          [EModelEndpoint.azureOpenAI]: expect.objectContaining({
            titleConvo: true,
            titleMethod: 'completion',
            titleModel: 'gpt-4',
            titlePrompt: 'Azure title prompt',
            titlePromptTemplate: 'Azure conversation: {{context}}',
          }),
        }),
      }),
    );
  });

  it('should configure Agent endpoint with title generation settings', async () => {
    const config: Partial<TCustomConfig> = {
      endpoints: {
        [EModelEndpoint.agents]: {
          disableBuilder: false,
          titleConvo: true,
          titleModel: 'gpt-4',
          titleMethod: 'structured',
          titlePrompt: 'Generate a descriptive title for this agent conversation',
          titlePromptTemplate: 'Agent conversation summary: {{content}}',
          recursionLimit: 15,
          capabilities: [AgentCapabilities.tools, AgentCapabilities.actions],
          maxCitations: 30,
          maxCitationsPerFile: 7,
          minRelevanceScore: 0.45,
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          [EModelEndpoint.agents]: expect.objectContaining({
            disableBuilder: false,
            titleConvo: true,
            titleModel: 'gpt-4',
            titleMethod: 'structured',
            titlePrompt: 'Generate a descriptive title for this agent conversation',
            titlePromptTemplate: 'Agent conversation summary: {{content}}',
            recursionLimit: 15,
            capabilities: expect.arrayContaining([
              AgentCapabilities.tools,
              AgentCapabilities.actions,
            ]),
          }),
        }),
      }),
    );
  });

  it('should handle missing title configuration options with defaults', async () => {
    const config = {
      endpoints: {
        [EModelEndpoint.openAI]: {
          titleConvo: true,
          // titlePrompt and titlePromptTemplate are not provided
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          [EModelEndpoint.openAI]: expect.objectContaining({
            titleConvo: true,
          }),
        }),
      }),
    );

    // Verify that optional fields are not set when not provided
    expect(result.endpoints[EModelEndpoint.openAI].titlePrompt).toBeUndefined();
    expect(result.endpoints[EModelEndpoint.openAI].titlePromptTemplate).toBeUndefined();
    expect(result.endpoints[EModelEndpoint.openAI].titleMethod).toBeUndefined();
  });

  it('should correctly configure titleEndpoint when specified', async () => {
    const config: Partial<TCustomConfig> = {
      endpoints: {
        [EModelEndpoint.openAI]: {
          titleConvo: true,
          titleModel: 'gpt-3.5-turbo',
          titleEndpoint: EModelEndpoint.anthropic,
          titlePrompt: 'Generate a concise title',
        },
        [EModelEndpoint.agents]: {
          disableBuilder: false,
          capabilities: [AgentCapabilities.tools],
          maxCitations: 30,
          maxCitationsPerFile: 7,
          minRelevanceScore: 0.45,
          titleEndpoint: 'custom-provider',
          titleMethod: 'structured',
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          // Check OpenAI endpoint has titleEndpoint
          [EModelEndpoint.openAI]: expect.objectContaining({
            titleConvo: true,
            titleModel: 'gpt-3.5-turbo',
            titleEndpoint: EModelEndpoint.anthropic,
            titlePrompt: 'Generate a concise title',
          }),
          // Check Agents endpoint has titleEndpoint
          [EModelEndpoint.agents]: expect.objectContaining({
            titleEndpoint: 'custom-provider',
            titleMethod: 'structured',
          }),
        }),
      }),
    );
  });

  it('should correctly configure all endpoint when specified', async () => {
    const config: Partial<TCustomConfig> = {
      endpoints: {
        all: {
          titleConvo: true,
          titleModel: 'gpt-4o-mini',
          titleMethod: 'structured',
          titlePrompt: 'Default title prompt for all endpoints',
          titlePromptTemplate: 'Default template: {{conversation}}',
          titleEndpoint: EModelEndpoint.anthropic,
          streamRate: 50,
        },
        [EModelEndpoint.openAI]: {
          titleConvo: true,
          titleModel: 'gpt-3.5-turbo',
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        // Check that 'all' endpoint config is loaded
        endpoints: expect.objectContaining({
          all: expect.objectContaining({
            titleConvo: true,
            titleModel: 'gpt-4o-mini',
            titleMethod: 'structured',
            titlePrompt: 'Default title prompt for all endpoints',
            titlePromptTemplate: 'Default template: {{conversation}}',
            titleEndpoint: EModelEndpoint.anthropic,
            streamRate: 50,
          }),
          // Check that OpenAI endpoint has its own config
          [EModelEndpoint.openAI]: expect.objectContaining({
            titleConvo: true,
            titleModel: 'gpt-3.5-turbo',
          }),
        }),
      }),
    );
  });
});

describe('AppService updating app config and issuing warnings', () => {
  let initialEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    // Store initial environment variables to restore them after each test
    initialEnv = { ...process.env };

    process.env.CDN_PROVIDER = undefined;
    jest.clearAllMocks();
  });

  afterEach(() => {
    // Restore initial environment variables
    process.env = { ...initialEnv };
  });

  it('should initialize app config with default values if config is empty', async () => {
    const config = {};

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        config: {},
        fileStrategy: FileSources.local,
        registration: expect.objectContaining({
          socialLogins: defaultSocialLogins,
        }),
        balance: expect.objectContaining({
          enabled: false,
          startBalance: undefined,
        }),
      }),
    );
  });

  it('should initialize app config with values from config', async () => {
    // Mock loadCustomConfig to return a specific config object with a complete balance config
    const config: Partial<TCustomConfig> = {
      fileStrategy: FileSources.firebase,
      registration: { socialLogins: ['testLogin'] },
      balance: {
        enabled: false,
        startBalance: 5000,
        autoRefillEnabled: true,
        refillIntervalValue: 15,
        refillIntervalUnit: 'hours',
        refillAmount: 5000,
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        config,
        fileStrategy: config.fileStrategy,
        registration: expect.objectContaining({
          socialLogins: config.registration?.socialLogins,
        }),
        balance: config.balance,
      }),
    );
  });

  it('should apply the assistants endpoint configuration correctly to app config', async () => {
    const config: Partial<TCustomConfig> = {
      endpoints: {
        assistants: {
          version: 'v2',
          retrievalModels: ['gpt-4', 'gpt-3.5-turbo'],
          capabilities: [],
          disableBuilder: true,
          pollIntervalMs: 5000,
          timeoutMs: 30000,
          supportedIds: ['id1', 'id2'],
        },
      },
    };

    const result = await AppService({ config });

    expect(result).toEqual(
      expect.objectContaining({
        endpoints: expect.objectContaining({
          assistants: expect.objectContaining({
            disableBuilder: true,
            pollIntervalMs: 5000,
            timeoutMs: 30000,
            supportedIds: ['id1', 'id2'],
          }),
        }),
      }),
    );

    // Verify excludedIds is undefined when not provided
    expect(result.endpoints.assistants.excludedIds).toBeUndefined();
  });

  it('should not parse environment variable references in OCR config', async () => {
    // Mock custom configuration with env variable references in OCR config
    const config: Partial<TCustomConfig> = {
      ocr: {
        apiKey: '${OCR_API_KEY_CUSTOM_VAR_NAME}',
        baseURL: '${OCR_BASEURL_CUSTOM_VAR_NAME}',
        strategy: OCRStrategy.MISTRAL_OCR,
        mistralModel: 'mistral-medium',
      },
    };

    // Set actual environment variables with different values
    process.env.OCR_API_KEY_CUSTOM_VAR_NAME = 'actual-api-key';
    process.env.OCR_BASEURL_CUSTOM_VAR_NAME = 'https://actual-ocr-url.com';

    const result = await AppService({ config });

    // Verify that the raw string references were preserved and not interpolated
    expect(result).toEqual(
      expect.objectContaining({
        ocr: expect.objectContaining({
          apiKey: '${OCR_API_KEY_CUSTOM_VAR_NAME}',
          baseURL: '${OCR_BASEURL_CUSTOM_VAR_NAME}',
          strategy: 'mistral_ocr',
          mistralModel: 'mistral-medium',
        }),
      }),
    );
  });

  it('should correctly configure peoplePicker permissions when specified', async () => {
    const config = {
      interface: {
        peoplePicker: {
          users: true,
          groups: true,
          roles: true,
        },
      },
    };

    const result = await AppService({ config });

    // Check that interface config includes the permissions
    expect(result).toEqual(
      expect.objectContaining({
        interfaceConfig: expect.objectContaining({
          peoplePicker: expect.objectContaining({
            users: true,
            groups: true,
            roles: true,
          }),
        }),
      }),
    );
  });
});
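The two spec files above pin down `AppService`'s contract fairly tightly; a minimal invocation consistent with those expectations (the import path is the one the tests themselves use):

```ts
import { AppService } from '@librechat/data-schemas';
import { EImageOutputType, FileSources } from 'librechat-data-provider';

// With an empty config, the defaults asserted above apply: local file
// strategy, PNG image output, default social logins, balance disabled.
const appConfig = await AppService({ config: {} });
console.assert(appConfig.imageOutputType === EImageOutputType.PNG);
console.assert(appConfig.fileStrategy === FileSources.local);
```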
packages/api/src/app/cdn.ts (new file, 26 lines)

@@ -0,0 +1,26 @@
import { logger } from '@librechat/data-schemas';
import { FileSources } from 'librechat-data-provider';
import type { AppConfig } from '@librechat/data-schemas';
import { initializeAzureBlobService } from '~/cdn/azure';
import { initializeFirebase } from '~/cdn/firebase';
import { initializeS3 } from '~/cdn/s3';

/**
 * Initializes file storage clients based on the configured file strategy.
 * This should be called after loading the app configuration.
 * @param appConfig - The application configuration
 */
export function initializeFileStorage(appConfig: AppConfig) {
  const { fileStrategy } = appConfig;

  if (fileStrategy === FileSources.firebase) {
    initializeFirebase();
  } else if (fileStrategy === FileSources.azure_blob) {
    initializeAzureBlobService().catch((error) => {
      logger.error('Error initializing Azure Blob Service:', error);
    });
  } else if (fileStrategy === FileSources.s3) {
    initializeS3();
  }
}
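`initializeFileStorage` implies a startup order: resolve the app config first, then initialize whichever storage client `fileStrategy` selects. A sketch of that wiring (the `~/app/cdn` import path is assumed from the file header above):

```ts
import { AppService } from '@librechat/data-schemas';
import { initializeFileStorage } from '~/app/cdn'; // path assumed from the diff header

// Hypothetical bootstrap: AppService resolves librechat.yaml into an
// AppConfig, and the CDN initializer then wires firebase/azure_blob/s3.
async function bootstrapStorage(config: Record<string, unknown>) {
  const appConfig = await AppService({ config });
  initializeFileStorage(appConfig);
  return appConfig;
}
```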
packages/api/src/app/checks.spec.ts (new file, 358 lines)

@@ -0,0 +1,358 @@
jest.mock('librechat-data-provider', () => ({
  ...jest.requireActual('librechat-data-provider'),
  extractVariableName: jest.fn(),
}));

jest.mock('@librechat/data-schemas', () => ({
  ...jest.requireActual('@librechat/data-schemas'),
  logger: {
    debug: jest.fn(),
    warn: jest.fn(),
  },
}));

import { handleRateLimits } from './limits';
import { checkWebSearchConfig } from './checks';
import { logger } from '@librechat/data-schemas';
import { extractVariableName as extract } from 'librechat-data-provider';

const extractVariableName = extract as jest.MockedFunction<typeof extract>;

describe('checkWebSearchConfig', () => {
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    // Clear all mocks
    jest.clearAllMocks();

    // Store original environment
    originalEnv = process.env;

    // Reset process.env
    process.env = { ...originalEnv };
  });

  afterEach(() => {
    // Restore original environment
    process.env = originalEnv;
  });

  describe('when webSearchConfig is undefined or null', () => {
    it('should return early without logging when config is undefined', () => {
      checkWebSearchConfig(undefined);

      expect(logger.debug).not.toHaveBeenCalled();
      expect(logger.warn).not.toHaveBeenCalled();
    });

    it('should return early without logging when config is null', () => {
      checkWebSearchConfig(null);

      expect(logger.debug).not.toHaveBeenCalled();
      expect(logger.warn).not.toHaveBeenCalled();
    });
  });

  describe('when config values are proper environment variable references', () => {
    it('should log debug message for each valid environment variable with value set', () => {
      const config = {
        serperApiKey: '${SERPER_API_KEY}',
        jinaApiKey: '${JINA_API_KEY}',
      };

      extractVariableName.mockReturnValueOnce('SERPER_API_KEY').mockReturnValueOnce('JINA_API_KEY');

      process.env.SERPER_API_KEY = 'test-serper-key';
      process.env.JINA_API_KEY = 'test-jina-key';

      checkWebSearchConfig(config);

      expect(extractVariableName).toHaveBeenCalledWith('${SERPER_API_KEY}');
      expect(extractVariableName).toHaveBeenCalledWith('${JINA_API_KEY}');
      expect(logger.debug).toHaveBeenCalledWith(
        'Web search serperApiKey: Using environment variable SERPER_API_KEY with value set',
      );
      expect(logger.debug).toHaveBeenCalledWith(
        'Web search jinaApiKey: Using environment variable JINA_API_KEY with value set',
      );
      expect(logger.warn).not.toHaveBeenCalled();
    });

    it('should log debug message for environment variables not set in environment', () => {
      const config = {
        cohereApiKey: '${COHERE_API_KEY}',
      };

      extractVariableName.mockReturnValue('COHERE_API_KEY');

      delete process.env.COHERE_API_KEY;

      checkWebSearchConfig(config);

      expect(logger.debug).toHaveBeenCalledWith(
        'Web search cohereApiKey: Using environment variable COHERE_API_KEY (not set in environment, user provided value)',
      );
      expect(logger.warn).not.toHaveBeenCalled();
    });
  });

  describe('when config values are actual values instead of environment variable references', () => {
    it('should warn when serperApiKey contains actual API key', () => {
      const config = {
        serperApiKey: 'sk-1234567890abcdef',
      };

      extractVariableName.mockReturnValue(null);

      checkWebSearchConfig(config);

      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining(
          '❗ Web search configuration error: serperApiKey contains an actual value',
        ),
      );
      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining('Current value: "sk-1234567..."'),
      );
      expect(logger.debug).not.toHaveBeenCalled();
    });

    it('should warn when firecrawlApiUrl contains actual URL', () => {
      const config = {
        firecrawlApiUrl: 'https://api.firecrawl.dev',
      };

      extractVariableName.mockReturnValue(null);

      checkWebSearchConfig(config);

      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining(
          '❗ Web search configuration error: firecrawlApiUrl contains an actual value',
        ),
      );
      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining('Current value: "https://ap..."'),
      );
    });

    it('should include documentation link in warning message', () => {
      const config = {
        firecrawlApiKey: 'fc-actual-key',
      };

      extractVariableName.mockReturnValue(null);

      checkWebSearchConfig(config);

      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining(
          'More info: https://www.librechat.ai/docs/configuration/librechat_yaml/web_search',
        ),
      );
    });
  });

  describe('when config contains mixed value types', () => {
    it('should only process string values and ignore non-string values', () => {
      const config = {
        serperApiKey: '${SERPER_API_KEY}',
        safeSearch: 1,
        scraperTimeout: 7500,
        jinaApiKey: 'actual-key',
      };

      extractVariableName.mockReturnValueOnce('SERPER_API_KEY').mockReturnValueOnce(null);

      process.env.SERPER_API_KEY = 'test-key';

      checkWebSearchConfig(config);

      expect(extractVariableName).toHaveBeenCalledTimes(2);
      expect(logger.debug).toHaveBeenCalledTimes(1);
      expect(logger.warn).toHaveBeenCalledTimes(1);
    });
  });

  describe('edge cases', () => {
    it('should handle config with no web search keys', () => {
      const config = {
        someOtherKey: 'value',
        anotherKey: '${SOME_VAR}',
      };

      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
      /** @ts-expect-error */
      checkWebSearchConfig(config);

      expect(extractVariableName).not.toHaveBeenCalled();
      expect(logger.debug).not.toHaveBeenCalled();
      expect(logger.warn).not.toHaveBeenCalled();
    });

    it('should truncate long values in warning messages', () => {
      const config = {
        serperApiKey: 'this-is-a-very-long-api-key-that-should-be-truncated-in-the-warning-message',
      };

      extractVariableName.mockReturnValue(null);

      checkWebSearchConfig(config);

      expect(logger.warn).toHaveBeenCalledWith(
        expect.stringContaining('Current value: "this-is-a-..."'),
      );
    });
  });
});

describe('handleRateLimits', () => {
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    // Store original environment
    originalEnv = process.env;

    // Reset process.env
    process.env = { ...originalEnv };
  });

  afterEach(() => {
    // Restore original environment
    process.env = originalEnv;
  });

  it('should correctly set FILE_UPLOAD environment variables based on rate limits', () => {
    const rateLimits = {
      fileUploads: {
        ipMax: 100,
        ipWindowInMinutes: 60,
        userMax: 50,
        userWindowInMinutes: 30,
      },
    };

    handleRateLimits(rateLimits);

    // Verify that process.env has been updated according to the rate limits config
    expect(process.env.FILE_UPLOAD_IP_MAX).toEqual('100');
    expect(process.env.FILE_UPLOAD_IP_WINDOW).toEqual('60');
    expect(process.env.FILE_UPLOAD_USER_MAX).toEqual('50');
    expect(process.env.FILE_UPLOAD_USER_WINDOW).toEqual('30');
  });

  it('should correctly set IMPORT environment variables based on rate limits', () => {
    const rateLimits = {
      conversationsImport: {
        ipMax: 150,
        ipWindowInMinutes: 60,
        userMax: 50,
        userWindowInMinutes: 30,
      },
    };

    handleRateLimits(rateLimits);

    // Verify that process.env has been updated according to the rate limits config
    expect(process.env.IMPORT_IP_MAX).toEqual('150');
    expect(process.env.IMPORT_IP_WINDOW).toEqual('60');
    expect(process.env.IMPORT_USER_MAX).toEqual('50');
    expect(process.env.IMPORT_USER_WINDOW).toEqual('30');
  });

  it('should not modify FILE_UPLOAD environment variables without rate limits', () => {
    // Setup initial environment variables
    process.env.FILE_UPLOAD_IP_MAX = '10';
    process.env.FILE_UPLOAD_IP_WINDOW = '15';
    process.env.FILE_UPLOAD_USER_MAX = '5';
    process.env.FILE_UPLOAD_USER_WINDOW = '20';

    const initialEnv = { ...process.env };

    handleRateLimits({});

    // Expect environment variables to remain unchanged
    expect(process.env.FILE_UPLOAD_IP_MAX).toEqual(initialEnv.FILE_UPLOAD_IP_MAX);
    expect(process.env.FILE_UPLOAD_IP_WINDOW).toEqual(initialEnv.FILE_UPLOAD_IP_WINDOW);
    expect(process.env.FILE_UPLOAD_USER_MAX).toEqual(initialEnv.FILE_UPLOAD_USER_MAX);
    expect(process.env.FILE_UPLOAD_USER_WINDOW).toEqual(initialEnv.FILE_UPLOAD_USER_WINDOW);
  });

  it('should not modify IMPORT environment variables without rate limits', () => {
    // Setup initial environment variables
    process.env.IMPORT_IP_MAX = '10';
    process.env.IMPORT_IP_WINDOW = '15';
    process.env.IMPORT_USER_MAX = '5';
    process.env.IMPORT_USER_WINDOW = '20';

    const initialEnv = { ...process.env };

    handleRateLimits({});

    // Expect environment variables to remain unchanged
    expect(process.env.IMPORT_IP_MAX).toEqual(initialEnv.IMPORT_IP_MAX);
    expect(process.env.IMPORT_IP_WINDOW).toEqual(initialEnv.IMPORT_IP_WINDOW);
    expect(process.env.IMPORT_USER_MAX).toEqual(initialEnv.IMPORT_USER_MAX);
    expect(process.env.IMPORT_USER_WINDOW).toEqual(initialEnv.IMPORT_USER_WINDOW);
  });

  it('should handle undefined rateLimits parameter', () => {
    // Setup initial environment variables
    process.env.FILE_UPLOAD_IP_MAX = 'initial';
    process.env.IMPORT_IP_MAX = 'initial';

    handleRateLimits(undefined);

    // Should not modify any environment variables
    expect(process.env.FILE_UPLOAD_IP_MAX).toEqual('initial');
    expect(process.env.IMPORT_IP_MAX).toEqual('initial');
  });

  it('should handle partial rate limit configurations', () => {
    const rateLimits = {
      fileUploads: {
        ipMax: 200,
        // Only setting ipMax, other properties undefined
      },
    };

    handleRateLimits(rateLimits);

    expect(process.env.FILE_UPLOAD_IP_MAX).toEqual('200');
    // Other FILE_UPLOAD env vars should not be set
    expect(process.env.FILE_UPLOAD_IP_WINDOW).toBeUndefined();
    expect(process.env.FILE_UPLOAD_USER_MAX).toBeUndefined();
    expect(process.env.FILE_UPLOAD_USER_WINDOW).toBeUndefined();
  });

  it('should correctly set TTS and STT environment variables based on rate limits', () => {
    const rateLimits = {
      tts: {
        ipMax: 75,
        ipWindowInMinutes: 45,
        userMax: 25,
        userWindowInMinutes: 15,
      },
      stt: {
        ipMax: 80,
        ipWindowInMinutes: 50,
        userMax: 30,
        userWindowInMinutes: 20,
      },
    };

    handleRateLimits(rateLimits);

    // Verify TTS environment variables
    expect(process.env.TTS_IP_MAX).toEqual('75');
    expect(process.env.TTS_IP_WINDOW).toEqual('45');
    expect(process.env.TTS_USER_MAX).toEqual('25');
    expect(process.env.TTS_USER_WINDOW).toEqual('15');

    // Verify STT environment variables
    expect(process.env.STT_IP_MAX).toEqual('80');
    expect(process.env.STT_IP_WINDOW).toEqual('50');
    expect(process.env.STT_USER_MAX).toEqual('30');
    expect(process.env.STT_USER_WINDOW).toEqual('20');
  });
});
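Neither `checkWebSearchConfig` nor `handleRateLimits` appears in this diff excerpt, but the specs above constrain their behavior closely. Two sketches reconstructed from those expectations, not the actual implementations: `${VAR}` references are logged at debug level, literal secrets trigger a truncated warning, and rate-limit config maps onto prefixed env vars. The `Sketch` suffixes and the `RateLimits` types are illustrative assumptions.

```ts
import { logger, webSearchKeys } from '@librechat/data-schemas';
import { extractVariableName } from 'librechat-data-provider';
import type { TCustomConfig } from 'librechat-data-provider';

// Sketch of checkWebSearchConfig, inferred from its spec above.
export function checkWebSearchConfigSketch(webSearchConfig?: TCustomConfig['webSearch'] | null) {
  if (!webSearchConfig) return; // undefined/null configs log nothing
  for (const key of webSearchKeys) {
    const value = (webSearchConfig as Record<string, unknown>)[key];
    if (typeof value !== 'string') continue; // numeric fields like safeSearch are ignored
    const varName = extractVariableName(value);
    if (varName) {
      const status = process.env[varName]
        ? 'with value set'
        : '(not set in environment, user provided value)';
      logger.debug(`Web search ${key}: Using environment variable ${varName} ${status}`);
    } else {
      logger.warn(
        `❗ Web search configuration error: ${key} contains an actual value.\n` +
          `Current value: "${value.substring(0, 10)}..."\n` +
          'More info: https://www.librechat.ai/docs/configuration/librechat_yaml/web_search',
      );
    }
  }
}
```

```ts
// Sketch of handleRateLimits (./limits), inferred from its spec above:
// optional rate-limit config is projected onto prefixed env vars, and only
// defined fields are written, so partial configs set partial env vars.
interface RateLimitEntry {
  ipMax?: number;
  ipWindowInMinutes?: number;
  userMax?: number;
  userWindowInMinutes?: number;
}

type RateLimits = Partial<
  Record<'fileUploads' | 'conversationsImport' | 'tts' | 'stt', RateLimitEntry>
>;

const envPrefixes: Record<keyof RateLimits, string> = {
  fileUploads: 'FILE_UPLOAD',
  conversationsImport: 'IMPORT',
  tts: 'TTS',
  stt: 'STT',
};

export function handleRateLimitsSketch(rateLimits?: RateLimits) {
  if (!rateLimits) return; // an undefined config leaves process.env untouched
  for (const [key, prefix] of Object.entries(envPrefixes) as [keyof RateLimits, string][]) {
    const entry = rateLimits[key];
    if (!entry) continue;
    if (entry.ipMax !== undefined) process.env[`${prefix}_IP_MAX`] = String(entry.ipMax);
    if (entry.ipWindowInMinutes !== undefined)
      process.env[`${prefix}_IP_WINDOW`] = String(entry.ipWindowInMinutes);
    if (entry.userMax !== undefined) process.env[`${prefix}_USER_MAX`] = String(entry.userMax);
    if (entry.userWindowInMinutes !== undefined)
      process.env[`${prefix}_USER_WINDOW`] = String(entry.userWindowInMinutes);
  }
}
```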
307
packages/api/src/app/checks.ts
Normal file
307
packages/api/src/app/checks.ts
Normal file
|
|
@ -0,0 +1,307 @@
|
|||
import { logger, webSearchKeys } from '@librechat/data-schemas';
|
||||
import { Constants, extractVariableName } from 'librechat-data-provider';
|
||||
import type { TCustomConfig } from 'librechat-data-provider';
|
||||
import type { AppConfig } from '@librechat/data-schemas';
|
||||
import { isEnabled, checkEmailConfig } from '~/utils';
|
||||
import { handleRateLimits } from './limits';
|
||||
|
||||
const secretDefaults = {
|
||||
CREDS_KEY: 'f34be427ebb29de8d88c107a71546019685ed8b241d8f2ed00c3df97ad2566f0',
|
||||
CREDS_IV: 'e2341419ec3dd3d19b13a1a87fafcbfb',
|
||||
JWT_SECRET: '16f8c0ef4a5d391b26034086c628469d3f9f497f08163ab9b40137092f2909ef',
|
||||
JWT_REFRESH_SECRET: 'eaa5191f2914e30b9387fd84e254e4ba6fc51b4654968a9b0803b456a54b8418',
|
||||
};
|
||||
|
||||
const deprecatedVariables = [
|
||||
{
|
||||
key: 'CHECK_BALANCE',
|
||||
description:
|
||||
'Please use the `balance` field in the `librechat.yaml` config file instead.\nMore info: https://librechat.ai/docs/configuration/librechat_yaml/object_structure/balance#overview',
|
||||
},
|
||||
{
|
||||
key: 'START_BALANCE',
|
||||
description:
|
||||
'Please use the `balance` field in the `librechat.yaml` config file instead.\nMore info: https://librechat.ai/docs/configuration/librechat_yaml/object_structure/balance#overview',
|
||||
},
|
||||
{
|
||||
key: 'GOOGLE_API_KEY',
|
||||
description:
|
||||
'Please use the `GOOGLE_SEARCH_API_KEY` environment variable for the Google Search Tool instead.',
|
||||
},
|
||||
];
|
||||
|
||||
export const deprecatedAzureVariables = [
|
||||
/* "related to" precedes description text */
|
||||
{ key: 'AZURE_OPENAI_DEFAULT_MODEL', description: 'setting a default model' },
|
||||
{ key: 'AZURE_OPENAI_MODELS', description: 'setting models' },
|
||||
{
|
||||
key: 'AZURE_USE_MODEL_AS_DEPLOYMENT_NAME',
|
||||
description: 'using model names as deployment names',
|
||||
},
|
||||
{ key: 'AZURE_API_KEY', description: 'setting a single Azure API key' },
|
||||
{ key: 'AZURE_OPENAI_API_INSTANCE_NAME', description: 'setting a single Azure instance name' },
|
||||
{
|
||||
key: 'AZURE_OPENAI_API_DEPLOYMENT_NAME',
|
||||
description: 'setting a single Azure deployment name',
|
||||
},
|
||||
{ key: 'AZURE_OPENAI_API_VERSION', description: 'setting a single Azure API version' },
|
||||
{
|
||||
key: 'AZURE_OPENAI_API_COMPLETIONS_DEPLOYMENT_NAME',
|
||||
description: 'setting a single Azure completions deployment name',
|
||||
},
|
||||
{
|
||||
key: 'AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME',
|
||||
description: 'setting a single Azure embeddings deployment name',
|
||||
},
|
||||
{
|
||||
key: 'PLUGINS_USE_AZURE',
|
||||
description: 'using Azure for Plugins',
|
||||
},
|
||||
];
|
||||
|
||||
export const conflictingAzureVariables = [
|
||||
{
|
||||
key: 'INSTANCE_NAME',
|
||||
},
|
||||
{
|
||||
key: 'DEPLOYMENT_NAME',
|
||||
},
|
||||
];
|
||||
|
||||
/**
|
||||
* Checks the password reset configuration for security issues.
|
||||
*/
|
||||
function checkPasswordReset() {
|
||||
const emailEnabled = checkEmailConfig();
|
||||
const passwordResetAllowed = isEnabled(process.env.ALLOW_PASSWORD_RESET);
|
||||
|
||||
if (!emailEnabled && passwordResetAllowed) {
|
||||
logger.warn(
|
||||
`❗❗❗
|
||||
|
||||
Password reset is enabled with \`ALLOW_PASSWORD_RESET\` but email service is not configured.
|
||||
|
||||
This setup is insecure as password reset links will be issued with a recognized email.
|
||||
|
||||
Please configure email service for secure password reset functionality.
|
||||
|
||||
https://www.librechat.ai/docs/configuration/authentication/email
|
||||
|
||||
❗❗❗`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks environment variables for default secrets and deprecated variables.
|
||||
* Logs warnings for any default secret values being used and for usage of deprecated variables.
|
||||
* Advises on replacing default secrets and updating deprecated variables.
|
||||
* @param {Object} options
|
||||
* @param {Function} options.isEnabled - Function to check if a feature is enabled
|
||||
* @param {Function} options.checkEmailConfig - Function to check email configuration
|
||||
*/
|
||||
export function checkVariables() {
|
||||
let hasDefaultSecrets = false;
|
||||
for (const [key, value] of Object.entries(secretDefaults)) {
|
||||
if (process.env[key] === value) {
|
||||
logger.warn(`Default value for ${key} is being used.`);
|
||||
if (!hasDefaultSecrets) {
|
||||
hasDefaultSecrets = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (hasDefaultSecrets) {
|
||||
logger.info('Please replace any default secret values.');
|
||||
logger.info(`\u200B
|
||||
|
||||
For your convenience, use this tool to generate your own secret values:
|
||||
https://www.librechat.ai/toolkit/creds_generator
|
||||
|
||||
\u200B`);
|
||||
}
|
||||
|
||||
deprecatedVariables.forEach(({ key, description }) => {
|
||||
if (process.env[key]) {
|
||||
logger.warn(`The \`${key}\` environment variable is deprecated. ${description}`);
|
||||
}
|
||||
});
|
||||
|
||||
checkPasswordReset();
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks the health of auxiliary API's by attempting a fetch request to their respective `/health` endpoints.
|
||||
* Logs information or warning based on the API's availability and response.
|
||||
*/
|
||||
export async function checkHealth() {
|
||||
try {
|
||||
const response = await fetch(`${process.env.RAG_API_URL}/health`);
|
||||
if (response?.ok && response?.status === 200) {
|
||||
logger.info(`RAG API is running and reachable at ${process.env.RAG_API_URL}.`);
|
||||
}
|
||||
} catch {
|
||||
logger.warn(
|
||||
`RAG API is either not running or not reachable at ${process.env.RAG_API_URL}, you may experience errors with file uploads.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Checks for the usage of deprecated and conflicting Azure variables.
 * Logs warnings for any deprecated or conflicting environment variables found, indicating potential issues with the `azureOpenAI` endpoint configuration.
 */
function checkAzureVariables() {
  deprecatedAzureVariables.forEach(({ key, description }) => {
    if (process.env[key]) {
      logger.warn(
        `The \`${key}\` environment variable (related to ${description}) should not be used in combination with the \`azureOpenAI\` endpoint configuration, as you will experience conflicts and errors.`,
      );
    }
  });

  conflictingAzureVariables.forEach(({ key }) => {
    if (process.env[key]) {
      logger.warn(
        `The \`${key}\` environment variable should not be used in combination with the \`azureOpenAI\` endpoint configuration, as you may experience conflicts with the defined placeholders for mapping to the current model grouping using the same name.`,
      );
    }
  });
}

export function checkInterfaceConfig(appConfig: AppConfig) {
  const interfaceConfig = appConfig.interfaceConfig;
  let i = 0;
  const logSettings = () => {
    // Log the interface object and the model specs object (without the list) for reference
    logger.warn(`\`interface\` settings:\n${JSON.stringify(interfaceConfig, null, 2)}`);
    logger.warn(
      `\`modelSpecs\` settings:\n${JSON.stringify(
        { ...(appConfig?.modelSpecs ?? {}), list: undefined },
        null,
        2,
      )}`,
    );
  };

  // Warn if `modelSpecs.prioritize` is true while presets are enabled: a default preset will conflict with prioritized model specs.
  if (appConfig?.modelSpecs?.prioritize && interfaceConfig?.presets) {
    logger.warn(
      "Note: Prioritizing model specs can conflict with default presets if a default preset is set. It's recommended to disable presets from the interface or disable use of a default preset.",
    );
    if (i === 0) i++;
  }

  // Warn if `modelSpecs.enforce` is true while any of endpointsMenu, modelSelect, presets, or parameters is enabled: enforcing model specs conflicts with these options.
  if (
    appConfig?.modelSpecs?.enforce &&
    (interfaceConfig?.endpointsMenu ||
      interfaceConfig?.modelSelect ||
      interfaceConfig?.presets ||
      interfaceConfig?.parameters)
  ) {
    logger.warn(
      "Note: Enforcing model specs can conflict with the interface options: endpointsMenu, modelSelect, presets, and parameters. It's recommended to disable these options from the interface or disable enforcing model specs.",
    );
    if (i === 0) i++;
  }
  // Warn if `enforce` is true and `prioritize` is not: enforcing model specs without prioritizing them can lead to unexpected behavior.
  if (appConfig?.modelSpecs?.enforce && !appConfig?.modelSpecs?.prioritize) {
    logger.warn(
      "Note: Enforcing model specs without prioritizing them can lead to unexpected behavior. It's recommended to enable prioritizing model specs if enforcing them.",
    );
    if (i === 0) i++;
  }

  if (i > 0) {
    logSettings();
  }
}

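As an illustration, a minimal (hypothetical) config shape that would trip the enforce-related warnings above:

const exampleConfig = {
  interfaceConfig: { presets: true, parameters: true },
  modelSpecs: { enforce: true, prioritize: false },
} as AppConfig;

checkInterfaceConfig(exampleConfig);
// → warns that enforcing model specs conflicts with presets/parameters,
//   and that enforcing without prioritizing can lead to unexpected behavior.
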
/**
 * Performs startup checks, including environment variable validation and health checks.
 * This should be called during application startup, before initializing services.
 * @param [appConfig] - The application configuration object.
 */
export async function performStartupChecks(appConfig?: AppConfig) {
  checkVariables();
  if (appConfig?.endpoints?.azureOpenAI) {
    checkAzureVariables();
  }
  if (appConfig) {
    checkInterfaceConfig(appConfig);
  }
  if (appConfig?.config) {
    checkConfig(appConfig.config);
  }
  if (appConfig?.config?.webSearch) {
    checkWebSearchConfig(appConfig.config.webSearch);
  }
  if (appConfig?.config?.rateLimits) {
    handleRateLimits(appConfig.config.rateLimits);
  }
  await checkHealth();
}

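A sketch of a typical call site; the loader name and import path here are assumptions for illustration, not part of this diff:

import { performStartupChecks } from '~/app';

async function startup() {
  const appConfig = await loadAppConfig(); // hypothetical config loader
  await performStartupChecks(appConfig);
  // ...initialize services only after the checks have completed
}
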
/**
 * Performs basic checks on the loaded config object.
 * @param config - The loaded custom configuration.
 */
export function checkConfig(config: Partial<TCustomConfig>) {
  if (config.version !== Constants.CONFIG_VERSION) {
    logger.info(
      `\nOutdated Config version: ${config.version}
      Latest version: ${Constants.CONFIG_VERSION}

      Check out the Config changelogs for the latest options and features added.

      https://www.librechat.ai/changelog\n\n`,
    );
  }
}

/**
 * Checks web search configuration values to ensure they are environment variable references.
 * Warns if actual API keys or URLs are used instead of environment variable references.
 * Logs debug information for properly configured environment variable references.
 * @param webSearchConfig - The loaded web search configuration object.
 */
export function checkWebSearchConfig(webSearchConfig?: Partial<TCustomConfig['webSearch']> | null) {
  if (!webSearchConfig) {
    return;
  }

  webSearchKeys.forEach((key) => {
    const value = webSearchConfig[key as keyof typeof webSearchConfig];

    if (typeof value === 'string') {
      const varName = extractVariableName(value);

      if (varName) {
        // This is a proper environment variable reference
        const actualValue = process.env[varName];
        if (actualValue) {
          logger.debug(`Web search ${key}: Using environment variable ${varName} with value set`);
        } else {
          logger.debug(
            `Web search ${key}: Using environment variable ${varName} (not set in environment, user provided value)`,
          );
        }
      } else {
        // This is not an environment variable reference - warn user
        logger.warn(
          `❗ Web search configuration error: ${key} contains an actual value instead of an environment variable reference.

          Current value: "${value.substring(0, 10)}..."

          This is incorrect! You should use environment variable references in your librechat.yaml file, such as:
          ${key}: "\${YOUR_ENV_VAR_NAME}"

          Then set the actual API key in your .env file or environment variables.

          More info: https://www.librechat.ai/docs/configuration/librechat_yaml/web_search`,
        );
      }
    }
  });
}
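The behavior assumed of extractVariableName (defined elsewhere in this package), sketched:

// extractVariableName('${SERPER_API_KEY}') → 'SERPER_API_KEY' (env var reference: debug path)
// extractVariableName('sk-abc123xyz')      → null             (literal value: warning path)
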
@@ -1,8 +1,8 @@
 import { getTransactionsConfig, getBalanceConfig } from './config';
 import { logger } from '@librechat/data-schemas';
 import { FileSources } from 'librechat-data-provider';
-import type { AppConfig } from '~/types';
 import type { TCustomConfig } from 'librechat-data-provider';
+import type { AppConfig } from '@librechat/data-schemas';

 // Helper function to create a minimal AppConfig for testing
 const createTestAppConfig = (overrides: Partial<AppConfig> = {}): AppConfig => {
@@ -1,8 +1,8 @@
+import { logger } from '@librechat/data-schemas';
 import { EModelEndpoint, removeNullishValues } from 'librechat-data-provider';
 import type { TCustomConfig, TEndpoint, TTransactionsConfig } from 'librechat-data-provider';
-import type { AppConfig } from '~/types';
+import type { AppConfig } from '@librechat/data-schemas';
 import { isEnabled, normalizeEndpointName } from '~/utils';
-import { logger } from '@librechat/data-schemas';

 /**
  * Retrieves the balance configuration object
@@ -24,7 +24,7 @@ export function getBalanceConfig(appConfig?: AppConfig): Partial<TCustomConfig['
 /**
  * Retrieves the transactions configuration object
  * */
-export function getTransactionsConfig(appConfig?: AppConfig): TTransactionsConfig {
+export function getTransactionsConfig(appConfig?: AppConfig): Partial<TTransactionsConfig> {
   const defaultConfig: TTransactionsConfig = { enabled: true };

   if (!appConfig) {
@@ -66,5 +66,5 @@ export const getCustomEndpointConfig = ({

 export function hasCustomUserVars(appConfig?: AppConfig): boolean {
   const mcpServers = appConfig?.mcpConfig;
-  return Object.values(mcpServers ?? {}).some((server) => server.customUserVars);
+  return Object.values(mcpServers ?? {}).some((server) => server?.customUserVars);
 }
@@ -1,3 +1,4 @@
 export * from './config';
-export * from './interface';
 export * from './permissions';
+export * from './cdn';
+export * from './checks';
@@ -1,108 +0,0 @@
import { logger } from '@librechat/data-schemas';
import { removeNullishValues } from 'librechat-data-provider';
import type { TCustomConfig, TConfigDefaults } from 'librechat-data-provider';
import type { AppConfig } from '~/types/config';
import { isMemoryEnabled } from '~/memory/config';

/**
 * Loads the default interface object.
 * @param params - The loaded custom configuration.
 * @param params.config - The loaded custom configuration.
 * @param params.configDefaults - The custom configuration default values.
 * @returns default interface object.
 */
export async function loadDefaultInterface({
  config,
  configDefaults,
}: {
  config?: Partial<TCustomConfig>;
  configDefaults: TConfigDefaults;
}): Promise<AppConfig['interfaceConfig']> {
  const { interface: interfaceConfig } = config ?? {};
  const { interface: defaults } = configDefaults;
  const hasModelSpecs = (config?.modelSpecs?.list?.length ?? 0) > 0;
  const includesAddedEndpoints = (config?.modelSpecs?.addedEndpoints?.length ?? 0) > 0;

  const memoryConfig = config?.memory;
  const memoryEnabled = isMemoryEnabled(memoryConfig);
  /** Only disable memories if memory config is present but disabled/invalid */
  const shouldDisableMemories = memoryConfig && !memoryEnabled;

  const loadedInterface: AppConfig['interfaceConfig'] = removeNullishValues({
    // UI elements - use schema defaults
    endpointsMenu:
      interfaceConfig?.endpointsMenu ?? (hasModelSpecs ? false : defaults.endpointsMenu),
    modelSelect:
      interfaceConfig?.modelSelect ??
      (hasModelSpecs ? includesAddedEndpoints : defaults.modelSelect),
    parameters: interfaceConfig?.parameters ?? (hasModelSpecs ? false : defaults.parameters),
    presets: interfaceConfig?.presets ?? (hasModelSpecs ? false : defaults.presets),
    sidePanel: interfaceConfig?.sidePanel ?? defaults.sidePanel,
    privacyPolicy: interfaceConfig?.privacyPolicy ?? defaults.privacyPolicy,
    termsOfService: interfaceConfig?.termsOfService ?? defaults.termsOfService,
    mcpServers: interfaceConfig?.mcpServers ?? defaults.mcpServers,
    customWelcome: interfaceConfig?.customWelcome ?? defaults.customWelcome,

    // Permissions - only include if explicitly configured
    bookmarks: interfaceConfig?.bookmarks,
    memories: shouldDisableMemories ? false : interfaceConfig?.memories,
    prompts: interfaceConfig?.prompts,
    multiConvo: interfaceConfig?.multiConvo,
    agents: interfaceConfig?.agents,
    temporaryChat: interfaceConfig?.temporaryChat,
    runCode: interfaceConfig?.runCode,
    webSearch: interfaceConfig?.webSearch,
    fileSearch: interfaceConfig?.fileSearch,
    fileCitations: interfaceConfig?.fileCitations,
    peoplePicker: interfaceConfig?.peoplePicker,
    marketplace: interfaceConfig?.marketplace,
  });

  let i = 0;
  const logSettings = () => {
    // log interface object and model specs object (without list) for reference
    logger.warn(`\`interface\` settings:\n${JSON.stringify(loadedInterface, null, 2)}`);
    logger.warn(
      `\`modelSpecs\` settings:\n${JSON.stringify(
        { ...(config?.modelSpecs ?? {}), list: undefined },
        null,
        2,
      )}`,
    );
  };

  // warn about config.modelSpecs.prioritize if true and presets are enabled, that default presets will conflict with prioritizing model specs.
  if (config?.modelSpecs?.prioritize && loadedInterface.presets) {
    logger.warn(
      "Note: Prioritizing model specs can conflict with default presets if a default preset is set. It's recommended to disable presets from the interface or disable use of a default preset.",
    );
    if (i === 0) i++;
  }

  // warn about config.modelSpecs.enforce if true and if any of these, endpointsMenu, modelSelect, presets, or parameters are enabled, that enforcing model specs can conflict with these options.
  if (
    config?.modelSpecs?.enforce &&
    (loadedInterface.endpointsMenu ||
      loadedInterface.modelSelect ||
      loadedInterface.presets ||
      loadedInterface.parameters)
  ) {
    logger.warn(
      "Note: Enforcing model specs can conflict with the interface options: endpointsMenu, modelSelect, presets, and parameters. It's recommended to disable these options from the interface or disable enforcing model specs.",
    );
    if (i === 0) i++;
  }
  // warn if enforce is true and prioritize is not, that enforcing model specs without prioritizing them can lead to unexpected behavior.
  if (config?.modelSpecs?.enforce && !config?.modelSpecs?.prioritize) {
    logger.warn(
      "Note: Enforcing model specs without prioritizing them can lead to unexpected behavior. It's recommended to enable prioritizing model specs if enforcing them.",
    );
    if (i === 0) i++;
  }

  if (i > 0) {
    logSettings();
  }

  return loadedInterface;
}
packages/api/src/app/limits.ts
@@ -0,0 +1,55 @@
import { RateLimitPrefix } from 'librechat-data-provider';
import type { TCustomConfig } from 'librechat-data-provider';

/**
 * Maps the custom rate limit configuration to the environment variables consumed by the rate limiters.
 * @param rateLimits - The `rateLimits` section of the custom configuration.
 */
export const handleRateLimits = (rateLimits?: TCustomConfig['rateLimits']) => {
  if (!rateLimits) {
    return;
  }

  const rateLimitKeys = {
    fileUploads: RateLimitPrefix.FILE_UPLOAD,
    conversationsImport: RateLimitPrefix.IMPORT,
    tts: RateLimitPrefix.TTS,
    stt: RateLimitPrefix.STT,
  };

  Object.entries(rateLimitKeys).forEach(([key, prefix]) => {
    const rateLimit = rateLimits[key as keyof typeof rateLimitKeys];
    if (rateLimit) {
      setRateLimitEnvVars(prefix, rateLimit);
    }
  });
};

type RateLimitConfig = {
  ipMax?: number | undefined;
  ipWindowInMinutes?: number | undefined;
  userMax?: number | undefined;
  userWindowInMinutes?: number | undefined;
};

/**
 * Set environment variables for rate limit configurations
 *
 * @param prefix - Prefix for environment variable names
 * @param rateLimit - Rate limit configuration object
 */
const setRateLimitEnvVars = (prefix: string, rateLimit: RateLimitConfig) => {
  const envVarsMapping = {
    ipMax: `${prefix}_IP_MAX`,
    ipWindowInMinutes: `${prefix}_IP_WINDOW`,
    userMax: `${prefix}_USER_MAX`,
    userWindowInMinutes: `${prefix}_USER_WINDOW`,
  };

  Object.entries(envVarsMapping).forEach(([key, envVar]) => {
    const value = rateLimit[key as keyof RateLimitConfig];
    if (value !== undefined) {
      process.env[envVar] = value.toString();
    }
  });
};
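A usage sketch: assuming RateLimitPrefix.FILE_UPLOAD resolves to 'FILE_UPLOAD', the (hypothetical) config below maps onto the corresponding env vars:

handleRateLimits({
  fileUploads: { ipMax: 100, ipWindowInMinutes: 15, userMax: 50, userWindowInMinutes: 15 },
});
// → FILE_UPLOAD_IP_MAX='100', FILE_UPLOAD_IP_WINDOW='15',
//   FILE_UPLOAD_USER_MAX='50', FILE_UPLOAD_USER_WINDOW='15'
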
@@ -1,8 +1,8 @@
-import { loadDefaultInterface } from '@librechat/data-schemas';
 import { SystemRoles, Permissions, PermissionTypes, roleDefaults } from 'librechat-data-provider';
 import type { TConfigDefaults, TCustomConfig } from 'librechat-data-provider';
-import type { AppConfig } from '~/types/config';
+import type { AppConfig } from '@librechat/data-schemas';
 import { updateInterfacePermissions } from './permissions';
+import { loadDefaultInterface } from './interface';

 const mockUpdateAccessPermissions = jest.fn();
 const mockGetRoleByName = jest.fn();
@@ -6,8 +6,7 @@ import {
   PermissionTypes,
   getConfigDefaults,
 } from 'librechat-data-provider';
-import type { IRole } from '@librechat/data-schemas';
-import type { AppConfig } from '~/types/config';
+import type { IRole, AppConfig } from '@librechat/data-schemas';
 import { isMemoryEnabled } from '~/memory/config';

 /**
packages/api/src/cache/__tests__/cacheConfig.spec.ts
@@ -0,0 +1,182 @@
describe('cacheConfig', () => {
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    originalEnv = { ...process.env };

    // Clear all related env vars first
    delete process.env.REDIS_URI;
    delete process.env.REDIS_CA;
    delete process.env.REDIS_KEY_PREFIX_VAR;
    delete process.env.REDIS_KEY_PREFIX;
    delete process.env.USE_REDIS;
    delete process.env.USE_REDIS_CLUSTER;
    delete process.env.REDIS_PING_INTERVAL;
    delete process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES;

    // Clear module cache
    jest.resetModules();
  });

  afterEach(() => {
    process.env = originalEnv;
    jest.resetModules();
  });

  describe('REDIS_KEY_PREFIX validation and resolution', () => {
    test('should throw error when both REDIS_KEY_PREFIX_VAR and REDIS_KEY_PREFIX are set', async () => {
      process.env.REDIS_KEY_PREFIX_VAR = 'DEPLOYMENT_ID';
      process.env.REDIS_KEY_PREFIX = 'manual-prefix';

      await expect(async () => {
        await import('../cacheConfig');
      }).rejects.toThrow('Only either REDIS_KEY_PREFIX_VAR or REDIS_KEY_PREFIX can be set.');
    });

    test('should resolve REDIS_KEY_PREFIX from variable reference', async () => {
      process.env.REDIS_KEY_PREFIX_VAR = 'DEPLOYMENT_ID';
      process.env.DEPLOYMENT_ID = 'test-deployment-123';

      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.REDIS_KEY_PREFIX).toBe('test-deployment-123');
    });

    test('should use direct REDIS_KEY_PREFIX value', async () => {
      process.env.REDIS_KEY_PREFIX = 'direct-prefix';

      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.REDIS_KEY_PREFIX).toBe('direct-prefix');
    });

    test('should default to empty string when no prefix is configured', async () => {
      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.REDIS_KEY_PREFIX).toBe('');
    });

    test('should handle empty variable reference', async () => {
      process.env.REDIS_KEY_PREFIX_VAR = 'EMPTY_VAR';
      process.env.EMPTY_VAR = '';

      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.REDIS_KEY_PREFIX).toBe('');
    });

    test('should handle undefined variable reference', async () => {
      process.env.REDIS_KEY_PREFIX_VAR = 'UNDEFINED_VAR';

      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.REDIS_KEY_PREFIX).toBe('');
    });
  });

  describe('USE_REDIS and REDIS_URI validation', () => {
    test('should throw error when USE_REDIS is enabled but REDIS_URI is not set', async () => {
      process.env.USE_REDIS = 'true';

      await expect(async () => {
        await import('../cacheConfig');
      }).rejects.toThrow('USE_REDIS is enabled but REDIS_URI is not set.');
    });

    test('should not throw error when USE_REDIS is enabled and REDIS_URI is set', async () => {
      process.env.USE_REDIS = 'true';
      process.env.REDIS_URI = 'redis://localhost:6379';

      const importModule = async () => {
        await import('../cacheConfig');
      };
      await expect(importModule()).resolves.not.toThrow();
    });

    test('should handle empty REDIS_URI when USE_REDIS is enabled', async () => {
      process.env.USE_REDIS = 'true';
      process.env.REDIS_URI = '';

      await expect(async () => {
        await import('../cacheConfig');
      }).rejects.toThrow('USE_REDIS is enabled but REDIS_URI is not set.');
    });
  });

  describe('USE_REDIS_CLUSTER configuration', () => {
    test('should default to false when USE_REDIS_CLUSTER is not set', async () => {
      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.USE_REDIS_CLUSTER).toBe(false);
    });

    test('should be false when USE_REDIS_CLUSTER is set to false', async () => {
      process.env.USE_REDIS_CLUSTER = 'false';

      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.USE_REDIS_CLUSTER).toBe(false);
    });

    test('should be true when USE_REDIS_CLUSTER is set to true', async () => {
      process.env.USE_REDIS_CLUSTER = 'true';

      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.USE_REDIS_CLUSTER).toBe(true);
    });

    test('should work with USE_REDIS enabled and REDIS_URI set', async () => {
      process.env.USE_REDIS_CLUSTER = 'true';
      process.env.USE_REDIS = 'true';
      process.env.REDIS_URI = 'redis://localhost:6379';

      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.USE_REDIS_CLUSTER).toBe(true);
      expect(cacheConfig.USE_REDIS).toBe(true);
      expect(cacheConfig.REDIS_URI).toBe('redis://localhost:6379');
    });
  });

  describe('REDIS_CA file reading', () => {
    test('should be null when REDIS_CA is not set', async () => {
      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.REDIS_CA).toBeNull();
    });
  });

  describe('REDIS_PING_INTERVAL configuration', () => {
    test('should default to 0 when REDIS_PING_INTERVAL is not set', async () => {
      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.REDIS_PING_INTERVAL).toBe(0);
    });

    test('should use provided REDIS_PING_INTERVAL value', async () => {
      process.env.REDIS_PING_INTERVAL = '300';

      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.REDIS_PING_INTERVAL).toBe(300);
    });
  });

  describe('FORCED_IN_MEMORY_CACHE_NAMESPACES validation', () => {
    test('should parse comma-separated cache keys correctly', async () => {
      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = ' ROLES, MESSAGES ';

      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual(['ROLES', 'MESSAGES']);
    });

    test('should throw error for invalid cache keys', async () => {
      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = 'INVALID_KEY,ROLES';

      await expect(async () => {
        await import('../cacheConfig');
      }).rejects.toThrow('Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: INVALID_KEY');
    });

    test('should handle empty string gracefully', async () => {
      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = '';

      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]);
    });

    test('should handle undefined env var gracefully', async () => {
      const { cacheConfig } = await import('../cacheConfig');
      expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]);
    });
  });
});
packages/api/src/cache/__tests__/cacheFactory/limiterCache.integration.spec.ts
@@ -0,0 +1,113 @@
import type { RedisStore } from 'rate-limit-redis';

describe('limiterCache', () => {
  let originalEnv: NodeJS.ProcessEnv;
  let testStore: RedisStore | undefined = undefined;

  beforeEach(() => {
    originalEnv = { ...process.env };

    // Clear cache-related env vars
    delete process.env.USE_REDIS;
    delete process.env.REDIS_URI;
    delete process.env.USE_REDIS_CLUSTER;
    delete process.env.REDIS_PING_INTERVAL;
    delete process.env.REDIS_KEY_PREFIX;

    // Set test configuration
    process.env.REDIS_PING_INTERVAL = '0';
    process.env.REDIS_KEY_PREFIX = 'Cache-Integration-Test';
    process.env.REDIS_RETRY_MAX_ATTEMPTS = '5';

    // Clear require cache to reload modules
    jest.resetModules();
  });

  afterEach(async () => {
    process.env = originalEnv;
    jest.resetModules();
  });

  test('should throw error when prefix is not provided', async () => {
    const cacheFactory = await import('../../cacheFactory');
    expect(() => cacheFactory.limiterCache('')).toThrow('prefix is required');
  });

  test('should return undefined when USE_REDIS is false', async () => {
    process.env.USE_REDIS = 'false';

    const cacheFactory = await import('../../cacheFactory');
    testStore = cacheFactory.limiterCache('test-limiter');

    expect(testStore).toBeUndefined();
  });

  test('should return RedisStore with sendCommand when USE_REDIS is true', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    const cacheFactory = await import('../../cacheFactory');
    const redisClients = await import('../../redisClients');
    const { ioredisClient } = redisClients;
    testStore = cacheFactory.limiterCache('test-limiter');

    // Wait for Redis connection to be ready
    if (ioredisClient && ioredisClient.status !== 'ready') {
      await new Promise<void>((resolve) => {
        ioredisClient.once('ready', resolve);
      });
    }

    // Verify it returns a RedisStore instance
    expect(testStore).toBeDefined();
    expect(testStore!.constructor.name).toBe('RedisStore');
    expect(testStore!.prefix).toBe('test-limiter:');
    expect(typeof testStore!.sendCommand).toBe('function');

    const testKey = 'user:123';

    // SET operation
    await testStore!.sendCommand('SET', testKey, '1', 'EX', '60');

    // Verify the key was created WITHOUT prefix using ioredis
    // Note: Using call method since get method seems to have issues in test environment
    // Type assertion for ioredis call method
    type RedisClientWithCall = typeof ioredisClient & {
      call: (command: string, key: string) => Promise<string | null>;
    };
    const directValue = await (ioredisClient as RedisClientWithCall).call('GET', testKey);

    expect(directValue).toBe('1');

    // GET operation
    const value = await testStore!.sendCommand('GET', testKey);
    expect(value).toBe('1');

    // INCR operation
    const incremented = await testStore!.sendCommand('INCR', testKey);
    expect(incremented).toBe(2);

    // Verify increment worked with ioredis
    const incrementedValue = await (ioredisClient as RedisClientWithCall).call('GET', testKey);
    expect(incrementedValue).toBe('2');

    // TTL operation
    const ttl = (await testStore!.sendCommand('TTL', testKey)) as number;
    expect(ttl).toBeGreaterThan(0);
    expect(ttl).toBeLessThanOrEqual(60);

    // DEL operation
    const deleted = await testStore!.sendCommand('DEL', testKey);
    expect(deleted).toBe(1);

    // Verify deletion
    const afterDelete = await testStore!.sendCommand('GET', testKey);
    expect(afterDelete).toBeNull();
    const directAfterDelete = await ioredisClient!.get(testKey);
    expect(directAfterDelete).toBeNull();

    // Test error handling
    await expect(testStore!.sendCommand('INVALID_COMMAND')).rejects.toThrow();
  });
});
packages/api/src/cache/__tests__/cacheFactory/sessionCache.integration.spec.ts
@@ -0,0 +1,211 @@
interface SessionData {
  [key: string]: unknown;
  cookie?: { maxAge: number };
  user?: { id: string; name: string };
  userId?: string;
}

interface SessionStore {
  prefix?: string;
  set: (id: string, data: SessionData, callback?: (err?: Error) => void) => void;
  get: (id: string, callback: (err: Error | null, data?: SessionData | null) => void) => void;
  destroy: (id: string, callback?: (err?: Error) => void) => void;
  touch: (id: string, data: SessionData, callback?: (err?: Error) => void) => void;
  on?: (event: string, handler: (...args: unknown[]) => void) => void;
}

describe('sessionCache', () => {
  let originalEnv: NodeJS.ProcessEnv;

  // Helper to make session stores async
  const asyncStore = (store: SessionStore) => ({
    set: (id: string, data: SessionData) =>
      new Promise<void>((resolve) => store.set(id, data, () => resolve())),
    get: (id: string) =>
      new Promise<SessionData | null | undefined>((resolve) =>
        store.get(id, (_, data) => resolve(data)),
      ),
    destroy: (id: string) => new Promise<void>((resolve) => store.destroy(id, () => resolve())),
    touch: (id: string, data: SessionData) =>
      new Promise<void>((resolve) => store.touch(id, data, () => resolve())),
  });

  beforeEach(() => {
    originalEnv = { ...process.env };

    // Clear cache-related env vars
    delete process.env.USE_REDIS;
    delete process.env.REDIS_URI;
    delete process.env.USE_REDIS_CLUSTER;
    delete process.env.REDIS_PING_INTERVAL;
    delete process.env.REDIS_KEY_PREFIX;

    // Set test configuration
    process.env.REDIS_PING_INTERVAL = '0';
    process.env.REDIS_KEY_PREFIX = 'Cache-Integration-Test';
    process.env.REDIS_RETRY_MAX_ATTEMPTS = '5';

    // Clear require cache to reload modules
    jest.resetModules();
  });

  afterEach(async () => {
    process.env = originalEnv;
    jest.resetModules();
  });

  test('should return ConnectRedis store when USE_REDIS is true', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    const cacheFactory = await import('../../cacheFactory');
    const redisClients = await import('../../redisClients');
    const { ioredisClient } = redisClients;
    const store = cacheFactory.sessionCache('test-sessions', 3600);

    // Wait for Redis connection to be ready
    if (ioredisClient && ioredisClient.status !== 'ready') {
      await new Promise<void>((resolve) => {
        ioredisClient.once('ready', resolve);
      });
    }

    // Verify it returns a ConnectRedis instance
    expect(store).toBeDefined();
    expect(store.constructor.name).toBe('RedisStore');
    expect(store.prefix).toBe('test-sessions:');

    // Test session operations
    const sessionId = 'sess:123456';
    const sessionData: SessionData = {
      user: { id: 'user123', name: 'Test User' },
      cookie: { maxAge: 3600000 },
    };

    const async = asyncStore(store);

    // Set session
    await async.set(sessionId, sessionData);

    // Get session
    const retrieved = await async.get(sessionId);
    expect(retrieved).toEqual(sessionData);

    // Touch session (update expiry)
    await async.touch(sessionId, sessionData);

    // Destroy session
    await async.destroy(sessionId);

    // Verify deletion
    const afterDelete = await async.get(sessionId);
    expect(afterDelete).toBeNull();
  });

  test('should return MemoryStore when USE_REDIS is false', async () => {
    process.env.USE_REDIS = 'false';

    const cacheFactory = await import('../../cacheFactory');
    const store = cacheFactory.sessionCache('test-sessions', 3600);

    // Verify it returns a MemoryStore instance
    expect(store).toBeDefined();
    expect(store.constructor.name).toBe('MemoryStore');

    // Test session operations
    const sessionId = 'mem:789012';
    const sessionData: SessionData = {
      user: { id: 'user456', name: 'Memory User' },
      cookie: { maxAge: 3600000 },
    };

    const async = asyncStore(store);

    // Set session
    await async.set(sessionId, sessionData);

    // Get session
    const retrieved = await async.get(sessionId);
    expect(retrieved).toEqual(sessionData);

    // Destroy session
    await async.destroy(sessionId);

    // Verify deletion
    const afterDelete = await async.get(sessionId);
    expect(afterDelete).toBeUndefined();
  });

  test('should handle namespace with and without trailing colon', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    const cacheFactory = await import('../../cacheFactory');

    const store1 = cacheFactory.sessionCache('namespace1');
    const store2 = cacheFactory.sessionCache('namespace2:');

    expect(store1.prefix).toBe('namespace1:');
    expect(store2.prefix).toBe('namespace2:');
  });

  test('should register error handler for Redis connection', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    const cacheFactory = await import('../../cacheFactory');
    const redisClients = await import('../../redisClients');
    const { ioredisClient } = redisClients;

    // Spy on ioredisClient.on
    const onSpy = jest.spyOn(ioredisClient!, 'on');

    // Create session store
    cacheFactory.sessionCache('error-test');

    // Verify error handler was registered
    expect(onSpy).toHaveBeenCalledWith('error', expect.any(Function));

    onSpy.mockRestore();
  });

  test('should handle session expiration with TTL', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    const cacheFactory = await import('../../cacheFactory');
    const redisClients = await import('../../redisClients');
    const { ioredisClient } = redisClients;
    const ttl = 1; // 1 second TTL
    const store = cacheFactory.sessionCache('ttl-sessions', ttl);

    // Wait for Redis connection to be ready
    if (ioredisClient && ioredisClient.status !== 'ready') {
      await new Promise<void>((resolve) => {
        ioredisClient.once('ready', resolve);
      });
    }

    const sessionId = 'ttl:12345';
    const sessionData: SessionData = { userId: 'ttl-user' };
    const async = asyncStore(store);

    // Set session with short TTL
    await async.set(sessionId, sessionData);

    // Verify session exists immediately
    const immediate = await async.get(sessionId);
    expect(immediate).toEqual(sessionData);

    // Wait for TTL to expire
    await new Promise((resolve) => setTimeout(resolve, (ttl + 0.5) * 1000));

    // Verify session has expired
    const expired = await async.get(sessionId);
    expect(expired).toBeNull();
  });
});
packages/api/src/cache/__tests__/cacheFactory/standardCache.integration.spec.ts
@@ -0,0 +1,185 @@
import type { Keyv } from 'keyv';

// Mock GLOBAL_PREFIX_SEPARATOR
jest.mock('../../redisClients', () => {
  const originalModule = jest.requireActual('../../redisClients');
  return {
    ...originalModule,
    GLOBAL_PREFIX_SEPARATOR: '>>',
  };
});

describe('standardCache', () => {
  let originalEnv: NodeJS.ProcessEnv;
  let testCache: Keyv | null = null;

  // Helper function to verify Redis keys exist
  const expectRedisKeysExist = async (expectedKeys: string[]) => {
    const redisClients = await import('../../redisClients');
    const { ioredisClient } = redisClients;
    if (!ioredisClient) throw new Error('ioredisClient is null');
    const allKeys = await ioredisClient.keys('Cache-Integration-Test*');
    expectedKeys.forEach((expectedKey) => {
      expect(allKeys).toContain(expectedKey);
    });
  };

  beforeEach(() => {
    originalEnv = { ...process.env };

    // Clear cache-related env vars
    delete process.env.USE_REDIS;
    delete process.env.REDIS_URI;
    delete process.env.USE_REDIS_CLUSTER;
    delete process.env.REDIS_PING_INTERVAL;
    delete process.env.REDIS_KEY_PREFIX;
    delete process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES;

    // Set test configuration
    process.env.REDIS_PING_INTERVAL = '0';
    process.env.REDIS_KEY_PREFIX = 'Cache-Integration-Test';
    process.env.REDIS_RETRY_MAX_ATTEMPTS = '5';

    // Clear require cache to reload modules
    jest.resetModules();
  });

  afterEach(async () => {
    // Clean up test keys using prefix and test namespaces
    const redisClients = await import('../../redisClients');
    const { ioredisClient } = redisClients;
    if (ioredisClient && ioredisClient.status === 'ready') {
      try {
        const patterns = [
          'Cache-Integration-Test>>*',
          'Cache-Integration-Test>>test-namespace:*',
          'Cache-Integration-Test>>another-namespace:*',
        ];

        for (const pattern of patterns) {
          const keys = await ioredisClient.keys(pattern);
          if (keys.length > 0) {
            await ioredisClient.del(...keys);
          }
        }
      } catch (error: unknown) {
        if (error instanceof Error) {
          console.warn('Error cleaning up test keys:', error.message);
        }
      }
    }

    // Clean up cache instance
    if (testCache) {
      try {
        await testCache.clear();
      } catch (error: unknown) {
        if (error instanceof Error) {
          console.warn('Error clearing cache:', error.message);
        }
      }
      testCache = null;
    }

    process.env = originalEnv;
    jest.resetModules();
  });

  describe('when USE_REDIS is false', () => {
    test('should create in-memory cache', async () => {
      process.env.USE_REDIS = 'false';

      const cacheFactory = await import('../../cacheFactory');
      testCache = cacheFactory.standardCache('test-namespace');

      expect(testCache).toBeDefined();
      expect(testCache.constructor.name).toBe('Keyv');
    });

    test('should use fallback store when provided', async () => {
      process.env.USE_REDIS = 'false';
      const fallbackStore = new Map();

      const cacheFactory = await import('../../cacheFactory');
      testCache = cacheFactory.standardCache('test-namespace', 200, fallbackStore);

      expect(testCache).toBeDefined();
      // Type assertion to access internal options
      const cacheWithOpts = testCache as Keyv & {
        opts: { store: unknown; namespace: string; ttl: number };
      };
      expect(cacheWithOpts.opts.store).toBe(fallbackStore);
      expect(cacheWithOpts.opts.namespace).toBe('test-namespace');
      expect(cacheWithOpts.opts.ttl).toBe(200);
    });
  });

  describe('when connecting to a Redis server', () => {
    test('should handle different namespaces with correct prefixes', async () => {
      process.env.USE_REDIS = 'true';
      process.env.USE_REDIS_CLUSTER = 'false';
      process.env.REDIS_URI = 'redis://127.0.0.1:6379';

      const cacheFactory = await import('../../cacheFactory');

      const cache1 = cacheFactory.standardCache('namespace-one');
      const cache2 = cacheFactory.standardCache('namespace-two');

      await cache1.set('key1', 'value1');
      await cache2.set('key2', 'value2');

      // Verify both caches work independently
      expect(await cache1.get('key1')).toBe('value1');
      expect(await cache2.get('key2')).toBe('value2');
      expect(await cache1.get('key2')).toBeUndefined();
      expect(await cache2.get('key1')).toBeUndefined();

      // Verify Redis keys have correct prefixes for different namespaces
      await expectRedisKeysExist([
        'Cache-Integration-Test>>namespace-one:key1',
        'Cache-Integration-Test>>namespace-two:key2',
      ]);

      await cache1.clear();
      await cache2.clear();
    });

    test('should respect FORCED_IN_MEMORY_CACHE_NAMESPACES', async () => {
      process.env.USE_REDIS = 'true';
      process.env.USE_REDIS_CLUSTER = 'false';
      process.env.REDIS_URI = 'redis://127.0.0.1:6379';
      process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = 'ROLES'; // Use a valid cache key

      const cacheFactory = await import('../../cacheFactory');

      // This should create an in-memory cache despite USE_REDIS being true
      testCache = cacheFactory.standardCache('ROLES', 5000);

      expect(testCache).toBeDefined();
      expect(testCache.constructor.name).toBe('Keyv');
      // Type assertion to access internal options
      const cacheWithOpts = testCache as Keyv & { opts: { namespace: string; ttl: number } };
      expect(cacheWithOpts.opts.namespace).toBe('ROLES');
      expect(cacheWithOpts.opts.ttl).toBe(5000);
    });

    test('should handle TTL correctly', async () => {
      process.env.USE_REDIS = 'true';
      process.env.USE_REDIS_CLUSTER = 'false';
      process.env.REDIS_URI = 'redis://127.0.0.1:6379';

      const cacheFactory = await import('../../cacheFactory');
      testCache = cacheFactory.standardCache('ttl-test', 1000); // 1 second TTL

      const testKey = 'ttl-key';
      const testValue = 'ttl-value';

      await testCache.set(testKey, testValue);
      expect(await testCache.get(testKey)).toBe(testValue);

      // Wait for TTL to expire
      await new Promise((resolve) => setTimeout(resolve, 1100));
      expect(await testCache.get(testKey)).toBeUndefined();
    });
  });
});
packages/api/src/cache/__tests__/cacheFactory/violationCache.integration.spec.ts
@@ -0,0 +1,241 @@
interface ViolationData {
  count?: number;
  timestamp?: number;
  namespace?: number;
  data?: string;
  userId?: string;
  violations?: Array<{
    type: string;
    timestamp: number;
    severity: string;
  }>;
  metadata?: {
    ip: string;
    userAgent: string;
    nested: {
      deep: {
        value: string;
      };
    };
  };
}

describe('violationCache', () => {
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
    originalEnv = { ...process.env };

    // Clear cache-related env vars
    delete process.env.USE_REDIS;
    delete process.env.REDIS_URI;
    delete process.env.USE_REDIS_CLUSTER;
    delete process.env.REDIS_PING_INTERVAL;
    delete process.env.REDIS_KEY_PREFIX;

    // Set test configuration
    process.env.REDIS_PING_INTERVAL = '0';
    process.env.REDIS_KEY_PREFIX = 'Cache-Integration-Test';
    process.env.REDIS_RETRY_MAX_ATTEMPTS = '5';

    // Clear require cache to reload modules
    jest.resetModules();
  });

  afterEach(async () => {
    process.env = originalEnv;
    jest.resetModules();
  });

  test('should create violation cache with Redis when USE_REDIS is true', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    const cacheFactory = await import('../../cacheFactory');
    const redisClients = await import('../../redisClients');
    const { ioredisClient } = redisClients;
    const cache = cacheFactory.violationCache('test-violations', 60000); // 60 second TTL

    // Wait for Redis connection to be ready
    if (ioredisClient && ioredisClient.status !== 'ready') {
      await new Promise<void>((resolve) => {
        ioredisClient.once('ready', resolve);
      });
    }

    // Verify it returns a Keyv instance
    expect(cache).toBeDefined();
    expect(cache.constructor.name).toBe('Keyv');

    // Test basic cache operations
    const testKey = 'user:456:violation';
    const testValue: ViolationData = { count: 1, timestamp: Date.now() };

    // SET operation
    await cache.set(testKey, testValue);

    // GET operation
    const retrievedValue = await cache.get(testKey);
    expect(retrievedValue).toEqual(testValue);

    // DELETE operation
    const deleted = await cache.delete(testKey);
    expect(deleted).toBe(true);

    // Verify deletion
    const afterDelete = await cache.get(testKey);
    expect(afterDelete).toBeUndefined();
  });

  test('should use fallback store when USE_REDIS is false', async () => {
    process.env.USE_REDIS = 'false';

    const cacheFactory = await import('../../cacheFactory');
    const cache = cacheFactory.violationCache('test-violations');

    // Verify it returns a Keyv instance
    expect(cache).toBeDefined();
    expect(cache.constructor.name).toBe('Keyv');

    // Test basic operations with fallback store
    const testKey = 'user:789:violation';
    const testValue: ViolationData = { count: 2, timestamp: Date.now() };

    // SET operation
    await cache.set(testKey, testValue);

    // GET operation
    const retrievedValue = await cache.get(testKey);
    expect(retrievedValue).toEqual(testValue);

    // DELETE operation
    const deleted = await cache.delete(testKey);
    expect(deleted).toBe(true);

    // Verify deletion
    const afterDelete = await cache.get(testKey);
    expect(afterDelete).toBeUndefined();
  });

  test('should respect namespace prefixing', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    const cacheFactory = await import('../../cacheFactory');
    const redisClients = await import('../../redisClients');
    const { ioredisClient } = redisClients;
    const cache1 = cacheFactory.violationCache('namespace1');
    const cache2 = cacheFactory.violationCache('namespace2');

    // Wait for Redis connection to be ready
    if (ioredisClient && ioredisClient.status !== 'ready') {
      await new Promise<void>((resolve) => {
        ioredisClient.once('ready', resolve);
      });
    }

    const testKey = 'shared-key';
    const value1: ViolationData = { namespace: 1 };
    const value2: ViolationData = { namespace: 2 };

    // Set same key in different namespaces
    await cache1.set(testKey, value1);
    await cache2.set(testKey, value2);

    // Verify namespace isolation
    const retrieved1 = await cache1.get(testKey);
    const retrieved2 = await cache2.get(testKey);

    expect(retrieved1).toEqual(value1);
    expect(retrieved2).toEqual(value2);

    // Clean up
    await cache1.delete(testKey);
    await cache2.delete(testKey);
  });

  test('should respect TTL settings', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    const cacheFactory = await import('../../cacheFactory');
    const redisClients = await import('../../redisClients');
    const { ioredisClient } = redisClients;
    const ttl = 1000; // 1 second TTL
    const cache = cacheFactory.violationCache('ttl-test', ttl);

    // Wait for Redis connection to be ready
    if (ioredisClient && ioredisClient.status !== 'ready') {
      await new Promise<void>((resolve) => {
        ioredisClient.once('ready', resolve);
      });
    }

    const testKey = 'ttl-key';
    const testValue: ViolationData = { data: 'expires soon' };

    // Set value with TTL
    await cache.set(testKey, testValue);

    // Verify value exists immediately
    const immediate = await cache.get(testKey);
    expect(immediate).toEqual(testValue);

    // Wait for TTL to expire
    await new Promise((resolve) => setTimeout(resolve, ttl + 100));

    // Verify value has expired
    const expired = await cache.get(testKey);
    expect(expired).toBeUndefined();
  });

  test('should handle complex violation data structures', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    const cacheFactory = await import('../../cacheFactory');
    const redisClients = await import('../../redisClients');
    const { ioredisClient } = redisClients;
    const cache = cacheFactory.violationCache('complex-violations');

    // Wait for Redis connection to be ready
    if (ioredisClient && ioredisClient.status !== 'ready') {
      await new Promise<void>((resolve) => {
        ioredisClient.once('ready', resolve);
      });
    }

    const complexData: ViolationData = {
      userId: 'user123',
      violations: [
        { type: 'rate_limit', timestamp: Date.now(), severity: 'warning' },
        { type: 'spam', timestamp: Date.now() - 1000, severity: 'critical' },
      ],
      metadata: {
        ip: '192.168.1.1',
        userAgent: 'Mozilla/5.0',
        nested: {
          deep: {
            value: 'test',
          },
        },
      },
    };

    const key = 'complex-violation-data';

    // Store complex data
    await cache.set(key, complexData);

    // Retrieve and verify
    const retrieved = await cache.get(key);
    expect(retrieved).toEqual(complexData);

    // Clean up
    await cache.delete(key);
  });
});
packages/api/src/cache/__tests__/redisClients.integration.spec.ts
@@ -0,0 +1,168 @@
import type { Redis, Cluster } from 'ioredis';
|
||||
import type { RedisClientType, RedisClusterType } from '@redis/client';
|
||||
|
||||
type RedisClient = RedisClientType | RedisClusterType | Redis | Cluster;
|
||||
|
||||
describe('redisClients Integration Tests', () => {
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
let ioredisClient: Redis | Cluster | null = null;
|
||||
let keyvRedisClient: RedisClientType | RedisClusterType | null = null;
|
||||
|
||||
// Helper function to test set/get/delete operations
|
||||
const testRedisOperations = async (client: RedisClient, keyPrefix: string): Promise<void> => {
|
||||
// Wait cluster to fully initialize
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
|
||||
const testKey = `${keyPrefix}-test-key`;
|
||||
const testValue = `${keyPrefix}-test-value`;
|
||||
|
||||
// Test set operation
|
||||
await client.set(testKey, testValue);
|
||||
|
||||
// Test get operation
|
||||
const result = await client.get(testKey);
|
||||
expect(result).toBe(testValue);
|
||||
|
||||
// Test delete operation
|
||||
const deleteResult = await client.del(testKey);
|
||||
expect(deleteResult).toBe(1);
|
||||
|
||||
// Verify key is deleted
|
||||
const deletedResult = await client.get(testKey);
|
||||
expect(deletedResult).toBeNull();
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
originalEnv = { ...process.env };
|
||||
|
||||
// Clear Redis-related env vars
|
||||
delete process.env.USE_REDIS;
|
||||
    delete process.env.REDIS_URI;
    delete process.env.USE_REDIS_CLUSTER;
    delete process.env.REDIS_PING_INTERVAL;
    delete process.env.REDIS_KEY_PREFIX;

    // Set common test configuration
    process.env.REDIS_KEY_PREFIX = 'Redis-Integration-Test';
    process.env.REDIS_RETRY_MAX_ATTEMPTS = '5';
    process.env.REDIS_PING_INTERVAL = '1000';

    // Clear module cache to reload module
    jest.resetModules();
  });

  afterEach(async () => {
    // Clean up test keys using the prefix
    if (ioredisClient && ioredisClient.status === 'ready') {
      try {
        const keys = await ioredisClient.keys('Redis-Integration-Test::*');
        if (keys.length > 0) {
          await ioredisClient.del(...keys);
        }
      } catch (error: any) {
        console.warn('Error cleaning up test keys:', error.message);
      }
    }

    // Cleanup Redis connections
    if (ioredisClient) {
      try {
        if (ioredisClient.status === 'ready') {
          ioredisClient.disconnect();
        }
      } catch (error: any) {
        console.warn('Error disconnecting ioredis client:', error.message);
      }
      ioredisClient = null;
    }

    if (keyvRedisClient) {
      try {
        // Try to disconnect - the @keyv/redis client doesn't have an isReady property
        await keyvRedisClient.disconnect();
      } catch (error: any) {
        console.warn('Error disconnecting keyv redis client:', error.message);
      }
      keyvRedisClient = null;
    }

    process.env = originalEnv;
    jest.resetModules();
  });

  describe('ioredis Client Tests', () => {
    describe('when USE_REDIS is false', () => {
      test('should have null client', async () => {
        process.env.USE_REDIS = 'false';

        const clients = await import('../redisClients');
        ioredisClient = clients.ioredisClient;

        expect(ioredisClient).toBeNull();
      });
    });

    describe('when connecting to a Redis instance', () => {
      test('should connect and perform set/get/delete operations', async () => {
        process.env.USE_REDIS = 'true';
        process.env.USE_REDIS_CLUSTER = 'false';
        process.env.REDIS_URI = 'redis://127.0.0.1:6379';

        const clients = await import('../redisClients');
        ioredisClient = clients.ioredisClient;
        await testRedisOperations(ioredisClient!, 'ioredis-single');
      });
    });

    describe('when connecting to a Redis cluster', () => {
      test('should connect to cluster and perform set/get/delete operations', async () => {
        process.env.USE_REDIS = 'true';
        process.env.USE_REDIS_CLUSTER = 'true';
        process.env.REDIS_URI =
          'redis://127.0.0.1:7001,redis://127.0.0.1:7002,redis://127.0.0.1:7003';

        const clients = await import('../redisClients');
        ioredisClient = clients.ioredisClient;
        await testRedisOperations(ioredisClient!, 'ioredis-cluster');
      });
    });
  });

  describe('keyvRedisClient Tests', () => {
    describe('when USE_REDIS is false', () => {
      test('should have null client', async () => {
        process.env.USE_REDIS = 'false';

        const clients = await import('../redisClients');
        keyvRedisClient = clients.keyvRedisClient;
        expect(keyvRedisClient).toBeNull();
      });
    });

    describe('when connecting to a Redis instance', () => {
      test('should connect and perform set/get/delete operations', async () => {
        process.env.USE_REDIS = 'true';
        process.env.USE_REDIS_CLUSTER = 'false';
        process.env.REDIS_URI = 'redis://127.0.0.1:6379';

        const clients = await import('../redisClients');
        keyvRedisClient = clients.keyvRedisClient;
        await testRedisOperations(keyvRedisClient!, 'keyv-single');
      });
    });

    describe('when connecting to a Redis cluster', () => {
      test('should connect to cluster and perform set/get/delete operations', async () => {
        process.env.USE_REDIS = 'true';
        process.env.USE_REDIS_CLUSTER = 'true';
        process.env.REDIS_URI =
          'redis://127.0.0.1:7001,redis://127.0.0.1:7002,redis://127.0.0.1:7003';

        const clients = await import('../redisClients');
        keyvRedisClient = clients.keyvRedisClient;
        await testRedisOperations(keyvRedisClient!, 'keyv-cluster');
      });
    });
  });
});
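Every case above funnels through a testRedisOperations helper defined earlier in this spec, outside this excerpt. A minimal sketch of what such a round-trip helper plausibly looks like — the parameter shape and assertions here are assumptions, not the committed helper:

// Hypothetical sketch; the real helper lives earlier in the integration spec.
async function testRedisOperations(
  client: {
    set: (k: string, v: string) => Promise<unknown>;
    get: (k: string) => Promise<unknown>;
    del: (k: string) => Promise<unknown>;
  },
  label: string,
): Promise<void> {
  const key = `op-test:${label}`;
  await client.set(key, 'value');
  expect(await client.get(key)).toBe('value'); // round-trips through the key prefix
  await client.del(key);
  expect(await client.get(key)).toBeFalsy(); // deleted keys resolve null/undefined
}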
89 packages/api/src/cache/cacheConfig.ts vendored Normal file
@@ -0,0 +1,89 @@
import { readFileSync, existsSync } from 'fs';
import { logger } from '@librechat/data-schemas';
import { CacheKeys } from 'librechat-data-provider';
import { math, isEnabled } from '~/utils';

// To ensure that different deployments do not interfere with each other's cache, we use a prefix for the Redis keys.
// This prefix is usually the deployment ID, which is often passed to the container or pod as an env var.
// Set REDIS_KEY_PREFIX_VAR to the env var that contains the deployment ID.
const REDIS_KEY_PREFIX_VAR = process.env.REDIS_KEY_PREFIX_VAR;
const REDIS_KEY_PREFIX = process.env.REDIS_KEY_PREFIX;
if (REDIS_KEY_PREFIX_VAR && REDIS_KEY_PREFIX) {
  throw new Error('Only either REDIS_KEY_PREFIX_VAR or REDIS_KEY_PREFIX can be set.');
}

const USE_REDIS = isEnabled(process.env.USE_REDIS);
if (USE_REDIS && !process.env.REDIS_URI) {
  throw new Error('USE_REDIS is enabled but REDIS_URI is not set.');
}

// Comma-separated list of cache namespaces that should be forced to use in-memory storage
// even when Redis is enabled. This allows selective performance optimization for specific caches.
const FORCED_IN_MEMORY_CACHE_NAMESPACES = process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES
  ? process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES.split(',').map((key) => key.trim())
  : [];

// Validate against the CacheKeys enum
if (FORCED_IN_MEMORY_CACHE_NAMESPACES.length > 0) {
  const validKeys = Object.values(CacheKeys) as string[];
  const invalidKeys = FORCED_IN_MEMORY_CACHE_NAMESPACES.filter((key) => !validKeys.includes(key));

  if (invalidKeys.length > 0) {
    throw new Error(
      `Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: ${invalidKeys.join(', ')}. Valid keys: ${validKeys.join(', ')}`,
    );
  }
}

/**
 * Helper function to safely read the Redis CA certificate from a file.
 * @returns {string|null} The contents of the CA certificate file, or null if not set or on error
 */
const getRedisCA = (): string | null => {
  const caPath = process.env.REDIS_CA;
  if (!caPath) {
    return null;
  }

  try {
    if (existsSync(caPath)) {
      return readFileSync(caPath, 'utf8');
    } else {
      logger.warn(`Redis CA certificate file not found: ${caPath}`);
      return null;
    }
  } catch (error) {
    logger.error(`Failed to read Redis CA certificate file '${caPath}':`, error);
    return null;
  }
};

const cacheConfig = {
  FORCED_IN_MEMORY_CACHE_NAMESPACES,
  USE_REDIS,
  REDIS_URI: process.env.REDIS_URI,
  REDIS_USERNAME: process.env.REDIS_USERNAME,
  REDIS_PASSWORD: process.env.REDIS_PASSWORD,
  REDIS_CA: getRedisCA(),
  REDIS_KEY_PREFIX: process.env[REDIS_KEY_PREFIX_VAR ?? ''] || REDIS_KEY_PREFIX || '',
  REDIS_MAX_LISTENERS: math(process.env.REDIS_MAX_LISTENERS, 40),
  REDIS_PING_INTERVAL: math(process.env.REDIS_PING_INTERVAL, 0),
  /** Max delay between reconnection attempts in ms */
  REDIS_RETRY_MAX_DELAY: math(process.env.REDIS_RETRY_MAX_DELAY, 3000),
  /** Max number of reconnection attempts (0 = infinite) */
  REDIS_RETRY_MAX_ATTEMPTS: math(process.env.REDIS_RETRY_MAX_ATTEMPTS, 10),
  /** Connection timeout in ms */
  REDIS_CONNECT_TIMEOUT: math(process.env.REDIS_CONNECT_TIMEOUT, 10000),
  /** Queue commands when disconnected */
  REDIS_ENABLE_OFFLINE_QUEUE: isEnabled(process.env.REDIS_ENABLE_OFFLINE_QUEUE ?? 'true'),
  /** Flag to modify the Redis connection by adding dnsLookup; required when connecting ioredis to ElastiCache.
   * See "Special Note: AWS ElastiCache Clusters with TLS" at https://www.npmjs.com/package/ioredis */
  REDIS_USE_ALTERNATIVE_DNS_LOOKUP: isEnabled(process.env.REDIS_USE_ALTERNATIVE_DNS_LOOKUP),
  /** Enable Redis cluster mode without the need for multiple URIs */
  USE_REDIS_CLUSTER: isEnabled(process.env.USE_REDIS_CLUSTER ?? 'false'),
  CI: isEnabled(process.env.CI),
  DEBUG_MEMORY_CACHE: isEnabled(process.env.DEBUG_MEMORY_CACHE),

  BAN_DURATION: math(process.env.BAN_DURATION, 7200000), // 2 hours
};

export { cacheConfig };
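Note that REDIS_KEY_PREFIX_VAR names another env var rather than holding the prefix itself, which suits orchestrated deployments where the deployment ID is injected under a fixed name. A hedged illustration — the variable names and values are hypothetical:

// Hypothetical: the chart passes the deployment ID through HOSTNAME.
process.env.REDIS_KEY_PREFIX_VAR = 'HOSTNAME';
process.env.HOSTNAME = 'librechat-7f9c';
// After the module loads, the resolved prefix is the pod name:
// cacheConfig.REDIS_KEY_PREFIX === 'librechat-7f9c'
// Setting REDIS_KEY_PREFIX as well would throw at import time.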
116 packages/api/src/cache/cacheFactory.ts vendored Normal file
@@ -0,0 +1,116 @@
/**
 * @keyv/redis exports its default class in a non-standard way:
 * module.exports = { default: KeyvRedis, ... } instead of module.exports = KeyvRedis
 * This breaks ES6 imports when the module is marked as external in rollup.
 * We must use require() to access the .default property directly.
 */
// eslint-disable-next-line @typescript-eslint/no-require-imports
const KeyvRedis = require('@keyv/redis').default as typeof import('@keyv/redis').default;
import { Keyv } from 'keyv';
import createMemoryStore from 'memorystore';
import { RedisStore } from 'rate-limit-redis';
import { Time } from 'librechat-data-provider';
import { logger } from '@librechat/data-schemas';
import session, { MemoryStore } from 'express-session';
import { RedisStore as ConnectRedis } from 'connect-redis';
import type { SendCommandFn } from 'rate-limit-redis';
import { keyvRedisClient, ioredisClient, GLOBAL_PREFIX_SEPARATOR } from './redisClients';
import { cacheConfig } from './cacheConfig';
import { violationFile } from './keyvFiles';

/**
 * Creates a cache instance using Redis or a fallback store. Suitable for general caching needs.
 * @param namespace - The cache namespace.
 * @param ttl - Time to live for cache entries.
 * @param fallbackStore - Optional fallback store if Redis is not used.
 * @returns Cache instance.
 */
export const standardCache = (namespace: string, ttl?: number, fallbackStore?: object): Keyv => {
  if (keyvRedisClient && !cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES?.includes(namespace)) {
    try {
      const keyvRedis = new KeyvRedis(keyvRedisClient);
      const cache = new Keyv(keyvRedis, { namespace, ttl });
      keyvRedis.namespace = cacheConfig.REDIS_KEY_PREFIX;
      keyvRedis.keyPrefixSeparator = GLOBAL_PREFIX_SEPARATOR;

      cache.on('error', (err) => {
        logger.error(`Cache error in namespace ${namespace}:`, err);
      });

      return cache;
    } catch (err) {
      logger.error(`Failed to create Redis cache for namespace ${namespace}:`, err);
      throw err;
    }
  }
  if (fallbackStore) {
    return new Keyv({ store: fallbackStore, namespace, ttl });
  }
  return new Keyv({ namespace, ttl });
};

/**
 * Creates a cache instance for storing violation data.
 * Uses a file-based fallback store if Redis is not enabled.
 * @param namespace - The cache namespace for violations.
 * @param ttl - Time to live for cache entries.
 * @returns Cache instance for violations.
 */
export const violationCache = (namespace: string, ttl?: number): Keyv => {
  return standardCache(`violations:${namespace}`, ttl, violationFile);
};

/**
 * Creates a session cache instance using Redis or an in-memory store.
 * @param namespace - The session namespace.
 * @param ttl - Time to live for session entries.
 * @returns Session store instance.
 */
export const sessionCache = (namespace: string, ttl?: number): MemoryStore | ConnectRedis => {
  namespace = namespace.endsWith(':') ? namespace : `${namespace}:`;
  if (!cacheConfig.USE_REDIS) {
    const MemoryStore = createMemoryStore(session);
    return new MemoryStore({ ttl, checkPeriod: Time.ONE_DAY });
  }
  const store = new ConnectRedis({ client: ioredisClient, ttl, prefix: namespace });
  if (ioredisClient) {
    ioredisClient.on('error', (err) => {
      logger.error(`Session store Redis error for namespace ${namespace}:`, err);
    });
  }
  return store;
};

/**
 * Creates a rate limiter cache using Redis.
 * @param prefix - The key prefix for rate limiting.
 * @returns RedisStore instance or undefined if Redis is not used.
 */
export const limiterCache = (prefix: string): RedisStore | undefined => {
  if (!prefix) {
    throw new Error('prefix is required');
  }
  if (!cacheConfig.USE_REDIS) {
    return undefined;
  }
  // TODO: The prefix is not actually applied. Also needs to account for the global prefix.
  prefix = prefix.endsWith(':') ? prefix : `${prefix}:`;

  try {
    const sendCommand: SendCommandFn = (async (...args: string[]) => {
      if (ioredisClient == null) {
        throw new Error('Redis client not available');
      }
      try {
        return await ioredisClient.call(args[0], ...args.slice(1));
      } catch (err) {
        logger.error('Redis command execution failed:', err);
        throw err;
      }
    }) as SendCommandFn;
    return new RedisStore({ sendCommand, prefix });
  } catch (err) {
    logger.error(`Failed to create Redis rate limiter for prefix ${prefix}:`, err);
    return undefined;
  }
};
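A hedged usage sketch of the four factories; the namespaces and TTLs below are illustrative, not the values LibreChat actually registers:

import { standardCache, violationCache, sessionCache, limiterCache } from './cacheFactory';

// Redis-backed when USE_REDIS is on; a plain in-memory Keyv otherwise.
const tokenCache = standardCache('token_config', 60_000);
await tokenCache.set('openAI', { tokens: 128000 });

// Falls back to the file store (./data/violations.json) without Redis.
const loginViolations = violationCache('logins', 3_600_000);

// express-session store: MemoryStore without Redis, ConnectRedis with it.
const store = sessionCache('librechat_session', 86_400_000);

// undefined without Redis, so callers keep express-rate-limit's default memory store.
const limiterStore = limiterCache('login_limiter');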
5 packages/api/src/cache/index.ts vendored Normal file
@@ -0,0 +1,5 @@
export * from './cacheConfig';
export * from './redisClients';
export * from './keyvFiles';
export { default as keyvMongo } from './keyvMongo';
export * from './cacheFactory';
6 packages/api/src/cache/keyvFiles.ts vendored Normal file
@@ -0,0 +1,6 @@
import { KeyvFile } from 'keyv-file';

export const logFile = new KeyvFile({ filename: './data/logs.json' }).setMaxListeners(20);
export const violationFile = new KeyvFile({ filename: './data/violations.json' }).setMaxListeners(
  20,
);
280 packages/api/src/cache/keyvMongo.ts vendored Normal file
@@ -0,0 +1,280 @@
import mongoose from 'mongoose';
import { EventEmitter } from 'events';
import { GridFSBucket } from 'mongodb';
import { logger } from '@librechat/data-schemas';
import type { Db, ReadPreference, Collection } from 'mongodb';

interface KeyvMongoOptions {
  url?: string;
  collection?: string;
  useGridFS?: boolean;
  readPreference?: ReadPreference;
}

interface GridFSClient {
  bucket: GridFSBucket;
  store: Collection;
  db: Db;
}

interface CollectionClient {
  store: Collection;
  db: Db;
}

type Client = GridFSClient | CollectionClient;

const storeMap = new Map<string, Client>();

class KeyvMongoCustom extends EventEmitter {
  private opts: KeyvMongoOptions;
  public ttlSupport: boolean;
  public namespace?: string;

  constructor(options: KeyvMongoOptions = {}) {
    super();

    this.opts = {
      url: 'mongodb://127.0.0.1:27017',
      collection: 'keyv',
      ...options,
    };

    this.ttlSupport = false;
  }

  // Helper to access the store WITHOUT storing a promise on the instance
  private async _getClient(): Promise<Client> {
    const storeKey = `${this.opts.collection}:${this.opts.useGridFS ? 'gridfs' : 'collection'}`;

    // If we already have the store initialized, return it directly
    if (storeMap.has(storeKey)) {
      return storeMap.get(storeKey)!;
    }

    // Check mongoose connection state
    if (mongoose.connection.readyState !== 1) {
      throw new Error('Mongoose connection not ready. Ensure connectDb() is called first.');
    }

    try {
      const db = mongoose.connection.db as unknown as Db | undefined;
      if (!db) {
        throw new Error('MongoDB database not available');
      }

      let client: Client;

      if (this.opts.useGridFS) {
        const bucket = new GridFSBucket(db, {
          readPreference: this.opts.readPreference,
          bucketName: this.opts.collection,
        });
        const store = db.collection(`${this.opts.collection}.files`);
        client = { bucket, store, db };
      } else {
        const collection = this.opts.collection || 'keyv';
        const store = db.collection(collection);
        client = { store, db };
      }

      storeMap.set(storeKey, client);
      return client;
    } catch (error) {
      this.emit('error', error);
      throw error;
    }
  }

  async get(key: string): Promise<unknown> {
    const client = await this._getClient();

    if (this.opts.useGridFS && this.isGridFSClient(client)) {
      await client.store.updateOne(
        {
          filename: key,
        },
        {
          $set: {
            'metadata.lastAccessed': new Date(),
          },
        },
      );

      const stream = client.bucket.openDownloadStreamByName(key);

      return new Promise((resolve) => {
        const resp: Uint8Array[] = [];
        stream.on('error', () => {
          resolve(undefined);
        });

        stream.on('end', () => {
          const data = Buffer.concat(resp).toString('utf8');
          resolve(data);
        });

        stream.on('data', (chunk: Uint8Array) => {
          resp.push(chunk);
        });
      });
    }

    const document = await client.store.findOne({ key: { $eq: key } });

    if (!document) {
      return undefined;
    }

    return document.value;
  }

  async getMany(keys: string[]): Promise<unknown[]> {
    const client = await this._getClient();

    if (this.opts.useGridFS) {
      const promises = [];
      for (const key of keys) {
        promises.push(this.get(key));
      }

      const values = await Promise.allSettled(promises);
      const data: unknown[] = [];
      for (const value of values) {
        data.push(value.status === 'fulfilled' ? value.value : undefined);
      }

      return data;
    }

    const values = await client.store
      .find({ key: { $in: keys } })
      .project({ _id: 0, value: 1, key: 1 })
      .toArray();

    const results: unknown[] = [...keys];
    let i = 0;
    for (const key of keys) {
      const rowIndex = values.findIndex((row) => row.key === key);
      results[i] = rowIndex > -1 ? values[rowIndex].value : undefined;
      i++;
    }

    return results;
  }

  async set(key: string, value: string, ttl?: number): Promise<unknown> {
    const client = await this._getClient();
    const expiresAt = typeof ttl === 'number' ? new Date(Date.now() + ttl) : null;

    if (this.opts.useGridFS && this.isGridFSClient(client)) {
      const stream = client.bucket.openUploadStream(key, {
        metadata: {
          expiresAt,
          lastAccessed: new Date(),
        },
      });

      return new Promise((resolve) => {
        stream.on('finish', () => {
          resolve(stream);
        });
        stream.end(value);
      });
    }

    await client.store.updateOne(
      { key: { $eq: key } },
      { $set: { key, value, expiresAt } },
      { upsert: true },
    );
  }

  async delete(key: string): Promise<boolean> {
    const client = await this._getClient();

    if (this.opts.useGridFS && this.isGridFSClient(client)) {
      try {
        const bucket = new GridFSBucket(client.db, {
          bucketName: this.opts.collection,
        });
        const files = await bucket.find({ filename: key }).toArray();
        if (files.length > 0) {
          await client.bucket.delete(files[0]._id);
        }
        return true;
      } catch {
        return false;
      }
    }

    const object = await client.store.deleteOne({ key: { $eq: key } });
    return object.deletedCount > 0;
  }

  async deleteMany(keys: string[]): Promise<boolean> {
    const client = await this._getClient();

    if (this.opts.useGridFS && this.isGridFSClient(client)) {
      const bucket = new GridFSBucket(client.db, {
        bucketName: this.opts.collection,
      });
      const files = await bucket.find({ filename: { $in: keys } }).toArray();
      if (files.length === 0) {
        return false;
      }

      await Promise.all(files.map(async (file) => client.bucket.delete(file._id)));
      return true;
    }

    const object = await client.store.deleteMany({ key: { $in: keys } });
    return object.deletedCount > 0;
  }

  async clear(): Promise<void> {
    const client = await this._getClient();

    if (this.opts.useGridFS && this.isGridFSClient(client)) {
      try {
        await client.bucket.drop();
      } catch (error: unknown) {
        // Throw error if not "namespace not found" error
        const errorCode =
          error instanceof Error && 'code' in error ? (error as { code?: number }).code : undefined;
        if (errorCode !== 26) {
          throw error;
        }
      }
    }

    await client.store.deleteMany({
      key: { $regex: this.namespace ? `^${this.namespace}:*` : '' },
    });
  }

  async has(key: string): Promise<boolean> {
    const client = await this._getClient();
    const filter = { [this.opts.useGridFS ? 'filename' : 'key']: { $eq: key } };
    const document = await client.store.countDocuments(filter, { limit: 1 });
    return document !== 0;
  }

  // No-op disconnect
  async disconnect(): Promise<boolean> {
    // This is a no-op since we don't want to close the shared mongoose connection
    return true;
  }

  private isGridFSClient(client: Client): client is GridFSClient {
    return (client as GridFSClient).bucket != null;
  }
}

const keyvMongo = new KeyvMongoCustom({
  collection: 'logs',
});

keyvMongo.on('error', (err) => logger.error('KeyvMongo connection error:', err));

export default keyvMongo;
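A brief usage sketch of the exported singleton; it reads the already-open mongoose connection, so connectDb() must have run first. The key and value below are illustrative:

import keyvMongo from './keyvMongo';

// Throws 'Mongoose connection not ready' unless mongoose.connection.readyState === 1.
await keyvMongo.set('banner:main', JSON.stringify({ text: 'hello' }), 60_000);
const raw = (await keyvMongo.get('banner:main')) as string | undefined;
await keyvMongo.delete('banner:main');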
195 packages/api/src/cache/redisClients.ts vendored Normal file
@@ -0,0 +1,195 @@
import IoRedis from 'ioredis';
import type { Redis, Cluster } from 'ioredis';
import { logger } from '@librechat/data-schemas';
import { createClient, createCluster } from '@keyv/redis';
import type { RedisClientType, RedisClusterType } from '@redis/client';
import { cacheConfig } from './cacheConfig';

const GLOBAL_PREFIX_SEPARATOR = '::';

const urls = cacheConfig.REDIS_URI?.split(',').map((uri) => new URL(uri)) || [];
const username = urls?.[0]?.username || cacheConfig.REDIS_USERNAME;
const password = urls?.[0]?.password || cacheConfig.REDIS_PASSWORD;
const ca = cacheConfig.REDIS_CA;

let ioredisClient: Redis | Cluster | null = null;
if (cacheConfig.USE_REDIS) {
  const redisOptions: Record<string, unknown> = {
    username: username,
    password: password,
    tls: ca ? { ca } : undefined,
    keyPrefix: `${cacheConfig.REDIS_KEY_PREFIX}${GLOBAL_PREFIX_SEPARATOR}`,
    maxListeners: cacheConfig.REDIS_MAX_LISTENERS,
    retryStrategy: (times: number) => {
      if (
        cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
        times > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
      ) {
        logger.error(
          `ioredis giving up after ${cacheConfig.REDIS_RETRY_MAX_ATTEMPTS} reconnection attempts`,
        );
        return null;
      }
      const delay = Math.min(times * 50, cacheConfig.REDIS_RETRY_MAX_DELAY);
      logger.info(`ioredis reconnecting... attempt ${times}, delay ${delay}ms`);
      return delay;
    },
    reconnectOnError: (err: Error) => {
      const targetError = 'READONLY';
      if (err.message.includes(targetError)) {
        logger.warn('ioredis reconnecting due to READONLY error');
        return 2; // Return retry delay instead of boolean
      }
      return false;
    },
    enableOfflineQueue: cacheConfig.REDIS_ENABLE_OFFLINE_QUEUE,
    connectTimeout: cacheConfig.REDIS_CONNECT_TIMEOUT,
    maxRetriesPerRequest: 3,
  };

  ioredisClient =
    urls.length === 1 && !cacheConfig.USE_REDIS_CLUSTER
      ? new IoRedis(cacheConfig.REDIS_URI!, redisOptions)
      : new IoRedis.Cluster(
          urls.map((url) => ({ host: url.hostname, port: parseInt(url.port, 10) || 6379 })),
          {
            ...(cacheConfig.REDIS_USE_ALTERNATIVE_DNS_LOOKUP
              ? {
                  dnsLookup: (
                    address: string,
                    callback: (err: Error | null, address: string) => void,
                  ) => callback(null, address),
                }
              : {}),
            redisOptions,
            clusterRetryStrategy: (times: number) => {
              if (
                cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
                times > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
              ) {
                logger.error(
                  `ioredis cluster giving up after ${cacheConfig.REDIS_RETRY_MAX_ATTEMPTS} reconnection attempts`,
                );
                return null;
              }
              const delay = Math.min(times * 100, cacheConfig.REDIS_RETRY_MAX_DELAY);
              logger.info(`ioredis cluster reconnecting... attempt ${times}, delay ${delay}ms`);
              return delay;
            },
            enableOfflineQueue: cacheConfig.REDIS_ENABLE_OFFLINE_QUEUE,
          },
        );

  ioredisClient.on('error', (err) => {
    logger.error('ioredis client error:', err);
  });

  ioredisClient.on('connect', () => {
    logger.info('ioredis client connected');
  });

  ioredisClient.on('ready', () => {
    logger.info('ioredis client ready');
  });

  ioredisClient.on('reconnecting', (delay: number) => {
    logger.info(`ioredis client reconnecting in ${delay}ms`);
  });

  ioredisClient.on('close', () => {
    logger.warn('ioredis client connection closed');
  });

  /** Ping interval to keep the Redis server connection alive (if enabled) */
  let pingInterval: NodeJS.Timeout | null = null;
  const clearPingInterval = () => {
    if (pingInterval) {
      clearInterval(pingInterval);
      pingInterval = null;
    }
  };

  if (cacheConfig.REDIS_PING_INTERVAL > 0) {
    pingInterval = setInterval(() => {
      if (ioredisClient && ioredisClient.status === 'ready') {
        ioredisClient.ping().catch((err) => {
          logger.error('ioredis ping failed:', err);
        });
      }
    }, cacheConfig.REDIS_PING_INTERVAL * 1000);
    ioredisClient.on('close', clearPingInterval);
    ioredisClient.on('end', clearPingInterval);
  }
}

let keyvRedisClient: RedisClientType | RedisClusterType | null = null;
if (cacheConfig.USE_REDIS) {
  /**
   * ** WARNING ** The @keyv/redis client does not support a key prefix like ioredis above.
   * The prefix feature is handled by the Keyv-Redis store in cacheFactory.ts
   */
  const redisOptions: Record<string, unknown> = {
    username,
    password,
    socket: {
      tls: ca != null,
      ca,
      connectTimeout: cacheConfig.REDIS_CONNECT_TIMEOUT,
      reconnectStrategy: (retries: number) => {
        if (
          cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
          retries > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
        ) {
          logger.error(
            `@keyv/redis client giving up after ${cacheConfig.REDIS_RETRY_MAX_ATTEMPTS} reconnection attempts`,
          );
          return new Error('Max reconnection attempts reached');
        }
        const delay = Math.min(retries * 100, cacheConfig.REDIS_RETRY_MAX_DELAY);
        logger.info(`@keyv/redis reconnecting... attempt ${retries}, delay ${delay}ms`);
        return delay;
      },
    },
    disableOfflineQueue: !cacheConfig.REDIS_ENABLE_OFFLINE_QUEUE,
    ...(cacheConfig.REDIS_PING_INTERVAL > 0
      ? { pingInterval: cacheConfig.REDIS_PING_INTERVAL * 1000 }
      : {}),
  };

  keyvRedisClient =
    urls.length === 1 && !cacheConfig.USE_REDIS_CLUSTER
      ? createClient({ url: cacheConfig.REDIS_URI, ...redisOptions })
      : createCluster({
          rootNodes: urls.map((url) => ({ url: url.href })),
          defaults: redisOptions,
        });

  keyvRedisClient.setMaxListeners(cacheConfig.REDIS_MAX_LISTENERS);

  keyvRedisClient.on('error', (err) => {
    logger.error('@keyv/redis client error:', err);
  });

  keyvRedisClient.on('connect', () => {
    logger.info('@keyv/redis client connected');
  });

  keyvRedisClient.on('ready', () => {
    logger.info('@keyv/redis client ready');
  });

  keyvRedisClient.on('reconnecting', () => {
    logger.info('@keyv/redis client reconnecting...');
  });

  keyvRedisClient.on('disconnect', () => {
    logger.warn('@keyv/redis client disconnected');
  });

  keyvRedisClient.connect().catch((err) => {
    logger.error('@keyv/redis initial connection failed:', err);
    throw err;
  });
}

export { ioredisClient, keyvRedisClient, GLOBAL_PREFIX_SEPARATOR };
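Both clients derive their topology from the same variables: a single URI without USE_REDIS_CLUSTER yields a standalone client, while multiple comma-separated URIs (or the cluster flag with one URI) yield a cluster client. A hedged sketch of the two accepted env shapes — hosts, ports, and credentials here are illustrative:

# Single instance — credentials may ride in the URI or in REDIS_USERNAME/REDIS_PASSWORD:
USE_REDIS=true
REDIS_URI=redis://default:secret@127.0.0.1:6379

# Cluster — list the nodes, or set USE_REDIS_CLUSTER=true with one URI and let the
# client discover the remaining nodes itself:
USE_REDIS=true
REDIS_URI=redis://10.0.0.1:6379,redis://10.0.0.2:6379,redis://10.0.0.3:6379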
54 packages/api/src/cdn/azure.ts Normal file
@@ -0,0 +1,54 @@
import { logger } from '@librechat/data-schemas';
import { DefaultAzureCredential } from '@azure/identity';
import type { ContainerClient, BlobServiceClient } from '@azure/storage-blob';

let blobServiceClient: BlobServiceClient | null = null;
let azureWarningLogged = false;

/**
 * Initializes the Azure Blob Service client.
 * This function establishes a connection by checking if a connection string is provided.
 * If available, the connection string is used; otherwise, Managed Identity (via DefaultAzureCredential) is utilized.
 * Note: Container creation (and its public access settings) is handled later in the CRUD functions.
 * @returns The initialized client, or null if the required configuration is missing.
 */
export const initializeAzureBlobService = async (): Promise<BlobServiceClient | null> => {
  if (blobServiceClient) {
    return blobServiceClient;
  }
  const connectionString = process.env.AZURE_STORAGE_CONNECTION_STRING;
  if (connectionString) {
    const { BlobServiceClient } = await import('@azure/storage-blob');
    blobServiceClient = BlobServiceClient.fromConnectionString(connectionString);
    logger.info('Azure Blob Service initialized using connection string');
  } else {
    const accountName = process.env.AZURE_STORAGE_ACCOUNT_NAME;
    if (!accountName) {
      if (!azureWarningLogged) {
        logger.error(
          '[initializeAzureBlobService] Azure Blob Service not initialized. Connection string missing and AZURE_STORAGE_ACCOUNT_NAME not provided.',
        );
        azureWarningLogged = true;
      }
      return null;
    }
    const url = `https://${accountName}.blob.core.windows.net`;
    const credential = new DefaultAzureCredential();
    const { BlobServiceClient } = await import('@azure/storage-blob');
    blobServiceClient = new BlobServiceClient(url, credential);
    logger.info('Azure Blob Service initialized using Managed Identity');
  }
  return blobServiceClient;
};

/**
 * Retrieves the Azure ContainerClient for the given container name.
 * @param [containerName=process.env.AZURE_CONTAINER_NAME || 'files'] - The container name.
 * @returns The Azure ContainerClient.
 */
export const getAzureContainerClient = async (
  containerName = process.env.AZURE_CONTAINER_NAME || 'files',
): Promise<ContainerClient | null> => {
  const serviceClient = await initializeAzureBlobService();
  return serviceClient ? serviceClient.getContainerClient(containerName) : null;
};
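A hedged sketch of calling the helper above; the container default comes from the env var, while the blob name and body are illustrative. uploadBlockBlob is the standard @azure/storage-blob ContainerClient API:

import { getAzureContainerClient } from './azure';

const container = await getAzureContainerClient(); // AZURE_CONTAINER_NAME || 'files'
if (container) {
  const data = Buffer.from('hello');
  await container.uploadBlockBlob('uploads/hello.txt', data, data.length);
}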
42 packages/api/src/cdn/firebase.ts Normal file
@@ -0,0 +1,42 @@
import firebase from 'firebase/app';
import { getStorage } from 'firebase/storage';
import { logger } from '@librechat/data-schemas';
import type { FirebaseStorage } from 'firebase/storage';
import type { FirebaseApp } from 'firebase/app';

let firebaseInitCount = 0;
let firebaseApp: FirebaseApp | null = null;

export const initializeFirebase = () => {
  if (firebaseApp) {
    return firebaseApp;
  }

  const firebaseConfig = {
    apiKey: process.env.FIREBASE_API_KEY,
    authDomain: process.env.FIREBASE_AUTH_DOMAIN,
    projectId: process.env.FIREBASE_PROJECT_ID,
    storageBucket: process.env.FIREBASE_STORAGE_BUCKET,
    messagingSenderId: process.env.FIREBASE_MESSAGING_SENDER_ID,
    appId: process.env.FIREBASE_APP_ID,
  };

  if (Object.values(firebaseConfig).some((value) => !value)) {
    if (firebaseInitCount === 0) {
      logger.info(
        '[Optional] Firebase CDN not initialized. To enable, set FIREBASE_API_KEY, FIREBASE_AUTH_DOMAIN, FIREBASE_PROJECT_ID, FIREBASE_STORAGE_BUCKET, FIREBASE_MESSAGING_SENDER_ID, and FIREBASE_APP_ID environment variables.',
      );
    }
    firebaseInitCount++;
    return null;
  }

  firebaseApp = firebase.initializeApp(firebaseConfig);
  logger.info('Firebase CDN initialized');
  return firebaseApp;
};

export const getFirebaseStorage = (): FirebaseStorage | null => {
  const app = initializeFirebase();
  return app ? getStorage(app) : null;
};
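A short hedged usage sketch; ref() and uploadString() are standard firebase/storage APIs, and the path and payload below are illustrative:

import { ref, uploadString } from 'firebase/storage';
import { getFirebaseStorage } from './firebase';

const storage = getFirebaseStorage(); // null when the FIREBASE_* env vars are unset
if (storage) {
  await uploadString(ref(storage, 'images/avatar.txt'), 'base64-data');
}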
3 packages/api/src/cdn/index.ts Normal file
@@ -0,0 +1,3 @@
export * from './azure';
export * from './firebase';
export * from './s3';
51 packages/api/src/cdn/s3.ts Normal file
@@ -0,0 +1,51 @@
import { S3Client } from '@aws-sdk/client-s3';
import { logger } from '@librechat/data-schemas';

let s3: S3Client | null = null;

/**
 * Initializes and returns an instance of the AWS S3 client.
 *
 * If AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are provided, they will be used.
 * Otherwise, the AWS SDK's default credentials chain (including IRSA) is used.
 *
 * If AWS_ENDPOINT_URL is provided, it will be used as the endpoint.
 *
 * @returns An instance of S3Client if the region is provided; otherwise, null.
 */
export const initializeS3 = (): S3Client | null => {
  if (s3) {
    return s3;
  }

  const region = process.env.AWS_REGION;
  if (!region) {
    logger.error('[initializeS3] AWS_REGION is not set. Cannot initialize S3.');
    return null;
  }

  // Read the custom endpoint if provided.
  const endpoint = process.env.AWS_ENDPOINT_URL;
  const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
  const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;

  const config = {
    region,
    // Conditionally add the endpoint if it is provided
    ...(endpoint ? { endpoint } : {}),
  };

  if (accessKeyId && secretAccessKey) {
    s3 = new S3Client({
      ...config,
      credentials: { accessKeyId, secretAccessKey },
    });
    logger.info('[initializeS3] S3 initialized with provided credentials.');
  } else {
    // When using IRSA, credentials are automatically provided via the IAM Role attached to the ServiceAccount.
    s3 = new S3Client(config);
    logger.info('[initializeS3] S3 initialized using default credentials (IRSA).');
  }

  return s3;
};
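A hedged example pointing the client at an S3-compatible endpoint; the MinIO address and bucket name are illustrative, and PutObjectCommand is the standard @aws-sdk/client-s3 API:

// AWS_REGION=us-east-1
// AWS_ENDPOINT_URL=http://localhost:9000   (e.g. a local MinIO; optional)
import { PutObjectCommand } from '@aws-sdk/client-s3';
import { initializeS3 } from './s3';

const s3 = initializeS3(); // null when AWS_REGION is unset
if (s3) {
  await s3.send(
    new PutObjectCommand({ Bucket: 'librechat-files', Key: 'hello.txt', Body: 'hi' }),
  );
}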
74 packages/api/src/files/encode/audio.ts Normal file
@@ -0,0 +1,74 @@
import { Providers } from '@librechat/agents';
import { isDocumentSupportedProvider } from 'librechat-data-provider';
import type { IMongoFile } from '@librechat/data-schemas';
import type { Request } from 'express';
import type { StrategyFunctions, AudioResult } from '~/types/files';
import { validateAudio } from '~/files/validation';
import { getFileStream } from './utils';

/**
 * Encodes and formats audio files for different providers
 * @param req - The request object
 * @param files - Array of audio files
 * @param provider - The provider to format for (currently only Google is supported)
 * @param getStrategyFunctions - Function to get strategy functions
 * @returns Promise that resolves to audio and file metadata
 */
export async function encodeAndFormatAudios(
  req: Request,
  files: IMongoFile[],
  provider: Providers,
  getStrategyFunctions: (source: string) => StrategyFunctions,
): Promise<AudioResult> {
  if (!files?.length) {
    return { audios: [], files: [] };
  }

  const encodingMethods: Record<string, StrategyFunctions> = {};
  const result: AudioResult = { audios: [], files: [] };

  const results = await Promise.allSettled(
    files.map((file) => getFileStream(req, file, encodingMethods, getStrategyFunctions)),
  );

  for (const settledResult of results) {
    if (settledResult.status === 'rejected') {
      console.error('Audio processing failed:', settledResult.reason);
      continue;
    }

    const processed = settledResult.value;
    if (!processed) continue;

    const { file, content, metadata } = processed;

    if (!content || !file) {
      if (metadata) result.files.push(metadata);
      continue;
    }

    if (!file.type.startsWith('audio/') || !isDocumentSupportedProvider(provider)) {
      result.files.push(metadata);
      continue;
    }

    const audioBuffer = Buffer.from(content, 'base64');
    const validation = await validateAudio(audioBuffer, audioBuffer.length, provider);

    if (!validation.isValid) {
      throw new Error(`Audio validation failed: ${validation.error}`);
    }

    if (provider === Providers.GOOGLE || provider === Providers.VERTEXAI) {
      result.audios.push({
        type: 'audio',
        mimeType: file.type,
        data: content,
      });
    }

    result.files.push(metadata);
  }

  return result;
}
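How a caller might wire this up, with the strategy lookup stubbed — in LibreChat it comes from the file-strategy layer, and the declare statements below mark those assumptions:

import { Providers } from '@librechat/agents';
import type { Request } from 'express';
import type { IMongoFile } from '@librechat/data-schemas';
import type { StrategyFunctions } from '~/types/files';
import { encodeAndFormatAudios } from './audio';

declare const req: Request; // current request
declare const audioFiles: IMongoFile[]; // files attached to the message
declare const getStrategyFunctions: (source: string) => StrategyFunctions; // storage-strategy lookup

// Only Google/Vertex receive inline audio parts; other providers get metadata back.
const { audios, files } = await encodeAndFormatAudios(
  req,
  audioFiles,
  Providers.GOOGLE,
  getStrategyFunctions,
);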
108 packages/api/src/files/encode/document.ts Normal file
@@ -0,0 +1,108 @@
import { Providers } from '@librechat/agents';
import { isOpenAILikeProvider, isDocumentSupportedProvider } from 'librechat-data-provider';
import type { IMongoFile } from '@librechat/data-schemas';
import type { Request } from 'express';
import type { StrategyFunctions, DocumentResult } from '~/types/files';
import { validatePdf } from '~/files/validation';
import { getFileStream } from './utils';

/**
 * Processes and encodes document files for various providers
 * @param req - Express request object
 * @param files - Array of file objects to process
 * @param provider - The provider name
 * @param getStrategyFunctions - Function to get strategy functions
 * @returns Promise that resolves to documents and file metadata
 */
export async function encodeAndFormatDocuments(
  req: Request,
  files: IMongoFile[],
  { provider, useResponsesApi }: { provider: Providers; useResponsesApi?: boolean },
  getStrategyFunctions: (source: string) => StrategyFunctions,
): Promise<DocumentResult> {
  if (!files?.length) {
    return { documents: [], files: [] };
  }

  const encodingMethods: Record<string, StrategyFunctions> = {};
  const result: DocumentResult = { documents: [], files: [] };

  const documentFiles = files.filter(
    (file) => file.type === 'application/pdf' || file.type?.startsWith('application/'),
  );

  if (!documentFiles.length) {
    return result;
  }

  const results = await Promise.allSettled(
    documentFiles.map((file) => {
      if (file.type !== 'application/pdf' || !isDocumentSupportedProvider(provider)) {
        return Promise.resolve(null);
      }
      return getFileStream(req, file, encodingMethods, getStrategyFunctions);
    }),
  );

  for (const settledResult of results) {
    if (settledResult.status === 'rejected') {
      console.error('Document processing failed:', settledResult.reason);
      continue;
    }

    const processed = settledResult.value;
    if (!processed) continue;

    const { file, content, metadata } = processed;

    if (!content || !file) {
      if (metadata) result.files.push(metadata);
      continue;
    }

    if (file.type === 'application/pdf' && isDocumentSupportedProvider(provider)) {
      const pdfBuffer = Buffer.from(content, 'base64');
      const validation = await validatePdf(pdfBuffer, pdfBuffer.length, provider);

      if (!validation.isValid) {
        throw new Error(`PDF validation failed: ${validation.error}`);
      }

      if (provider === Providers.ANTHROPIC) {
        result.documents.push({
          type: 'document',
          source: {
            type: 'base64',
            media_type: 'application/pdf',
            data: content,
          },
          cache_control: { type: 'ephemeral' },
          citations: { enabled: true },
        });
      } else if (useResponsesApi) {
        result.documents.push({
          type: 'input_file',
          filename: file.filename,
          file_data: `data:application/pdf;base64,${content}`,
        });
      } else if (provider === Providers.GOOGLE || provider === Providers.VERTEXAI) {
        result.documents.push({
          type: 'document',
          mimeType: 'application/pdf',
          data: content,
        });
      } else if (isOpenAILikeProvider(provider) && provider != Providers.AZURE) {
        result.documents.push({
          type: 'file',
          file: {
            filename: file.filename,
            file_data: `data:application/pdf;base64,${content}`,
          },
        });
      }
      result.files.push(metadata);
    }
  }

  return result;
}
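The same calling pattern applies here, with an options bag; useResponsesApi flips the OpenAI output to input_file parts. As in the audio sketch above, req, attachedFiles, and getStrategyFunctions are assumed inputs:

import { Providers } from '@librechat/agents';
import { encodeAndFormatDocuments } from './document';

// req, attachedFiles, and getStrategyFunctions declared as in the audio example.
const { documents, files } = await encodeAndFormatDocuments(
  req,
  attachedFiles,
  { provider: Providers.OPENAI, useResponsesApi: true }, // emits `input_file` parts
  getStrategyFunctions,
);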
3 packages/api/src/files/encode/index.ts Normal file
@@ -0,0 +1,3 @@
export * from './audio';
export * from './document';
export * from './video';
46 packages/api/src/files/encode/utils.ts Normal file
@@ -0,0 +1,46 @@
import getStream from 'get-stream';
import { FileSources } from 'librechat-data-provider';
import type { IMongoFile } from '@librechat/data-schemas';
import type { Request } from 'express';
import type { StrategyFunctions, ProcessedFile } from '~/types/files';

/**
 * Processes a file by downloading and encoding it to base64
 * @param req - Express request object
 * @param file - File object to process
 * @param encodingMethods - Cache of encoding methods by source
 * @param getStrategyFunctions - Function to get strategy functions for a source
 * @returns Processed file with content and metadata, or null if filepath missing
 */
export async function getFileStream(
  req: Request,
  file: IMongoFile,
  encodingMethods: Record<string, StrategyFunctions>,
  getStrategyFunctions: (source: string) => StrategyFunctions,
): Promise<ProcessedFile | null> {
  if (!file?.filepath) {
    return null;
  }

  const source = file.source ?? FileSources.local;
  if (!encodingMethods[source]) {
    encodingMethods[source] = getStrategyFunctions(source);
  }

  const { getDownloadStream } = encodingMethods[source];
  const stream = await getDownloadStream(req, file.filepath);
  const buffer = await getStream.buffer(stream);

  return {
    file,
    content: buffer.toString('base64'),
    metadata: {
      file_id: file.file_id,
      temp_file_id: file.temp_file_id,
      filepath: file.filepath,
      source: file.source,
      filename: file.filename,
      type: file.type,
    },
  };
}
74 packages/api/src/files/encode/video.ts Normal file
@@ -0,0 +1,74 @@
import { Providers } from '@librechat/agents';
import { isDocumentSupportedProvider } from 'librechat-data-provider';
import type { IMongoFile } from '@librechat/data-schemas';
import type { Request } from 'express';
import type { StrategyFunctions, VideoResult } from '~/types/files';
import { validateVideo } from '~/files/validation';
import { getFileStream } from './utils';

/**
 * Encodes and formats video files for different providers
 * @param req - The request object
 * @param files - Array of video files
 * @param provider - The provider to format for
 * @param getStrategyFunctions - Function to get strategy functions
 * @returns Promise that resolves to videos and file metadata
 */
export async function encodeAndFormatVideos(
  req: Request,
  files: IMongoFile[],
  provider: Providers,
  getStrategyFunctions: (source: string) => StrategyFunctions,
): Promise<VideoResult> {
  if (!files?.length) {
    return { videos: [], files: [] };
  }

  const encodingMethods: Record<string, StrategyFunctions> = {};
  const result: VideoResult = { videos: [], files: [] };

  const results = await Promise.allSettled(
    files.map((file) => getFileStream(req, file, encodingMethods, getStrategyFunctions)),
  );

  for (const settledResult of results) {
    if (settledResult.status === 'rejected') {
      console.error('Video processing failed:', settledResult.reason);
      continue;
    }

    const processed = settledResult.value;
    if (!processed) continue;

    const { file, content, metadata } = processed;

    if (!content || !file) {
      if (metadata) result.files.push(metadata);
      continue;
    }

    if (!file.type.startsWith('video/') || !isDocumentSupportedProvider(provider)) {
      result.files.push(metadata);
      continue;
    }

    const videoBuffer = Buffer.from(content, 'base64');
    const validation = await validateVideo(videoBuffer, videoBuffer.length, provider);

    if (!validation.isValid) {
      throw new Error(`Video validation failed: ${validation.error}`);
    }

    if (provider === Providers.GOOGLE || provider === Providers.VERTEXAI) {
      result.videos.push({
        type: 'video',
        mimeType: file.type,
        data: content,
      });
    }

    result.files.push(metadata);
  }

  return result;
}
@@ -1,5 +1,7 @@
export * from './audio';
export * from './encode';
export * from './mistral/crud';
export * from './ocr';
export * from './parse';
export * from './validation';
export * from './text';
186 packages/api/src/files/validation.ts Normal file
@@ -0,0 +1,186 @@
import { Providers } from '@librechat/agents';
import { mbToBytes, isOpenAILikeProvider } from 'librechat-data-provider';

export interface PDFValidationResult {
  isValid: boolean;
  error?: string;
}

export interface VideoValidationResult {
  isValid: boolean;
  error?: string;
}

export interface AudioValidationResult {
  isValid: boolean;
  error?: string;
}

export async function validatePdf(
  pdfBuffer: Buffer,
  fileSize: number,
  provider: Providers,
): Promise<PDFValidationResult> {
  if (provider === Providers.ANTHROPIC) {
    return validateAnthropicPdf(pdfBuffer, fileSize);
  }

  if (isOpenAILikeProvider(provider)) {
    return validateOpenAIPdf(fileSize);
  }

  if (provider === Providers.GOOGLE || provider === Providers.VERTEXAI) {
    return validateGooglePdf(fileSize);
  }

  return { isValid: true };
}

/**
 * Validates if a PDF meets Anthropic's requirements
 * @param pdfBuffer - The PDF file as a buffer
 * @param fileSize - The file size in bytes
 * @returns Promise that resolves to validation result
 */
async function validateAnthropicPdf(
  pdfBuffer: Buffer,
  fileSize: number,
): Promise<PDFValidationResult> {
  try {
    if (fileSize > mbToBytes(32)) {
      return {
        isValid: false,
        error: `PDF file size (${Math.round(fileSize / (1024 * 1024))}MB) exceeds Anthropic's 32MB limit`,
      };
    }

    if (!pdfBuffer || pdfBuffer.length < 5) {
      return {
        isValid: false,
        error: 'Invalid PDF file: too small or corrupted',
      };
    }

    const pdfHeader = pdfBuffer.subarray(0, 5).toString();
    if (!pdfHeader.startsWith('%PDF-')) {
      return {
        isValid: false,
        error: 'Invalid PDF file: missing PDF header',
      };
    }

    const pdfContent = pdfBuffer.toString('binary');
    if (
      pdfContent.includes('/Encrypt ') ||
      pdfContent.includes('/U (') ||
      pdfContent.includes('/O (')
    ) {
      return {
        isValid: false,
        error: 'PDF is password-protected or encrypted. Anthropic requires unencrypted PDFs.',
      };
    }

    const pageMatches = pdfContent.match(/\/Type[\s]*\/Page[^s]/g);
    const estimatedPages = pageMatches ? pageMatches.length : 1;

    if (estimatedPages > 100) {
      return {
        isValid: false,
        error: `PDF has approximately ${estimatedPages} pages, exceeding Anthropic's 100-page limit`,
      };
    }

    return { isValid: true };
  } catch (error) {
    console.error('PDF validation error:', error);
    return {
      isValid: false,
      error: 'Failed to validate PDF file',
    };
  }
}

async function validateOpenAIPdf(fileSize: number): Promise<PDFValidationResult> {
  if (fileSize > 10 * 1024 * 1024) {
    return {
      isValid: false,
      error: "PDF file size exceeds OpenAI's 10MB limit",
    };
  }

  return { isValid: true };
}

async function validateGooglePdf(fileSize: number): Promise<PDFValidationResult> {
  if (fileSize > 20 * 1024 * 1024) {
    return {
      isValid: false,
      error: "PDF file size exceeds Google's 20MB limit",
    };
  }

  return { isValid: true };
}

/**
 * Validates video files for different providers
 * @param videoBuffer - The video file as a buffer
 * @param fileSize - The file size in bytes
 * @param provider - The provider to validate for
 * @returns Promise that resolves to validation result
 */
export async function validateVideo(
  videoBuffer: Buffer,
  fileSize: number,
  provider: Providers,
): Promise<VideoValidationResult> {
  if (provider === Providers.GOOGLE || provider === Providers.VERTEXAI) {
    if (fileSize > 20 * 1024 * 1024) {
      return {
        isValid: false,
        error: `Video file size (${Math.round(fileSize / (1024 * 1024))}MB) exceeds Google's 20MB limit`,
      };
    }
  }

  if (!videoBuffer || videoBuffer.length < 10) {
    return {
      isValid: false,
      error: 'Invalid video file: too small or corrupted',
    };
  }

  return { isValid: true };
}

/**
 * Validates audio files for different providers
 * @param audioBuffer - The audio file as a buffer
 * @param fileSize - The file size in bytes
 * @param provider - The provider to validate for
 * @returns Promise that resolves to validation result
 */
export async function validateAudio(
  audioBuffer: Buffer,
  fileSize: number,
  provider: Providers,
): Promise<AudioValidationResult> {
  if (provider === Providers.GOOGLE || provider === Providers.VERTEXAI) {
    if (fileSize > 20 * 1024 * 1024) {
      return {
        isValid: false,
        error: `Audio file size (${Math.round(fileSize / (1024 * 1024))}MB) exceeds Google's 20MB limit`,
      };
    }
  }

  if (!audioBuffer || audioBuffer.length < 10) {
    return {
      isValid: false,
      error: 'Invalid audio file: too small or corrupted',
    };
  }

  return { isValid: true };
}
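A hedged example of calling a validator directly; the buffer stub below carries a %PDF- header so it passes the structural checks, but it is illustrative, not a real document:

import { Providers } from '@librechat/agents';
import { validatePdf } from './validation';

const pdfBuffer = Buffer.from('%PDF-1.7\n...'); // illustrative stub
const check = await validatePdf(pdfBuffer, pdfBuffer.length, Providers.ANTHROPIC);
if (!check.isValid) {
  throw new Error(check.error); // size, encryption, or page-count limits
}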
@@ -1,4 +1,5 @@
export * from './app';
export * from './cdn';
/* Auth */
export * from './auth';
/* MCP */

@@ -35,6 +36,8 @@ export * from './files';
export * from './tools';
/* web search */
export * from './web';
/* Cache */
export * from './cache';
/* types */
export type * from './mcp/types';
export type * from './flow/types';
@ -38,7 +38,7 @@ export class MCPManager extends UserConnectionManager {
|
|||
/** Initializes the MCPManager by setting up server registry and app connections */
|
||||
public async initialize() {
|
||||
await this.serversRegistry.initialize();
|
||||
this.appConnections = new ConnectionsRepository(this.serversRegistry.appServerConfigs!);
|
||||
this.appConnections = new ConnectionsRepository(this.serversRegistry.appServerConfigs);
|
||||
}
|
||||
|
||||
/** Retrieves an app-level or user-specific connection based on provided arguments */
|
||||
|
|
@ -63,22 +63,23 @@ export class MCPManager extends UserConnectionManager {
|
|||
}
|
||||
|
||||
/** Get servers that require OAuth */
|
||||
public getOAuthServers(): Set<string> | null {
|
||||
return this.serversRegistry.oauthServers!;
|
||||
public getOAuthServers(): Set<string> {
|
||||
return this.serversRegistry.oauthServers;
|
||||
}
|
||||
|
||||
/** Get all servers */
|
||||
public getAllServers(): t.MCPServers | null {
|
||||
return this.serversRegistry.rawConfigs!;
|
||||
public getAllServers(): t.MCPServers {
|
||||
return this.serversRegistry.rawConfigs;
|
||||
}
|
||||
|
||||
/** Returns all available tool functions from app-level connections */
|
||||
public getAppToolFunctions(): t.LCAvailableTools | null {
|
||||
return this.serversRegistry.toolFunctions!;
|
||||
public getAppToolFunctions(): t.LCAvailableTools {
|
||||
return this.serversRegistry.toolFunctions;
|
||||
}
|
||||
|
||||
/** Returns all available tool functions from all connections available to user */
|
||||
public async getAllToolFunctions(userId: string): Promise<t.LCAvailableTools | null> {
|
||||
const allToolFunctions: t.LCAvailableTools = this.getAppToolFunctions() ?? {};
|
||||
const allToolFunctions: t.LCAvailableTools = this.getAppToolFunctions();
|
||||
const userConnections = this.getUserConnections(userId);
|
||||
if (!userConnections || userConnections.size === 0) {
|
||||
return allToolFunctions;
|
||||
|
|
@ -96,22 +97,30 @@ export class MCPManager extends UserConnectionManager {
|
|||
userId: string,
|
||||
serverName: string,
|
||||
): Promise<t.LCAvailableTools | null> {
|
||||
if (this.appConnections?.has(serverName)) {
|
||||
return this.serversRegistry.getToolFunctions(
|
||||
serverName,
|
||||
await this.appConnections.get(serverName),
|
||||
try {
|
||||
if (this.appConnections?.has(serverName)) {
|
||||
return this.serversRegistry.getToolFunctions(
|
||||
serverName,
|
||||
await this.appConnections.get(serverName),
|
||||
);
|
||||
}
|
||||
|
||||
const userConnections = this.getUserConnections(userId);
|
||||
if (!userConnections || userConnections.size === 0) {
|
||||
return null;
|
||||
}
|
||||
if (!userConnections.has(serverName)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.serversRegistry.getToolFunctions(serverName, userConnections.get(serverName)!);
|
||||
} catch (error) {
|
||||
logger.warn(
|
||||
`[getServerToolFunctions] Error getting tool functions for server ${serverName}`,
|
||||
error,
|
||||
);
|
||||
}
|
||||
|
||||
const userConnections = this.getUserConnections(userId);
|
||||
if (!userConnections || userConnections.size === 0) {
|
||||
return null;
|
||||
}
|
||||
if (!userConnections.has(serverName)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.serversRegistry.getToolFunctions(serverName, userConnections.get(serverName)!);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -120,7 +129,7 @@ export class MCPManager extends UserConnectionManager {
|
|||
* @returns Object mapping server names to their instructions
|
||||
*/
|
||||
public getInstructions(serverNames?: string[]): Record<string, string> {
|
||||
const instructions = this.serversRegistry.serverInstructions!;
|
||||
const instructions = this.serversRegistry.serverInstructions;
|
||||
if (!serverNames) return instructions;
|
||||
return pick(instructions, serverNames);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,16 +1,22 @@
|
|||
import pick from 'lodash/pick';
|
||||
import pickBy from 'lodash/pickBy';
|
||||
import mapValues from 'lodash/mapValues';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import { Constants } from 'librechat-data-provider';
|
||||
import type { JsonSchemaType } from '@librechat/data-schemas';
|
||||
import type { MCPConnection } from '~/mcp/connection';
|
||||
import type { JsonSchemaType } from '~/types';
|
||||
import type * as t from '~/mcp/types';
|
||||
import { ConnectionsRepository } from '~/mcp/ConnectionsRepository';
|
||||
import { detectOAuthRequirement } from '~/mcp/oauth';
|
||||
import { sanitizeUrlForLogging } from '~/mcp/utils';
|
||||
import { processMCPEnv, isEnabled } from '~/utils';
|
||||
|
||||
const DEFAULT_MCP_INIT_TIMEOUT_MS = 30_000;
|
||||
|
||||
function getMCPInitTimeout(): number {
|
||||
return process.env.MCP_INIT_TIMEOUT_MS != null
|
||||
? parseInt(process.env.MCP_INIT_TIMEOUT_MS)
|
||||
: DEFAULT_MCP_INIT_TIMEOUT_MS;
|
||||
}
|
||||
|
||||
/**
 * Manages MCP server configurations and metadata discovery.
 * Fetches server capabilities, OAuth requirements, and tool definitions for registry.
 */
@@ -20,19 +26,21 @@ import { processMCPEnv, isEnabled } from '~/utils';
 export class MCPServersRegistry {
   private initialized: boolean = false;
   private connections: ConnectionsRepository;
+  private initTimeoutMs: number;

   public readonly rawConfigs: t.MCPServers;
   public readonly parsedConfigs: Record<string, t.ParsedServerConfig>;

-  public oauthServers: Set<string> | null = null;
-  public serverInstructions: Record<string, string> | null = null;
-  public toolFunctions: t.LCAvailableTools | null = null;
-  public appServerConfigs: t.MCPServers | null = null;
+  public oauthServers: Set<string> = new Set();
+  public serverInstructions: Record<string, string> = {};
+  public toolFunctions: t.LCAvailableTools = {};
+  public appServerConfigs: t.MCPServers = {};

   constructor(configs: t.MCPServers) {
     this.rawConfigs = configs;
     this.parsedConfigs = mapValues(configs, (con) => processMCPEnv({ options: con }));
     this.connections = new ConnectionsRepository(configs);
+    this.initTimeoutMs = getMCPInitTimeout();
   }

   /** Initializes all startup-enabled servers by gathering their metadata asynchronously */
@@ -42,21 +50,43 @@ export class MCPServersRegistry {

     const serverNames = Object.keys(this.parsedConfigs);

-    await Promise.allSettled(serverNames.map((serverName) => this.gatherServerInfo(serverName)));
-
-    this.setOAuthServers();
-    this.setServerInstructions();
-    this.setAppServerConfigs();
-    await this.setAppToolFunctions();
-
-    this.connections.disconnectAll();
+    await Promise.allSettled(
+      serverNames.map((serverName) => this.initializeServerWithTimeout(serverName)),
+    );
   }

-  /** Fetches all metadata for a single server in parallel */
-  private async gatherServerInfo(serverName: string): Promise<void> {
+  /** Wraps server initialization with a timeout to prevent hanging */
+  private async initializeServerWithTimeout(serverName: string): Promise<void> {
+    let timeoutId: NodeJS.Timeout | null = null;
+
+    try {
+      await Promise.race([
+        this.initializeServer(serverName),
+        new Promise<never>((_, reject) => {
+          timeoutId = setTimeout(() => {
+            reject(new Error('Server initialization timed out'));
+          }, this.initTimeoutMs);
+        }),
+      ]);
+    } catch (error) {
+      logger.warn(`${this.prefix(serverName)} Server initialization failed:`, error);
+      throw error;
+    } finally {
+      if (timeoutId != null) {
+        clearTimeout(timeoutId);
+      }
+    }
+  }
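The wrapper above is an instance of the Promise.race timeout pattern: race the real work against a rejecting timer, and clear the timer in `finally` so a completed race does not leave a live timeout behind. A standalone sketch of the same pattern (`withTimeout` is an illustrative name, not from the diff):

  async function withTimeout<T>(work: Promise<T>, ms: number, label: string): Promise<T> {
    let timer: NodeJS.Timeout | null = null;
    try {
      return await Promise.race([
        work,
        new Promise<never>((_, reject) => {
          timer = setTimeout(() => reject(new Error(`${label} timed out`)), ms);
        }),
      ]);
    } finally {
      if (timer != null) {
        clearTimeout(timer); // stop the loser of the race from lingering
      }
    }
  }

Note that Promise.race does not cancel the losing promise — a hung initialization keeps running in the background; the race only stops it from blocking startup.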
+  /** Initializes a single server with all its metadata and adds it to appropriate collections */
+  private async initializeServer(serverName: string): Promise<void> {
+    const start = Date.now();
+
+    const config = this.parsedConfigs[serverName];
+
+    // 1. Detect OAuth requirements if not already specified
     try {
       await this.fetchOAuthRequirement(serverName);
-      const config = this.parsedConfigs[serverName];

       if (config.startup !== false && !config.requiresOAuth) {
         await Promise.allSettled([
@@ -68,54 +98,49 @@ export class MCPServersRegistry {
           ),
         ]);
       }
-
-      this.logUpdatedConfig(serverName);
     } catch (error) {
       logger.warn(`${this.prefix(serverName)} Failed to initialize server:`, error);
     }
-  }
-
-  /** Sets app-level server configs (startup enabled, non-OAuth servers) */
-  private setAppServerConfigs(): void {
-    const appServers = Object.keys(
-      pickBy(
-        this.parsedConfigs,
-        (config) => config.startup !== false && config.requiresOAuth === false,
-      ),
-    );
-    this.appServerConfigs = pick(this.rawConfigs, appServers);
-  }
-
-  /** Creates set of server names that require OAuth authentication */
-  private setOAuthServers(): Set<string> {
-    if (this.oauthServers) return this.oauthServers;
-    this.oauthServers = new Set(
-      Object.keys(pickBy(this.parsedConfigs, (config) => config.requiresOAuth)),
-    );
-    return this.oauthServers;
-  }
-
-  /** Collects server instructions from all configured servers */
-  private setServerInstructions(): void {
-    this.serverInstructions = mapValues(
-      pickBy(this.parsedConfigs, (config) => config.serverInstructions),
-      (config) => config.serverInstructions as string,
-    );
-  }
-
-  /** Builds registry of all available tool functions from loaded connections */
-  private async setAppToolFunctions(): Promise<void> {
-    const connections = (await this.connections.getLoaded()).entries();
-    const allToolFunctions: t.LCAvailableTools = {};
-    for (const [serverName, conn] of connections) {
-      try {
-        const toolFunctions = await this.getToolFunctions(serverName, conn);
-        Object.assign(allToolFunctions, toolFunctions);
-      } catch (error) {
-        logger.warn(`${this.prefix(serverName)} Error fetching tool functions:`, error);
-      }
-    }
-    this.toolFunctions = allToolFunctions;
-  }
+
+    // 2. Fetch tool functions for this server if a connection was established
+    const getToolFunctions = async (): Promise<t.LCAvailableTools | null> => {
+      try {
+        const loadedConns = await this.connections.getLoaded();
+        const conn = loadedConns.get(serverName);
+        if (conn == null) {
+          return null;
+        }
+        return this.getToolFunctions(serverName, conn);
+      } catch (error) {
+        logger.warn(`${this.prefix(serverName)} Error fetching tool functions:`, error);
+        return null;
+      }
+    };
+    const toolFunctions = await getToolFunctions();
+
+    // 3. Disconnect this server's connection if it was established (fire-and-forget)
+    void this.connections.disconnect(serverName);
+
+    // 4. Side effects
+    // 4.1 Add to OAuth servers if needed
+    if (config.requiresOAuth) {
+      this.oauthServers.add(serverName);
+    }
+    // 4.2 Add server instructions if available
+    if (config.serverInstructions != null) {
+      this.serverInstructions[serverName] = config.serverInstructions as string;
+    }
+    // 4.3 Add to app server configs if eligible (startup enabled, non-OAuth servers)
+    if (config.startup !== false && config.requiresOAuth === false) {
+      this.appServerConfigs[serverName] = this.rawConfigs[serverName];
+    }
+    // 4.4 Add tool functions if available
+    if (toolFunctions != null) {
+      Object.assign(this.toolFunctions, toolFunctions);
+    }
+
+    const duration = Date.now() - start;
+    this.logUpdatedConfig(serverName, duration);
   }

   /** Converts server tools to LibreChat-compatible tool functions format */
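The rewritten initialize() relies on Promise.allSettled semantics: unlike Promise.all, a rejected element does not short-circuit the rest, so one bad server cannot abort the whole registry. A minimal standalone illustration:

  const results = await Promise.allSettled([
    Promise.resolve('server_a ok'),
    Promise.reject(new Error('server_b failed')),
  ]);
  // results[0]: { status: 'fulfilled', value: 'server_a ok' }
  // results[1]: { status: 'rejected', reason: Error('server_b failed') }
  // Execution reaches this line either way; failures are inspected, never thrown.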
@@ -185,7 +210,7 @@ export class MCPServersRegistry {
   }

   // Logs server configuration summary after initialization
-  private logUpdatedConfig(serverName: string): void {
+  private logUpdatedConfig(serverName: string, initDuration: number): void {
     const prefix = this.prefix(serverName);
     const config = this.parsedConfigs[serverName];
     logger.info(`${prefix} -------------------------------------------------┐`);
@@ -194,6 +219,7 @@ export class MCPServersRegistry {
     logger.info(`${prefix} Capabilities: ${config.capabilities}`);
     logger.info(`${prefix} Tools: ${config.tools}`);
     logger.info(`${prefix} Server Instructions: ${config.serverInstructions}`);
+    logger.info(`${prefix} Initialized in: ${initDuration}ms`);
     logger.info(`${prefix} -------------------------------------------------┘`);
   }
169  packages/api/src/mcp/__tests__/MCPManager.test.ts  Normal file
@@ -0,0 +1,169 @@
import { logger } from '@librechat/data-schemas';
import type * as t from '~/mcp/types';
import { MCPManager } from '~/mcp/MCPManager';
import { MCPServersRegistry } from '~/mcp/MCPServersRegistry';
import { ConnectionsRepository } from '~/mcp/ConnectionsRepository';
import { MCPConnection } from '../connection';

// Mock external dependencies
jest.mock('@librechat/data-schemas', () => ({
  logger: {
    info: jest.fn(),
    warn: jest.fn(),
    error: jest.fn(),
    debug: jest.fn(),
  },
}));

jest.mock('~/mcp/MCPServersRegistry');
jest.mock('~/mcp/ConnectionsRepository');

const mockLogger = logger as jest.Mocked<typeof logger>;

describe('MCPManager', () => {
  const userId = 'test-user-123';
  const serverName = 'test_server';

  beforeEach(() => {
    // Reset MCPManager singleton state
    (MCPManager as unknown as { instance: null }).instance = null;
    jest.clearAllMocks();
  });

  function mockRegistry(
    registryConfig: Partial<MCPServersRegistry>,
  ): jest.MockedClass<typeof MCPServersRegistry> {
    const mock = {
      initialize: jest.fn().mockResolvedValue(undefined),
      getToolFunctions: jest.fn().mockResolvedValue(null),
      ...registryConfig,
    };
    return (MCPServersRegistry as jest.MockedClass<typeof MCPServersRegistry>).mockImplementation(
      () => mock as unknown as MCPServersRegistry,
    );
  }

  function mockAppConnections(
    appConnectionsConfig: Partial<ConnectionsRepository>,
  ): jest.MockedClass<typeof ConnectionsRepository> {
    const mock = {
      has: jest.fn().mockReturnValue(false),
      get: jest.fn().mockResolvedValue({} as unknown as MCPConnection),
      ...appConnectionsConfig,
    };
    return (
      ConnectionsRepository as jest.MockedClass<typeof ConnectionsRepository>
    ).mockImplementation(() => mock as unknown as ConnectionsRepository);
  }

  function newMCPServersConfig(serverNameOverride?: string): t.MCPServers {
    return {
      [serverNameOverride ?? serverName]: {
        type: 'stdio',
        command: 'test',
        args: [],
      },
    };
  }

  describe('getServerToolFunctions', () => {
    it('should catch and handle errors gracefully', async () => {
      mockRegistry({
        getToolFunctions: jest.fn(() => {
          throw new Error('Connection failed');
        }),
      });

      mockAppConnections({
        has: jest.fn().mockReturnValue(true),
      });

      const manager = await MCPManager.createInstance(newMCPServersConfig());

      const result = await manager.getServerToolFunctions(userId, serverName);

      expect(result).toBeNull();
      expect(mockLogger.warn).toHaveBeenCalledWith(
        `[getServerToolFunctions] Error getting tool functions for server ${serverName}`,
        expect.any(Error),
      );
    });

    it('should catch synchronous errors from getUserConnections', async () => {
      mockRegistry({
        getToolFunctions: jest.fn().mockResolvedValue({}),
      });

      mockAppConnections({
        has: jest.fn().mockReturnValue(false),
      });

      const manager = await MCPManager.createInstance(newMCPServersConfig());

      const spy = jest.spyOn(manager, 'getUserConnections').mockImplementation(() => {
        throw new Error('Failed to get user connections');
      });

      const result = await manager.getServerToolFunctions(userId, serverName);

      expect(result).toBeNull();
      expect(mockLogger.warn).toHaveBeenCalledWith(
        `[getServerToolFunctions] Error getting tool functions for server ${serverName}`,
        expect.any(Error),
      );
      expect(spy).toHaveBeenCalled();
    });

    it('should return tools successfully when no errors occur', async () => {
      const expectedTools: t.LCAvailableTools = {
        [`test_tool_mcp_${serverName}`]: {
          type: 'function',
          function: {
            name: `test_tool_mcp_${serverName}`,
            description: 'Test tool',
            parameters: { type: 'object' },
          },
        },
      };

      mockRegistry({
        getToolFunctions: jest.fn().mockResolvedValue(expectedTools),
      });

      mockAppConnections({
        has: jest.fn().mockReturnValue(true),
      });

      const manager = await MCPManager.createInstance(newMCPServersConfig());

      const result = await manager.getServerToolFunctions(userId, serverName);

      expect(result).toEqual(expectedTools);
      expect(mockLogger.warn).not.toHaveBeenCalled();
    });

    it('should include specific server name in error messages', async () => {
      const specificServerName = 'github_mcp_server';

      mockRegistry({
        getToolFunctions: jest.fn(() => {
          throw new Error('Server specific error');
        }),
      });

      mockAppConnections({
        has: jest.fn().mockReturnValue(true),
      });

      const manager = await MCPManager.createInstance(newMCPServersConfig(specificServerName));

      const result = await manager.getServerToolFunctions(userId, specificServerName);

      expect(result).toBeNull();
      expect(mockLogger.warn).toHaveBeenCalledWith(
        `[getServerToolFunctions] Error getting tool functions for server ${specificServerName}`,
        expect.any(Error),
      );
    });
  });
});
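A note on the beforeEach trick in the new suite: MCPManager is a singleton, so each test nulls out its private static `instance` through a cast; without that reset, the first createInstance call would leak state into every later test. The same pattern in isolation (a sketch assuming a class with a private static `instance` field):

  class Singleton {
    private static instance: Singleton | null = null;
    static getInstance(): Singleton {
      return (Singleton.instance ??= new Singleton());
    }
  }

  beforeEach(() => {
    // Cast around the `private` modifier to force a fresh instance per test.
    (Singleton as unknown as { instance: null }).instance = null;
  });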
@@ -113,6 +113,7 @@ describe('MCPServersRegistry - Initialize Function', () => {
       get: jest.fn(),
       getLoaded: jest.fn(),
       disconnectAll: jest.fn(),
+      disconnect: jest.fn().mockResolvedValue(undefined),
     } as unknown as jest.Mocked<ConnectionsRepository>;

     mockConnectionsRepo.get.mockImplementation((serverName: string) => {
@@ -160,6 +161,7 @@ describe('MCPServersRegistry - Initialize Function', () => {
   });

   afterEach(() => {
+    delete process.env.MCP_INIT_TIMEOUT_MS;
     jest.clearAllMocks();
   });
|
|||
const registry = new MCPServersRegistry(rawConfigs);
|
||||
|
||||
// Verify initial state
|
||||
expect(registry.oauthServers).toBeNull();
|
||||
expect(registry.serverInstructions).toBeNull();
|
||||
expect(registry.toolFunctions).toBeNull();
|
||||
expect(registry.appServerConfigs).toBeNull();
|
||||
expect(registry.oauthServers.size).toBe(0);
|
||||
expect(registry.serverInstructions).toEqual({});
|
||||
expect(registry.toolFunctions).toEqual({});
|
||||
expect(registry.appServerConfigs).toEqual({});
|
||||
|
||||
await registry.initialize();
|
||||
|
||||
// Test oauthServers Set
|
||||
expect(registry.oauthServers).toBeInstanceOf(Set);
|
||||
expect(registry.oauthServers).toEqual(
|
||||
new Set(['oauth_server', 'oauth_predefined', 'oauth_startup_enabled']),
|
||||
);
|
||||
|
|
@@ -228,18 +229,49 @@ describe('MCPServersRegistry - Initialize Function', () => {
     expect(registry.toolFunctions).toEqual(expectedToolFunctions);
   });

-  it('should handle errors gracefully and continue initialization', async () => {
+  it('should handle errors gracefully and continue initialization of other servers', async () => {
     const registry = new MCPServersRegistry(rawConfigs);

-    // Make one server throw an error
-    mockDetectOAuthRequirement.mockRejectedValueOnce(new Error('OAuth detection failed'));
+    // Make one specific server throw an error during OAuth detection
+    mockDetectOAuthRequirement.mockImplementation((url: string) => {
+      if (url === 'https://api.github.com/mcp') {
+        return Promise.reject(new Error('OAuth detection failed'));
+      }
+      // Return normal responses for other servers
+      const oauthResults: Record<string, OAuthDetectionResult> = {
+        'https://api.disabled.com/mcp': {
+          requiresOAuth: false,
+          method: 'no-metadata-found',
+          metadata: null,
+        },
+        'https://api.public.com/mcp': {
+          requiresOAuth: false,
+          method: 'no-metadata-found',
+          metadata: null,
+        },
+      };
+      return Promise.resolve(
+        oauthResults[url] ?? {
+          requiresOAuth: false,
+          method: 'no-metadata-found',
+          metadata: null,
+        },
+      );
+    });

     await registry.initialize();

-    // Should still initialize successfully
+    // Should still initialize successfully for other servers
     expect(registry.oauthServers).toBeInstanceOf(Set);
     expect(registry.toolFunctions).toBeDefined();

+    // The failed server should not be in oauthServers (since it failed OAuth detection)
+    expect(registry.oauthServers.has('oauth_server')).toBe(false);
+
+    // But other servers should still be processed successfully
+    expect(registry.appServerConfigs).toHaveProperty('stdio_server');
+    expect(registry.appServerConfigs).toHaveProperty('non_oauth_server');
+
+    // Error should be logged as a warning at the higher level
     expect(mockLogger.warn).toHaveBeenCalledWith(
       expect.stringContaining('[MCP][oauth_server] Failed to initialize server:'),
@@ -247,12 +279,15 @@ describe('MCPServersRegistry - Initialize Function', () => {
     );
   });

-  it('should disconnect all connections after initialization', async () => {
+  it('should disconnect individual connections after each server initialization', async () => {
     const registry = new MCPServersRegistry(rawConfigs);

     await registry.initialize();

-    expect(mockConnectionsRepo.disconnectAll).toHaveBeenCalledTimes(1);
+    // Verify disconnect was called for each server during initialization
+    // All servers attempt to connect during initialization for metadata gathering
+    const serverNames = Object.keys(rawConfigs);
+    expect(mockConnectionsRepo.disconnect).toHaveBeenCalledTimes(serverNames.length);
   });

   it('should log configuration updates for each startup-enabled server', async () => {
@@ -357,5 +392,204 @@ describe('MCPServersRegistry - Initialize Function', () => {
     // Verify getInstructions was called for both "true" cases
     expect(mockClient.getInstructions).toHaveBeenCalledTimes(2);
   });
+
+  it('should use Promise.allSettled for individual server initialization', async () => {
+    const registry = new MCPServersRegistry(rawConfigs);
+
+    // Spy on Promise.allSettled to verify it's being used
+    const allSettledSpy = jest.spyOn(Promise, 'allSettled');
+
+    await registry.initialize();
+
+    // Verify Promise.allSettled was called with an array of server initialization promises
+    expect(allSettledSpy).toHaveBeenCalledWith(expect.arrayContaining([expect.any(Promise)]));
+
+    // Verify it was called with the correct number of server promises
+    const serverNames = Object.keys(rawConfigs);
+    expect(allSettledSpy).toHaveBeenCalledWith(
+      expect.arrayContaining(new Array(serverNames.length).fill(expect.any(Promise))),
+    );
+
+    allSettledSpy.mockRestore();
+  });
+
+  it('should isolate server failures and not affect other servers', async () => {
+    const registry = new MCPServersRegistry(rawConfigs);
+
+    // Make multiple servers fail in different ways
+    mockConnectionsRepo.get.mockImplementation((serverName: string) => {
+      if (serverName === 'stdio_server') {
+        // First server fails
+        throw new Error('Connection failed for stdio_server');
+      }
+      if (serverName === 'websocket_server') {
+        // Second server fails
+        throw new Error('Connection failed for websocket_server');
+      }
+      // Other servers succeed
+      const connection = mockConnections.get(serverName);
+      if (!connection) {
+        throw new Error(`Connection not found for server: ${serverName}`);
+      }
+      return Promise.resolve(connection);
+    });
+
+    await registry.initialize();
+
+    // Despite failures, initialization should complete
+    expect(registry.oauthServers).toBeInstanceOf(Set);
+    expect(registry.toolFunctions).toBeDefined();
+
+    // Successful servers should still be processed
+    expect(registry.appServerConfigs).toHaveProperty('non_oauth_server');
+
+    // Failed servers should not crash the whole initialization
+    expect(mockLogger.warn).toHaveBeenCalledWith(
+      expect.stringContaining('[MCP][stdio_server] Failed to fetch server capabilities:'),
+      expect.any(Error),
+    );
+    expect(mockLogger.warn).toHaveBeenCalledWith(
+      expect.stringContaining('[MCP][websocket_server] Failed to fetch server capabilities:'),
+      expect.any(Error),
+    );
+  });
+
+  it('should properly clean up connections even when some servers fail', async () => {
+    const registry = new MCPServersRegistry(rawConfigs);
+
+    // Track disconnect failures but suppress unhandled rejections
+    const disconnectErrors: Error[] = [];
+    mockConnectionsRepo.disconnect.mockImplementation((serverName: string) => {
+      if (serverName === 'stdio_server') {
+        const error = new Error('Disconnect failed');
+        disconnectErrors.push(error);
+        return Promise.reject(error).catch(() => {}); // Suppress unhandled rejection
+      }
+      return Promise.resolve();
+    });
+
+    await registry.initialize();
+
+    // Should still attempt to disconnect all servers during initialization
+    const serverNames = Object.keys(rawConfigs);
+    expect(mockConnectionsRepo.disconnect).toHaveBeenCalledTimes(serverNames.length);
+    expect(disconnectErrors).toHaveLength(1);
+  });
+
+  it('should timeout individual server initialization after configured timeout', async () => {
+    const timeout = 2000;
+    // Create registry with a short timeout for testing
+    process.env.MCP_INIT_TIMEOUT_MS = `${timeout}`;
+
+    const registry = new MCPServersRegistry(rawConfigs);
+
+    // Make one server hang indefinitely during OAuth detection
+    mockDetectOAuthRequirement.mockImplementation((url: string) => {
+      if (url === 'https://api.github.com/mcp') {
+        // Slow init
+        return new Promise((res) => setTimeout(res, timeout * 2));
+      }
+      // Return normal responses for other servers
+      return Promise.resolve({
+        requiresOAuth: false,
+        method: 'no-metadata-found',
+        metadata: null,
+      });
+    });
+
+    const start = Date.now();
+    await registry.initialize();
+    const duration = Date.now() - start;
+
+    // Should complete within reasonable time despite one server hanging
+    // Allow some buffer for test execution overhead
+    expect(duration).toBeLessThan(timeout * 1.5);
+
+    // The timeout should prevent the hanging server from blocking initialization
+    // Other servers should still be processed successfully
+    expect(registry.appServerConfigs).toHaveProperty('stdio_server');
+    expect(registry.appServerConfigs).toHaveProperty('non_oauth_server');
+  }, 10_000); // 10 second Jest timeout
+
+  it('should skip tool function fetching if connection was not established', async () => {
+    const testConfig: t.MCPServers = {
+      server_with_connection: {
+        type: 'stdio',
+        args: [],
+        command: 'test-command',
+      },
+      server_without_connection: {
+        type: 'stdio',
+        args: [],
+        command: 'failing-command',
+      },
+    };
+
+    const registry = new MCPServersRegistry(testConfig);
+
+    const mockClient = {
+      listTools: jest.fn().mockResolvedValue({
+        tools: [
+          {
+            name: 'test_tool',
+            description: 'Test tool',
+            inputSchema: { type: 'object', properties: {} },
+          },
+        ],
+      }),
+      getInstructions: jest.fn().mockReturnValue(undefined),
+      getServerCapabilities: jest.fn().mockReturnValue({ tools: {} }),
+    };
+    const mockConnection = {
+      client: mockClient,
+    } as unknown as jest.Mocked<MCPConnection>;
+
+    mockConnectionsRepo.get.mockImplementation((serverName: string) => {
+      if (serverName === 'server_with_connection') {
+        return Promise.resolve(mockConnection);
+      }
+      throw new Error('Connection failed');
+    });
+
+    // Mock getLoaded to return connections map - the real implementation returns all loaded connections at once
+    mockConnectionsRepo.getLoaded.mockResolvedValue(
+      new Map([['server_with_connection', mockConnection]]),
+    );
+
+    mockDetectOAuthRequirement.mockResolvedValue({
+      requiresOAuth: false,
+      method: 'no-metadata-found',
+      metadata: null,
+    });
+
+    await registry.initialize();
+
+    expect(registry.toolFunctions).toHaveProperty('test_tool_mcp_server_with_connection');
+    expect(Object.keys(registry.toolFunctions)).toHaveLength(1);
+  });
+
+  it('should handle getLoaded returning empty map gracefully', async () => {
+    const testConfig: t.MCPServers = {
+      test_server: {
+        type: 'stdio',
+        args: [],
+        command: 'test-command',
+      },
+    };
+
+    const registry = new MCPServersRegistry(testConfig);
+
+    mockConnectionsRepo.get.mockRejectedValue(new Error('All connections failed'));
+    mockConnectionsRepo.getLoaded.mockResolvedValue(new Map());
+    mockDetectOAuthRequirement.mockResolvedValue({
+      requiresOAuth: false,
+      method: 'no-metadata-found',
+      metadata: null,
+    });
+
+    await registry.initialize();
+
+    expect(registry.toolFunctions).toEqual({});
+  });
   });
 });
@@ -1,7 +1,7 @@
 /* eslint-disable @typescript-eslint/no-explicit-any */
 // zod.spec.ts
 import { z } from 'zod';
-import type { JsonSchemaType } from '~/types';
+import type { JsonSchemaType } from '@librechat/data-schemas';
 import { resolveJsonSchemaRefs, convertJsonSchemaToZod, convertWithResolvedRefs } from '../zod';

 describe('convertJsonSchemaToZod', () => {
@@ -72,7 +72,7 @@ export class OAuthReconnectionManager {

     // 1. derive the servers to reconnect
     const serversToReconnect = [];
-    for (const serverName of this.mcpManager.getOAuthServers() ?? []) {
+    for (const serverName of this.mcpManager.getOAuthServers()) {
       const canReconnect = await this.canReconnect(userId, serverName);
       if (canReconnect) {
         serversToReconnect.push(serverName);
@@ -10,9 +10,8 @@ import {
 } from 'librechat-data-provider';
 import type { SearchResultData, UIResource, TPlugin, TUser } from 'librechat-data-provider';
 import type * as t from '@modelcontextprotocol/sdk/types.js';
-import type { TokenMethods } from '@librechat/data-schemas';
+import type { TokenMethods, JsonSchemaType } from '@librechat/data-schemas';
 import type { FlowStateManager } from '~/flow/manager';
-import type { JsonSchemaType } from '~/types/zod';
 import type { RequestBody } from '~/types/http';
 import type * as o from '~/mcp/oauth/types';
@@ -1,5 +1,5 @@
 import { z } from 'zod';
-import type { JsonSchemaType, ConvertJsonSchemaToZodOptions } from '~/types';
+import type { JsonSchemaType, ConvertJsonSchemaToZodOptions } from '@librechat/data-schemas';

 function isEmptyObjectSchema(jsonSchema?: JsonSchemaType): boolean {
   return (
@@ -1,8 +1,8 @@
 import { logger } from '@librechat/data-schemas';
 import type { NextFunction, Request as ServerRequest, Response as ServerResponse } from 'express';
-import type { IBalance, IUser, BalanceConfig, ObjectId } from '@librechat/data-schemas';
+import type { IBalance, IUser, BalanceConfig, ObjectId, AppConfig } from '@librechat/data-schemas';
 import type { Model } from 'mongoose';
-import type { AppConfig, BalanceUpdateFields } from '~/types';
+import type { BalanceUpdateFields } from '~/types';
 import { getBalanceConfig } from '~/app/config';

 export interface BalanceMiddlewareOptions {
@@ -1,92 +0,0 @@
-import type {
-  TEndpoint,
-  FileSources,
-  TAzureConfig,
-  TCustomConfig,
-  TMemoryConfig,
-  EModelEndpoint,
-  TAgentsEndpoint,
-  TCustomEndpoints,
-  TAssistantEndpoint,
-} from 'librechat-data-provider';
-import type { FunctionTool } from './tools';
-
-/**
- * Application configuration object
- * Based on the configuration defined in api/server/services/Config/getAppConfig.js
- */
-export interface AppConfig {
-  /** The main custom configuration */
-  config: TCustomConfig;
-  /** OCR configuration */
-  ocr?: TCustomConfig['ocr'];
-  /** File paths configuration */
-  paths: {
-    uploads: string;
-    imageOutput: string;
-    publicPath: string;
-    [key: string]: string;
-  };
-  /** Memory configuration */
-  memory?: TMemoryConfig;
-  /** Web search configuration */
-  webSearch?: TCustomConfig['webSearch'];
-  /** File storage strategy ('local', 's3', 'firebase', 'azure_blob') */
-  fileStrategy: FileSources.local | FileSources.s3 | FileSources.firebase | FileSources.azure_blob;
-  /** File strategies configuration */
-  fileStrategies: TCustomConfig['fileStrategies'];
-  /** Registration configurations */
-  registration?: TCustomConfig['registration'];
-  /** Actions configurations */
-  actions?: TCustomConfig['actions'];
-  /** Admin-filtered tools */
-  filteredTools?: string[];
-  /** Admin-included tools */
-  includedTools?: string[];
-  /** Image output type configuration */
-  imageOutputType: string;
-  /** Interface configuration */
-  interfaceConfig?: TCustomConfig['interface'];
-  /** Turnstile configuration */
-  turnstileConfig?: TCustomConfig['turnstile'];
-  /** Balance configuration */
-  balance?: TCustomConfig['balance'];
-  /** Transactions configuration */
-  transactions?: TCustomConfig['transactions'];
-  /** Speech configuration */
-  speech?: TCustomConfig['speech'];
-  /** MCP server configuration */
-  mcpConfig?: TCustomConfig['mcpServers'] | null;
-  /** File configuration */
-  fileConfig?: TCustomConfig['fileConfig'];
-  /** Secure image links configuration */
-  secureImageLinks?: TCustomConfig['secureImageLinks'];
-  /** Processed model specifications */
-  modelSpecs?: TCustomConfig['modelSpecs'];
-  /** Available tools */
-  availableTools?: Record<string, FunctionTool>;
-  endpoints?: {
-    /** OpenAI endpoint configuration */
-    openAI?: TEndpoint;
-    /** Google endpoint configuration */
-    google?: TEndpoint;
-    /** Bedrock endpoint configuration */
-    bedrock?: TEndpoint;
-    /** Anthropic endpoint configuration */
-    anthropic?: TEndpoint;
-    /** GPT plugins endpoint configuration */
-    gptPlugins?: TEndpoint;
-    /** Azure OpenAI endpoint configuration */
-    azureOpenAI?: TAzureConfig;
-    /** Assistants endpoint configuration */
-    assistants?: TAssistantEndpoint;
-    /** Azure assistants endpoint configuration */
-    azureAssistants?: TAssistantEndpoint;
-    /** Agents endpoint configuration */
-    [EModelEndpoint.agents]?: TAgentsEndpoint;
-    /** Custom endpoints configuration */
-    [EModelEndpoint.custom]?: TCustomEndpoints;
-    /** Global endpoint configuration */
-    all?: TEndpoint;
-  };
-}
@@ -1,4 +1,7 @@
+import type { IMongoFile } from '@librechat/data-schemas';
 import type { ServerRequest } from './http';
+import type { Readable } from 'stream';
+import type { Request } from 'express';
 export interface STTService {
   getInstance(): Promise<STTService>;
   getProviderSchema(req: ServerRequest): Promise<[string, object]>;
@@ -26,3 +29,85 @@ export interface AudioProcessingResult {
   text: string;
   bytes: number;
 }
+
+export interface VideoResult {
+  videos: Array<{
+    type: string;
+    mimeType: string;
+    data: string;
+  }>;
+  files: Array<{
+    file_id?: string;
+    temp_file_id?: string;
+    filepath: string;
+    source?: string;
+    filename: string;
+    type: string;
+  }>;
+}
+
+export interface DocumentResult {
+  documents: Array<{
+    type: 'document' | 'file' | 'input_file';
+    /** Anthropic File Format, `document` */
+    source?: {
+      type: string;
+      media_type: string;
+      data: string;
+    };
+    cache_control?: { type: string };
+    citations?: { enabled: boolean };
+    /** Google File Format, `document` */
+    mimeType?: string;
+    data?: string;
+    /** OpenAI File Format, `file` */
+    file?: {
+      filename?: string;
+      file_data?: string;
+    };
+    /** OpenAI Responses API File Format, `input_file` */
+    filename?: string;
+    file_data?: string;
+  }>;
+  files: Array<{
+    file_id?: string;
+    temp_file_id?: string;
+    filepath: string;
+    source?: string;
+    filename: string;
+    type: string;
+  }>;
+}
+
+export interface AudioResult {
+  audios: Array<{
+    type: string;
+    mimeType: string;
+    data: string;
+  }>;
+  files: Array<{
+    file_id?: string;
+    temp_file_id?: string;
+    filepath: string;
+    source?: string;
+    filename: string;
+    type: string;
+  }>;
+}
+
+export interface ProcessedFile {
+  file: IMongoFile;
+  content: string;
+  metadata: {
+    file_id: string;
+    temp_file_id?: string;
+    filepath: string;
+    source?: string;
+    filename: string;
+    type: string;
+  };
+}
+
+export interface StrategyFunctions {
+  getDownloadStream: (req: Request, filepath: string) => Promise<Readable>;
+}
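The DocumentResult shape added above is deliberately a union-of-provider-fields: one entry carries whichever of the Anthropic, Google, or OpenAI document formats applies. Hedged examples of what two variants could look like (field values are illustrative):

  // Anthropic-style `document` entry
  const anthropicDoc = {
    type: 'document' as const,
    source: { type: 'base64', media_type: 'application/pdf', data: '<base64>' },
    citations: { enabled: true },
  };

  // OpenAI-style `file` entry
  const openaiDoc = {
    type: 'file' as const,
    file: { filename: 'report.pdf', file_data: 'data:application/pdf;base64,<base64>' },
  };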
@@ -1,6 +1,5 @@
 import type { Request } from 'express';
-import type { IUser } from '@librechat/data-schemas';
-import type { AppConfig } from './config';
+import type { IUser, AppConfig } from '@librechat/data-schemas';

 /**
  * LibreChat-specific request body type that extends Express Request body
@@ -1,4 +1,3 @@
-export * from './config';
 export * from './azure';
 export * from './balance';
 export * from './endpoints';
@@ -11,6 +10,4 @@ export * from './mistral';
 export * from './openai';
 export * from './prompts';
 export * from './run';
-export * from './tools';
-export * from './zod';
 export * from './anthropic';
@@ -3,8 +3,8 @@ import { openAISchema, EModelEndpoint } from 'librechat-data-provider';
 import type { TEndpointOption, TAzureConfig, TEndpoint, TConfig } from 'librechat-data-provider';
 import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
 import type { OpenAIClientOptions, Providers } from '@librechat/agents';
+import type { AppConfig } from '@librechat/data-schemas';
 import type { AzureOptions } from './azure';
-import type { AppConfig } from './config';

 export type OpenAIParameters = z.infer<typeof openAISchema>;

@@ -1,10 +0,0 @@
-import type { JsonSchemaType } from './zod';
-
-export interface FunctionTool {
-  type: 'function';
-  function: {
-    description: string;
-    name: string;
-    parameters: JsonSchemaType;
-  };
-}

@@ -1,15 +0,0 @@
-export type JsonSchemaType = {
-  type: 'string' | 'number' | 'integer' | 'float' | 'boolean' | 'array' | 'object';
-  enum?: string[];
-  items?: JsonSchemaType;
-  properties?: Record<string, JsonSchemaType>;
-  required?: string[];
-  description?: string;
-  additionalProperties?: boolean | JsonSchemaType;
-};
-
-export type ConvertJsonSchemaToZodOptions = {
-  allowEmptyObject?: boolean;
-  dropFields?: string[];
-  transformOneOfAnyOf?: boolean;
-};

@@ -12,8 +12,8 @@
  *
  * @throws Throws an error if the input is not a string or number, contains invalid characters, or does not evaluate to a number.
  */
-export function math(str: string | number, fallbackValue?: number): number {
-  const fallback = typeof fallbackValue !== 'undefined' && typeof fallbackValue === 'number';
+export function math(str: string | number | undefined, fallbackValue?: number): number {
+  const fallback = fallbackValue != null;
   if (typeof str !== 'string' && typeof str === 'number') {
     return str;
   } else if (typeof str !== 'string') {
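This utility evaluates simple arithmetic strings from environment-style config; with the widened signature an unset variable can now flow straight through instead of requiring a cast at the call site. A usage sketch (assuming the non-string, non-number path returns fallbackValue when one is provided, as the widened signature suggests):

  math(42);                           // 42 — numbers pass through unchanged
  math('7 * 24');                     // 168 — string expressions are evaluated
  math(process.env.UNSET_VAR, 1800);  // 1800 — undefined input now falls back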
@@ -1,4 +1,4 @@
-import type { AppConfig } from '~/types';
+import type { AppConfig } from '@librechat/data-schemas';
 import {
   createTempChatExpirationDate,
   getTempChatRetentionHours,
@@ -92,14 +92,16 @@ describe('tempChatRetention', () => {

   describe('createTempChatExpirationDate', () => {
     it('should create expiration date with default retention period', () => {
+      const beforeCall = Date.now();
       const result = createTempChatExpirationDate();
+      const afterCall = Date.now();

-      const expectedDate = new Date();
-      expectedDate.setHours(expectedDate.getHours() + DEFAULT_RETENTION_HOURS);
+      const expectedMin = beforeCall + DEFAULT_RETENTION_HOURS * 60 * 60 * 1000;
+      const expectedMax = afterCall + DEFAULT_RETENTION_HOURS * 60 * 60 * 1000;

-      // Allow for small time differences in test execution
-      const timeDiff = Math.abs(result.getTime() - expectedDate.getTime());
-      expect(timeDiff).toBeLessThan(1000); // Less than 1 second difference
+      // Result should be between expectedMin and expectedMax
+      expect(result.getTime()).toBeGreaterThanOrEqual(expectedMin);
+      expect(result.getTime()).toBeLessThanOrEqual(expectedMax);
     });

     it('should create expiration date with custom retention period', () => {
@@ -109,14 +111,16 @@ describe('tempChatRetention', () => {
         },
       };

+      const beforeCall = Date.now();
       const result = createTempChatExpirationDate(config?.interfaceConfig);
+      const afterCall = Date.now();

-      const expectedDate = new Date();
-      expectedDate.setHours(expectedDate.getHours() + 12);
+      const expectedMin = beforeCall + 12 * 60 * 60 * 1000;
+      const expectedMax = afterCall + 12 * 60 * 60 * 1000;

-      // Allow for small time differences in test execution
-      const timeDiff = Math.abs(result.getTime() - expectedDate.getTime());
-      expect(timeDiff).toBeLessThan(1000); // Less than 1 second difference
+      // Result should be between expectedMin and expectedMax
+      expect(result.getTime()).toBeGreaterThanOrEqual(expectedMin);
+      expect(result.getTime()).toBeLessThanOrEqual(expectedMax);
     });

     it('should return a Date object', () => {
@@ -1,5 +1,5 @@
 import { logger } from '@librechat/data-schemas';
-import type { AppConfig } from '~/types';
+import type { AppConfig } from '@librechat/data-schemas';

 /**
  * Default retention period for temporary chats in hours
@@ -73,7 +73,5 @@ export function getTempChatRetentionHours(
  */
 export function createTempChatExpirationDate(interfaceConfig?: AppConfig['interfaceConfig']): Date {
   const retentionHours = getTempChatRetentionHours(interfaceConfig);
-  const expiredAt = new Date();
-  expiredAt.setHours(expiredAt.getHours() + retentionHours);
-  return expiredAt;
+  return new Date(Date.now() + retentionHours * 60 * 60 * 1000);
 }
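The rewrite above swaps the mutating setHours() call for plain millisecond arithmetic, which is exactly what the updated tests bound with beforeCall/afterCall timestamps. The two approaches differ around DST transitions: setHours() manipulates local wall-clock fields, while Date.now() + N is a fixed span of real time. A quick sketch of the new behavior:

  const retentionHours = 12;
  const expiresAt = new Date(Date.now() + retentionHours * 60 * 60 * 1000);
  // expiresAt.getTime() - Date.now() ≈ 43_200_000 ms — exactly 12h of real time,
  // independent of timezone or DST changes in between.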
@@ -91,6 +91,7 @@ const googleModels = {
   'gemini-2.5': 1000000, // 1M input tokens, 64k output tokens
   'gemini-2.5-pro': 1000000,
   'gemini-2.5-flash': 1000000,
+  'gemini-2.5-flash-lite': 1000000,
   'gemini-2.0': 2000000,
   'gemini-2.0-flash': 1000000,
   'gemini-2.0-flash-lite': 1000000,
@@ -132,9 +133,9 @@ const anthropicModels = {
 };

 const deepseekModels = {
-  'deepseek-reasoner': 63000, // -1000 from max (API)
-  deepseek: 63000, // -1000 from max (API)
-  'deepseek.r1': 127500,
+  'deepseek-reasoner': 128000,
+  deepseek: 128000,
+  'deepseek.r1': 128000,
 };

 const metaModels = {
@@ -256,8 +257,19 @@ const aggregateModels = {
   // misc.
   kimi: 131000,
+  // GPT-OSS
+  'gpt-oss': 131000,
+  'gpt-oss:20b': 131000,
+  'gpt-oss-20b': 131000,
+  'gpt-oss:120b': 131000,
+  'gpt-oss-120b': 131000,
+  // GLM models (Zhipu AI)
+  glm4: 128000,
+  'glm-4': 128000,
+  'glm-4-32b': 128000,
+  'glm-4.5': 131000,
+  'glm-4.5-air': 131000,
+  'glm-4.5v': 66000,
+  'glm-4.6': 200000,
 };

 export const maxTokensMap = {
@@ -313,9 +325,10 @@ export function findMatchingPattern(
   tokensMap: Record<string, number> | EndpointTokenConfig,
 ): string | null {
   const keys = Object.keys(tokensMap);
+  const lowerModelName = modelName.toLowerCase();
   for (let i = keys.length - 1; i >= 0; i--) {
     const modelKey = keys[i];
-    if (modelName.includes(modelKey)) {
+    if (lowerModelName.includes(modelKey)) {
       return modelKey;
     }
   }
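The one-line fix above makes pattern matching case-insensitive on the model-name side, so provider IDs like 'GPT-OSS-20B' now match the lower-case keys in the token maps (the keys themselves are assumed to already be lower case). Illustrative behavior:

  findMatchingPattern('GPT-OSS-20B', { 'gpt-oss-20b': 131000 });   // 'gpt-oss-20b'
  findMatchingPattern('DeepSeek-Reasoner', { deepseek: 128000 });  // 'deepseek'
  // Before the change, both lookups returned null because of the case mismatch.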
@@ -1,12 +1,13 @@
+import { webSearchAuth } from '@librechat/data-schemas';
+import { SafeSearchTypes, AuthType } from 'librechat-data-provider';
 import type {
-  ScraperTypes,
-  TWebSearchConfig,
-  SearchProviders,
+  ScraperProviders,
   TCustomConfig,
   RerankerTypes,
+  SearchProviders,
+  TWebSearchConfig,
 } from 'librechat-data-provider';
-import { webSearchAuth, loadWebSearchAuth, extractWebSearchEnvVars } from './web';
-import { SafeSearchTypes, AuthType } from 'librechat-data-provider';
+import { loadWebSearchAuth, extractWebSearchEnvVars } from './web';

 // Mock the extractVariableName function
 jest.mock('../utils', () => ({
@@ -118,7 +119,7 @@ describe('web.ts', () => {
     }

     expect(result.authResult).toHaveProperty('searchProvider', 'serper');
-    expect(result.authResult).toHaveProperty('scraperType', 'firecrawl');
+    expect(result.authResult).toHaveProperty('scraperProvider', 'firecrawl');
     expect(['jina', 'cohere']).toContain(result.authResult.rerankerType as string);
   });

@@ -287,7 +288,7 @@ describe('web.ts', () => {

     // Check that the correct service types are set
     expect(result.authResult.searchProvider).toBe('serper' as SearchProviders);
-    expect(result.authResult.scraperType).toBe('firecrawl' as ScraperTypes);
+    expect(result.authResult.scraperProvider).toBe('firecrawl' as ScraperProviders);
     // One of the rerankers should be set
     expect(['jina', 'cohere']).toContain(result.authResult.rerankerType as string);
   });

@@ -329,7 +330,7 @@ describe('web.ts', () => {

     // Should have set values for all categories
     expect(result.authResult.searchProvider).toBeDefined();
-    expect(result.authResult.scraperType).toBeDefined();
+    expect(result.authResult.scraperProvider).toBeDefined();
     expect(result.authResult.rerankerType).toBeDefined();
   });

@@ -358,7 +359,7 @@ describe('web.ts', () => {
       safeSearch: SafeSearchTypes.MODERATE,
       // Specify which services to use
       searchProvider: 'serper' as SearchProviders,
-      scraperType: 'firecrawl' as ScraperTypes,
+      scraperProvider: 'firecrawl' as ScraperProviders,
       rerankerType: 'jina' as RerankerTypes,
     };

@@ -393,7 +394,7 @@ describe('web.ts', () => {
     expect(result.authResult).toHaveProperty('firecrawlApiUrl');
     expect(result.authResult).toHaveProperty('jinaApiKey');
     expect(result.authResult).toHaveProperty('searchProvider');
-    expect(result.authResult).toHaveProperty('scraperType');
+    expect(result.authResult).toHaveProperty('scraperProvider');
     expect(result.authResult).toHaveProperty('rerankerType');

     expect(result.authenticated).toBe(true);

@@ -418,7 +419,7 @@ describe('web.ts', () => {
     expect(result.authResult).toHaveProperty('firecrawlApiUrl', 'https://api.firecrawl.dev');
     expect(result.authResult).toHaveProperty('jinaApiKey', 'system-jina-key');
     expect(result.authResult).toHaveProperty('searchProvider', 'serper');
-    expect(result.authResult).toHaveProperty('scraperType', 'firecrawl');
+    expect(result.authResult).toHaveProperty('scraperProvider', 'firecrawl');
     expect(result.authResult).toHaveProperty('rerankerType', 'jina');

     // Restore original env

@@ -451,7 +452,7 @@ describe('web.ts', () => {
       safeSearch: SafeSearchTypes.MODERATE,
       // Specify which services to use
       searchProvider: 'serper' as SearchProviders,
-      scraperType: 'firecrawl' as ScraperTypes,
+      scraperProvider: 'firecrawl' as ScraperProviders,
       rerankerType: 'jina' as RerankerTypes, // Only Jina will be checked
     };

@@ -491,7 +492,7 @@ describe('web.ts', () => {

     // Verify the service types are set correctly
     expect(result.authResult).toHaveProperty('searchProvider', 'serper');
-    expect(result.authResult).toHaveProperty('scraperType', 'firecrawl');
+    expect(result.authResult).toHaveProperty('scraperProvider', 'firecrawl');
     expect(result.authResult).toHaveProperty('rerankerType', 'jina');

     // Restore original env

@@ -721,8 +722,8 @@ describe('web.ts', () => {
     expect(providerCalls.length).toBe(1);
   });

-  it('should only check the specified scraperType', async () => {
-    // Initialize a webSearchConfig with a specific scraperType
+  it('should only check the specified scraperProvider', async () => {
+    // Initialize a webSearchConfig with a specific scraperProvider
     const webSearchConfig: TCustomConfig['webSearch'] = {
       serperApiKey: '${SERPER_API_KEY}',
       searxngInstanceUrl: '${SEARXNG_INSTANCE_URL}',

@@ -733,7 +734,7 @@ describe('web.ts', () => {
       jinaApiUrl: '${JINA_API_URL}',
       cohereApiKey: '${COHERE_API_KEY}',
       safeSearch: SafeSearchTypes.MODERATE,
-      scraperType: 'firecrawl' as ScraperTypes,
+      scraperProvider: 'firecrawl' as ScraperProviders,
     };

     // Mock successful authentication

@@ -753,7 +754,7 @@ describe('web.ts', () => {
     });

     expect(result.authenticated).toBe(true);
-    expect(result.authResult.scraperType).toBe('firecrawl');
+    expect(result.authResult.scraperProvider).toBe('firecrawl');

     // Verify that only FIRECRAWL_API_KEY and FIRECRAWL_API_URL were requested for the scrapers category
     const scraperCalls = mockLoadAuthValues.mock.calls.filter((call) =>

@@ -932,7 +933,7 @@ describe('web.ts', () => {

     // Should have set values for all categories
     expect(result.authResult.searchProvider).toBeDefined();
-    expect(result.authResult.scraperType).toBeDefined();
+    expect(result.authResult.scraperProvider).toBeDefined();
     expect(result.authResult.rerankerType).toBeDefined();
   });
@@ -1,112 +1,18 @@
+import {
+  AuthType,
+  SafeSearchTypes,
+  SearchCategories,
+  extractVariableName,
+} from 'librechat-data-provider';
+import { webSearchAuth } from '@librechat/data-schemas';
 import type {
   ScraperTypes,
   RerankerTypes,
   TCustomConfig,
   SearchProviders,
+  ScraperProviders,
   TWebSearchConfig,
 } from 'librechat-data-provider';
-import {
-  SearchCategories,
-  SafeSearchTypes,
-  extractVariableName,
-  AuthType,
-} from 'librechat-data-provider';
-
-export function loadWebSearchConfig(
-  config: TCustomConfig['webSearch'],
-): TCustomConfig['webSearch'] {
-  const serperApiKey = config?.serperApiKey ?? '${SERPER_API_KEY}';
-  const searxngInstanceUrl = config?.searxngInstanceUrl ?? '${SEARXNG_INSTANCE_URL}';
-  const searxngApiKey = config?.searxngApiKey ?? '${SEARXNG_API_KEY}';
-  const firecrawlApiKey = config?.firecrawlApiKey ?? '${FIRECRAWL_API_KEY}';
-  const firecrawlApiUrl = config?.firecrawlApiUrl ?? '${FIRECRAWL_API_URL}';
-  const jinaApiKey = config?.jinaApiKey ?? '${JINA_API_KEY}';
-  const jinaApiUrl = config?.jinaApiUrl ?? '${JINA_API_URL}';
-  const cohereApiKey = config?.cohereApiKey ?? '${COHERE_API_KEY}';
-  const safeSearch = config?.safeSearch ?? SafeSearchTypes.MODERATE;
-
-  return {
-    ...config,
-    safeSearch,
-    jinaApiKey,
-    jinaApiUrl,
-    cohereApiKey,
-    serperApiKey,
-    searxngInstanceUrl,
-    searxngApiKey,
-    firecrawlApiKey,
-    firecrawlApiUrl,
-  };
-}
-
-export type TWebSearchKeys =
-  | 'serperApiKey'
-  | 'searxngInstanceUrl'
-  | 'searxngApiKey'
-  | 'firecrawlApiKey'
-  | 'firecrawlApiUrl'
-  | 'jinaApiKey'
-  | 'jinaApiUrl'
-  | 'cohereApiKey';
-
-export type TWebSearchCategories =
-  | SearchCategories.PROVIDERS
-  | SearchCategories.SCRAPERS
-  | SearchCategories.RERANKERS;
-
-export const webSearchAuth = {
-  providers: {
-    serper: {
-      serperApiKey: 1 as const,
-    },
-    searxng: {
-      searxngInstanceUrl: 1 as const,
-      /** Optional (0) */
-      searxngApiKey: 0 as const,
-    },
-  },
-  scrapers: {
-    firecrawl: {
-      firecrawlApiKey: 1 as const,
-      /** Optional (0) */
-      firecrawlApiUrl: 0 as const,
-    },
-  },
-  rerankers: {
-    jina: {
-      jinaApiKey: 1 as const,
-      /** Optional (0) */
-      jinaApiUrl: 0 as const,
-    },
-    cohere: { cohereApiKey: 1 as const },
-  },
-};
-
-/**
- * Extracts all API keys from the webSearchAuth configuration object
- */
-export function getWebSearchKeys(): TWebSearchKeys[] {
-  const keys: TWebSearchKeys[] = [];
-
-  // Iterate through each category (providers, scrapers, rerankers)
-  for (const category of Object.keys(webSearchAuth)) {
-    const categoryObj = webSearchAuth[category as TWebSearchCategories];
-
-    // Iterate through each service within the category
-    for (const service of Object.keys(categoryObj)) {
-      const serviceObj = categoryObj[service as keyof typeof categoryObj];
-
-      // Extract the API keys from the service
-      for (const key of Object.keys(serviceObj)) {
-        keys.push(key as TWebSearchKeys);
-      }
-    }
-  }
-
-  return keys;
-}
-
-export const webSearchKeys: TWebSearchKeys[] = getWebSearchKeys();
+import type { TWebSearchKeys, TWebSearchCategories } from '@librechat/data-schemas';
 export function extractWebSearchEnvVars({
   keys,
@@ -182,8 +88,8 @@ export async function loadWebSearchAuth({
   let specificService: ServiceType | undefined;
   if (category === SearchCategories.PROVIDERS && webSearchConfig?.searchProvider) {
     specificService = webSearchConfig.searchProvider as unknown as ServiceType;
-  } else if (category === SearchCategories.SCRAPERS && webSearchConfig?.scraperType) {
-    specificService = webSearchConfig.scraperType as unknown as ServiceType;
+  } else if (category === SearchCategories.SCRAPERS && webSearchConfig?.scraperProvider) {
+    specificService = webSearchConfig.scraperProvider as unknown as ServiceType;
   } else if (category === SearchCategories.RERANKERS && webSearchConfig?.rerankerType) {
     specificService = webSearchConfig.rerankerType as unknown as ServiceType;
   }
@@ -259,7 +165,7 @@ export async function loadWebSearchAuth({
   if (category === SearchCategories.PROVIDERS) {
     authResult.searchProvider = service as SearchProviders;
   } else if (category === SearchCategories.SCRAPERS) {
-    authResult.scraperType = service as ScraperTypes;
+    authResult.scraperProvider = service as ScraperProviders;
   } else if (category === SearchCategories.RERANKERS) {
     authResult.rerankerType = service as RerankerTypes;
   }