mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-01-23 02:36:12 +01:00
Merge branch 'dev' into feat/multi-lang-Terms-of-service
This commit is contained in:
commit
97a6074edc
660 changed files with 35171 additions and 17122 deletions
|
|
@ -5,6 +5,7 @@ export default {
|
|||
testResultsProcessor: 'jest-junit',
|
||||
moduleNameMapper: {
|
||||
'^@src/(.*)$': '<rootDir>/src/$1',
|
||||
'~/(.*)': '<rootDir>/src/$1',
|
||||
},
|
||||
// coverageThreshold: {
|
||||
// global: {
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "librechat-mcp",
|
||||
"version": "1.2.2",
|
||||
"name": "@librechat/api",
|
||||
"version": "1.2.5",
|
||||
"type": "commonjs",
|
||||
"description": "MCP services for LibreChat",
|
||||
"main": "dist/index.js",
|
||||
|
|
@ -47,9 +47,11 @@
|
|||
"@rollup/plugin-replace": "^5.0.5",
|
||||
"@rollup/plugin-terser": "^0.4.4",
|
||||
"@rollup/plugin-typescript": "^12.1.2",
|
||||
"@types/bun": "^1.2.15",
|
||||
"@types/diff": "^6.0.0",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/jest": "^29.5.2",
|
||||
"@types/multer": "^1.4.13",
|
||||
"@types/node": "^20.3.0",
|
||||
"@types/react": "^18.2.18",
|
||||
"@types/winston": "^2.4.4",
|
||||
|
|
@ -66,13 +68,20 @@
|
|||
"publishConfig": {
|
||||
"registry": "https://registry.npmjs.org/"
|
||||
},
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "^1.11.2",
|
||||
"peerDependencies": {
|
||||
"@librechat/agents": "^2.4.51",
|
||||
"@librechat/data-schemas": "*",
|
||||
"@modelcontextprotocol/sdk": "^1.12.3",
|
||||
"axios": "^1.8.2",
|
||||
"diff": "^7.0.0",
|
||||
"eventsource": "^3.0.2",
|
||||
"express": "^4.21.2"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"keyv": "^5.3.2"
|
||||
"express": "^4.21.2",
|
||||
"js-yaml": "^4.1.0",
|
||||
"keyv": "^5.3.2",
|
||||
"librechat-data-provider": "*",
|
||||
"node-fetch": "2.7.0",
|
||||
"tiktoken": "^1.0.15",
|
||||
"undici": "^7.10.0",
|
||||
"zod": "^3.22.4"
|
||||
}
|
||||
}
|
||||
|
|
@ -1,5 +1,6 @@
|
|||
// rollup.config.js
|
||||
import { readFileSync } from 'fs';
|
||||
import json from '@rollup/plugin-json';
|
||||
import terser from '@rollup/plugin-terser';
|
||||
import replace from '@rollup/plugin-replace';
|
||||
import commonjs from '@rollup/plugin-commonjs';
|
||||
|
|
@ -29,15 +30,17 @@ const plugins = [
|
|||
inlineSourceMap: true,
|
||||
}),
|
||||
terser(),
|
||||
json(),
|
||||
];
|
||||
|
||||
const cjsBuild = {
|
||||
input: 'src/index.ts',
|
||||
output: {
|
||||
file: pkg.main,
|
||||
dir: 'dist',
|
||||
format: 'cjs',
|
||||
sourcemap: true,
|
||||
exports: 'named',
|
||||
entryFileNames: '[name].js',
|
||||
},
|
||||
external: [...Object.keys(pkg.dependencies || {}), ...Object.keys(pkg.devDependencies || {})],
|
||||
preserveSymlinks: true,
|
||||
93
packages/api/src/agents/auth.ts
Normal file
93
packages/api/src/agents/auth.ts
Normal file
|
|
@ -0,0 +1,93 @@
|
|||
import { logger } from '@librechat/data-schemas';
|
||||
import type { IPluginAuth, PluginAuthMethods } from '@librechat/data-schemas';
|
||||
import { decrypt } from '../crypto/encryption';
|
||||
|
||||
export interface GetPluginAuthMapParams {
|
||||
userId: string;
|
||||
pluginKeys: string[];
|
||||
throwError?: boolean;
|
||||
findPluginAuthsByKeys: PluginAuthMethods['findPluginAuthsByKeys'];
|
||||
}
|
||||
|
||||
export type PluginAuthMap = Record<string, Record<string, string>>;
|
||||
|
||||
/**
|
||||
* Retrieves and decrypts authentication values for multiple plugins
|
||||
* @returns A map where keys are pluginKeys and values are objects of authField:decryptedValue pairs
|
||||
*/
|
||||
export async function getPluginAuthMap({
|
||||
userId,
|
||||
pluginKeys,
|
||||
throwError = true,
|
||||
findPluginAuthsByKeys,
|
||||
}: GetPluginAuthMapParams): Promise<PluginAuthMap> {
|
||||
try {
|
||||
/** Early return for empty plugin keys */
|
||||
if (!pluginKeys?.length) {
|
||||
return {};
|
||||
}
|
||||
|
||||
/** All plugin auths for current user query */
|
||||
const pluginAuths = await findPluginAuthsByKeys({ userId, pluginKeys });
|
||||
|
||||
/** Group auth records by pluginKey for efficient lookup */
|
||||
const authsByPlugin = new Map<string, IPluginAuth[]>();
|
||||
for (const auth of pluginAuths) {
|
||||
if (!auth.pluginKey) {
|
||||
logger.warn(`[getPluginAuthMap] Missing pluginKey for userId ${userId}`);
|
||||
continue;
|
||||
}
|
||||
const existing = authsByPlugin.get(auth.pluginKey) || [];
|
||||
existing.push(auth);
|
||||
authsByPlugin.set(auth.pluginKey, existing);
|
||||
}
|
||||
|
||||
const authMap: PluginAuthMap = {};
|
||||
const decryptionPromises: Promise<void>[] = [];
|
||||
|
||||
/** Single loop through requested pluginKeys */
|
||||
for (const pluginKey of pluginKeys) {
|
||||
authMap[pluginKey] = {};
|
||||
const auths = authsByPlugin.get(pluginKey) || [];
|
||||
|
||||
for (const auth of auths) {
|
||||
decryptionPromises.push(
|
||||
(async () => {
|
||||
try {
|
||||
const decryptedValue = await decrypt(auth.value);
|
||||
authMap[pluginKey][auth.authField] = decryptedValue;
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||
logger.error(
|
||||
`[getPluginAuthMap] Decryption failed for userId ${userId}, plugin ${pluginKey}, field ${auth.authField}: ${message}`,
|
||||
);
|
||||
|
||||
if (throwError) {
|
||||
throw new Error(
|
||||
`Decryption failed for plugin ${pluginKey}, field ${auth.authField}: ${message}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
})(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.all(decryptionPromises);
|
||||
return authMap;
|
||||
} catch (error) {
|
||||
if (!throwError) {
|
||||
/** Empty objects for each plugin key on error */
|
||||
return pluginKeys.reduce((acc, key) => {
|
||||
acc[key] = {};
|
||||
return acc;
|
||||
}, {} as PluginAuthMap);
|
||||
}
|
||||
|
||||
const message = error instanceof Error ? error.message : 'Unknown error';
|
||||
logger.error(
|
||||
`[getPluginAuthMap] Failed to fetch auth values for userId ${userId}, plugins: ${pluginKeys.join(', ')}: ${message}`,
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
24
packages/api/src/agents/config.ts
Normal file
24
packages/api/src/agents/config.ts
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
import { EModelEndpoint, agentsEndpointSchema } from 'librechat-data-provider';
|
||||
import type { TCustomConfig, TAgentsEndpoint } from 'librechat-data-provider';
|
||||
|
||||
/**
|
||||
* Sets up the Agents configuration from the config (`librechat.yaml`) file.
|
||||
* If no agents config is defined, uses the provided defaults or parses empty object.
|
||||
*
|
||||
* @param config - The loaded custom configuration.
|
||||
* @param [defaultConfig] - Default configuration from getConfigDefaults.
|
||||
* @returns The Agents endpoint configuration.
|
||||
*/
|
||||
export function agentsConfigSetup(
|
||||
config: TCustomConfig,
|
||||
defaultConfig: Partial<TAgentsEndpoint>,
|
||||
): Partial<TAgentsEndpoint> {
|
||||
const agentsConfig = config?.endpoints?.[EModelEndpoint.agents];
|
||||
|
||||
if (!agentsConfig) {
|
||||
return defaultConfig || agentsEndpointSchema.parse({});
|
||||
}
|
||||
|
||||
const parsedConfig = agentsEndpointSchema.parse(agentsConfig);
|
||||
return parsedConfig;
|
||||
}
|
||||
4
packages/api/src/agents/index.ts
Normal file
4
packages/api/src/agents/index.ts
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
export * from './config';
|
||||
export * from './memory';
|
||||
export * from './resources';
|
||||
export * from './run';
|
||||
468
packages/api/src/agents/memory.ts
Normal file
468
packages/api/src/agents/memory.ts
Normal file
|
|
@ -0,0 +1,468 @@
|
|||
/** Memories */
|
||||
import { z } from 'zod';
|
||||
import { tool } from '@langchain/core/tools';
|
||||
import { Tools } from 'librechat-data-provider';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import { Run, Providers, GraphEvents } from '@librechat/agents';
|
||||
import type {
|
||||
StreamEventData,
|
||||
ToolEndCallback,
|
||||
EventHandler,
|
||||
ToolEndData,
|
||||
LLMConfig,
|
||||
} from '@librechat/agents';
|
||||
import type { TAttachment, MemoryArtifact } from 'librechat-data-provider';
|
||||
import type { ObjectId, MemoryMethods } from '@librechat/data-schemas';
|
||||
import type { BaseMessage } from '@langchain/core/messages';
|
||||
import type { Response as ServerResponse } from 'express';
|
||||
import { Tokenizer } from '~/utils';
|
||||
|
||||
type RequiredMemoryMethods = Pick<
|
||||
MemoryMethods,
|
||||
'setMemory' | 'deleteMemory' | 'getFormattedMemories'
|
||||
>;
|
||||
|
||||
type ToolEndMetadata = Record<string, unknown> & {
|
||||
run_id?: string;
|
||||
thread_id?: string;
|
||||
};
|
||||
|
||||
export interface MemoryConfig {
|
||||
validKeys?: string[];
|
||||
instructions?: string;
|
||||
llmConfig?: Partial<LLMConfig>;
|
||||
tokenLimit?: number;
|
||||
}
|
||||
|
||||
export const memoryInstructions =
|
||||
'The system automatically stores important user information and can update or delete memories based on user requests, enabling dynamic memory management.';
|
||||
|
||||
const getDefaultInstructions = (
|
||||
validKeys?: string[],
|
||||
tokenLimit?: number,
|
||||
) => `Use the \`set_memory\` tool to save important information about the user, but ONLY when the user has explicitly provided this information. If there is nothing to note about the user specifically, END THE TURN IMMEDIATELY.
|
||||
|
||||
The \`delete_memory\` tool should only be used in two scenarios:
|
||||
1. When the user explicitly asks to forget or remove specific information
|
||||
2. When updating existing memories, use the \`set_memory\` tool instead of deleting and re-adding the memory.
|
||||
|
||||
${
|
||||
validKeys && validKeys.length > 0
|
||||
? `CRITICAL INSTRUCTION: Only the following keys are valid for storing memories:
|
||||
${validKeys.map((key) => `- ${key}`).join('\n ')}`
|
||||
: 'You can use any appropriate key to store memories about the user.'
|
||||
}
|
||||
|
||||
${
|
||||
tokenLimit
|
||||
? `⚠️ TOKEN LIMIT: Each memory value must not exceed ${tokenLimit} tokens. Be concise and store only essential information.`
|
||||
: ''
|
||||
}
|
||||
|
||||
⚠️ WARNING ⚠️
|
||||
DO NOT STORE ANY INFORMATION UNLESS THE USER HAS EXPLICITLY PROVIDED IT.
|
||||
ONLY store information the user has EXPLICITLY shared.
|
||||
NEVER guess or assume user information.
|
||||
ALL memory values must be factual statements about THIS specific user.
|
||||
If nothing needs to be stored, DO NOT CALL any memory tools.
|
||||
If you're unsure whether to store something, DO NOT store it.
|
||||
If nothing needs to be stored, END THE TURN IMMEDIATELY.`;
|
||||
|
||||
/**
|
||||
* Creates a memory tool instance with user context
|
||||
*/
|
||||
const createMemoryTool = ({
|
||||
userId,
|
||||
setMemory,
|
||||
validKeys,
|
||||
tokenLimit,
|
||||
totalTokens = 0,
|
||||
}: {
|
||||
userId: string | ObjectId;
|
||||
setMemory: MemoryMethods['setMemory'];
|
||||
validKeys?: string[];
|
||||
tokenLimit?: number;
|
||||
totalTokens?: number;
|
||||
}) => {
|
||||
return tool(
|
||||
async ({ key, value }) => {
|
||||
try {
|
||||
if (validKeys && validKeys.length > 0 && !validKeys.includes(key)) {
|
||||
logger.warn(
|
||||
`Memory Agent failed to set memory: Invalid key "${key}". Must be one of: ${validKeys.join(
|
||||
', ',
|
||||
)}`,
|
||||
);
|
||||
return `Invalid key "${key}". Must be one of: ${validKeys.join(', ')}`;
|
||||
}
|
||||
|
||||
const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');
|
||||
|
||||
if (tokenLimit && tokenCount > tokenLimit) {
|
||||
logger.warn(
|
||||
`Memory Agent failed to set memory: Value exceeds token limit. Value has ${tokenCount} tokens, but limit is ${tokenLimit}`,
|
||||
);
|
||||
return `Memory value too large: ${tokenCount} tokens exceeds limit of ${tokenLimit}`;
|
||||
}
|
||||
|
||||
if (tokenLimit && totalTokens + tokenCount > tokenLimit) {
|
||||
const remainingCapacity = tokenLimit - totalTokens;
|
||||
logger.warn(
|
||||
`Memory Agent failed to set memory: Would exceed total token limit. Current usage: ${totalTokens}, new memory: ${tokenCount} tokens, limit: ${tokenLimit}`,
|
||||
);
|
||||
return `Cannot add memory: would exceed token limit. Current usage: ${totalTokens}/${tokenLimit} tokens. This memory requires ${tokenCount} tokens, but only ${remainingCapacity} tokens available.`;
|
||||
}
|
||||
|
||||
const artifact: Record<Tools.memory, MemoryArtifact> = {
|
||||
[Tools.memory]: {
|
||||
key,
|
||||
value,
|
||||
tokenCount,
|
||||
type: 'update',
|
||||
},
|
||||
};
|
||||
|
||||
const result = await setMemory({ userId, key, value, tokenCount });
|
||||
if (result.ok) {
|
||||
logger.debug(`Memory set for key "${key}" (${tokenCount} tokens) for user "${userId}"`);
|
||||
return [`Memory set for key "${key}" (${tokenCount} tokens)`, artifact];
|
||||
}
|
||||
logger.warn(`Failed to set memory for key "${key}" for user "${userId}"`);
|
||||
return [`Failed to set memory for key "${key}"`, undefined];
|
||||
} catch (error) {
|
||||
logger.error('Memory Agent failed to set memory', error);
|
||||
return [`Error setting memory for key "${key}"`, undefined];
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'set_memory',
|
||||
description: 'Saves important information about the user into memory.',
|
||||
responseFormat: 'content_and_artifact',
|
||||
schema: z.object({
|
||||
key: z
|
||||
.string()
|
||||
.describe(
|
||||
validKeys && validKeys.length > 0
|
||||
? `The key of the memory value. Must be one of: ${validKeys.join(', ')}`
|
||||
: 'The key identifier for this memory',
|
||||
),
|
||||
value: z
|
||||
.string()
|
||||
.describe(
|
||||
'Value MUST be a complete sentence that fully describes relevant user information.',
|
||||
),
|
||||
}),
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a delete memory tool instance with user context
|
||||
*/
|
||||
const createDeleteMemoryTool = ({
|
||||
userId,
|
||||
deleteMemory,
|
||||
validKeys,
|
||||
}: {
|
||||
userId: string | ObjectId;
|
||||
deleteMemory: MemoryMethods['deleteMemory'];
|
||||
validKeys?: string[];
|
||||
}) => {
|
||||
return tool(
|
||||
async ({ key }) => {
|
||||
try {
|
||||
if (validKeys && validKeys.length > 0 && !validKeys.includes(key)) {
|
||||
logger.warn(
|
||||
`Memory Agent failed to delete memory: Invalid key "${key}". Must be one of: ${validKeys.join(
|
||||
', ',
|
||||
)}`,
|
||||
);
|
||||
return `Invalid key "${key}". Must be one of: ${validKeys.join(', ')}`;
|
||||
}
|
||||
|
||||
const artifact: Record<Tools.memory, MemoryArtifact> = {
|
||||
[Tools.memory]: {
|
||||
key,
|
||||
type: 'delete',
|
||||
},
|
||||
};
|
||||
|
||||
const result = await deleteMemory({ userId, key });
|
||||
if (result.ok) {
|
||||
logger.debug(`Memory deleted for key "${key}" for user "${userId}"`);
|
||||
return [`Memory deleted for key "${key}"`, artifact];
|
||||
}
|
||||
logger.warn(`Failed to delete memory for key "${key}" for user "${userId}"`);
|
||||
return [`Failed to delete memory for key "${key}"`, undefined];
|
||||
} catch (error) {
|
||||
logger.error('Memory Agent failed to delete memory', error);
|
||||
return [`Error deleting memory for key "${key}"`, undefined];
|
||||
}
|
||||
},
|
||||
{
|
||||
name: 'delete_memory',
|
||||
description:
|
||||
'Deletes specific memory data about the user using the provided key. For updating existing memories, use the `set_memory` tool instead',
|
||||
responseFormat: 'content_and_artifact',
|
||||
schema: z.object({
|
||||
key: z
|
||||
.string()
|
||||
.describe(
|
||||
validKeys && validKeys.length > 0
|
||||
? `The key of the memory to delete. Must be one of: ${validKeys.join(', ')}`
|
||||
: 'The key identifier of the memory to delete',
|
||||
),
|
||||
}),
|
||||
},
|
||||
);
|
||||
};
|
||||
export class BasicToolEndHandler implements EventHandler {
|
||||
private callback?: ToolEndCallback;
|
||||
constructor(callback?: ToolEndCallback) {
|
||||
this.callback = callback;
|
||||
}
|
||||
handle(
|
||||
event: string,
|
||||
data: StreamEventData | undefined,
|
||||
metadata?: Record<string, unknown>,
|
||||
): void {
|
||||
if (!metadata) {
|
||||
console.warn(`Graph or metadata not found in ${event} event`);
|
||||
return;
|
||||
}
|
||||
const toolEndData = data as ToolEndData | undefined;
|
||||
if (!toolEndData?.output) {
|
||||
console.warn('No output found in tool_end event');
|
||||
return;
|
||||
}
|
||||
this.callback?.(toolEndData, metadata);
|
||||
}
|
||||
}
|
||||
|
||||
export async function processMemory({
|
||||
res,
|
||||
userId,
|
||||
setMemory,
|
||||
deleteMemory,
|
||||
messages,
|
||||
memory,
|
||||
messageId,
|
||||
conversationId,
|
||||
validKeys,
|
||||
instructions,
|
||||
llmConfig,
|
||||
tokenLimit,
|
||||
totalTokens = 0,
|
||||
}: {
|
||||
res: ServerResponse;
|
||||
setMemory: MemoryMethods['setMemory'];
|
||||
deleteMemory: MemoryMethods['deleteMemory'];
|
||||
userId: string | ObjectId;
|
||||
memory: string;
|
||||
messageId: string;
|
||||
conversationId: string;
|
||||
messages: BaseMessage[];
|
||||
validKeys?: string[];
|
||||
instructions: string;
|
||||
tokenLimit?: number;
|
||||
totalTokens?: number;
|
||||
llmConfig?: Partial<LLMConfig>;
|
||||
}): Promise<(TAttachment | null)[] | undefined> {
|
||||
try {
|
||||
const memoryTool = createMemoryTool({ userId, tokenLimit, setMemory, validKeys, totalTokens });
|
||||
const deleteMemoryTool = createDeleteMemoryTool({
|
||||
userId,
|
||||
validKeys,
|
||||
deleteMemory,
|
||||
});
|
||||
|
||||
const currentMemoryTokens = totalTokens;
|
||||
|
||||
let memoryStatus = `# Existing memory:\n${memory ?? 'No existing memories'}`;
|
||||
|
||||
if (tokenLimit) {
|
||||
const remainingTokens = tokenLimit - currentMemoryTokens;
|
||||
memoryStatus = `# Memory Status:
|
||||
Current memory usage: ${currentMemoryTokens} tokens
|
||||
Token limit: ${tokenLimit} tokens
|
||||
Remaining capacity: ${remainingTokens} tokens
|
||||
|
||||
# Existing memory:
|
||||
${memory ?? 'No existing memories'}`;
|
||||
}
|
||||
|
||||
const defaultLLMConfig: LLMConfig = {
|
||||
provider: Providers.OPENAI,
|
||||
model: 'gpt-4.1-mini',
|
||||
temperature: 0.4,
|
||||
streaming: false,
|
||||
disableStreaming: true,
|
||||
};
|
||||
|
||||
const finalLLMConfig = {
|
||||
...defaultLLMConfig,
|
||||
...llmConfig,
|
||||
/**
|
||||
* Ensure streaming is always disabled for memory processing
|
||||
*/
|
||||
streaming: false,
|
||||
disableStreaming: true,
|
||||
};
|
||||
|
||||
const artifactPromises: Promise<TAttachment | null>[] = [];
|
||||
const memoryCallback = createMemoryCallback({ res, artifactPromises });
|
||||
const customHandlers = {
|
||||
[GraphEvents.TOOL_END]: new BasicToolEndHandler(memoryCallback),
|
||||
};
|
||||
|
||||
const run = await Run.create({
|
||||
runId: messageId,
|
||||
graphConfig: {
|
||||
type: 'standard',
|
||||
llmConfig: finalLLMConfig,
|
||||
tools: [memoryTool, deleteMemoryTool],
|
||||
instructions,
|
||||
additional_instructions: memoryStatus,
|
||||
toolEnd: true,
|
||||
},
|
||||
customHandlers,
|
||||
returnContent: true,
|
||||
});
|
||||
|
||||
const config = {
|
||||
configurable: {
|
||||
provider: llmConfig?.provider,
|
||||
thread_id: `memory-run-${conversationId}`,
|
||||
},
|
||||
streamMode: 'values',
|
||||
version: 'v2',
|
||||
} as const;
|
||||
|
||||
const inputs = {
|
||||
messages,
|
||||
};
|
||||
const content = await run.processStream(inputs, config);
|
||||
if (content) {
|
||||
logger.debug('Memory Agent processed memory successfully', content);
|
||||
} else {
|
||||
logger.warn('Memory Agent processed memory but returned no content');
|
||||
}
|
||||
return await Promise.all(artifactPromises);
|
||||
} catch (error) {
|
||||
logger.error('Memory Agent failed to process memory', error);
|
||||
}
|
||||
}
|
||||
|
||||
export async function createMemoryProcessor({
|
||||
res,
|
||||
userId,
|
||||
messageId,
|
||||
memoryMethods,
|
||||
conversationId,
|
||||
config = {},
|
||||
}: {
|
||||
res: ServerResponse;
|
||||
messageId: string;
|
||||
conversationId: string;
|
||||
userId: string | ObjectId;
|
||||
memoryMethods: RequiredMemoryMethods;
|
||||
config?: MemoryConfig;
|
||||
}): Promise<[string, (messages: BaseMessage[]) => Promise<(TAttachment | null)[] | undefined>]> {
|
||||
const { validKeys, instructions, llmConfig, tokenLimit } = config;
|
||||
const finalInstructions = instructions || getDefaultInstructions(validKeys, tokenLimit);
|
||||
|
||||
const { withKeys, withoutKeys, totalTokens } = await memoryMethods.getFormattedMemories({
|
||||
userId,
|
||||
});
|
||||
|
||||
return [
|
||||
withoutKeys,
|
||||
async function (messages: BaseMessage[]): Promise<(TAttachment | null)[] | undefined> {
|
||||
try {
|
||||
return await processMemory({
|
||||
res,
|
||||
userId,
|
||||
messages,
|
||||
validKeys,
|
||||
llmConfig,
|
||||
messageId,
|
||||
tokenLimit,
|
||||
conversationId,
|
||||
memory: withKeys,
|
||||
totalTokens: totalTokens || 0,
|
||||
instructions: finalInstructions,
|
||||
setMemory: memoryMethods.setMemory,
|
||||
deleteMemory: memoryMethods.deleteMemory,
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Memory Agent failed to process memory', error);
|
||||
}
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
async function handleMemoryArtifact({
|
||||
res,
|
||||
data,
|
||||
metadata,
|
||||
}: {
|
||||
res: ServerResponse;
|
||||
data: ToolEndData;
|
||||
metadata?: ToolEndMetadata;
|
||||
}) {
|
||||
const output = data?.output;
|
||||
if (!output) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!output.artifact) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const memoryArtifact = output.artifact[Tools.memory] as MemoryArtifact | undefined;
|
||||
if (!memoryArtifact) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const attachment: Partial<TAttachment> = {
|
||||
type: Tools.memory,
|
||||
toolCallId: output.tool_call_id,
|
||||
messageId: metadata?.run_id ?? '',
|
||||
conversationId: metadata?.thread_id ?? '',
|
||||
[Tools.memory]: memoryArtifact,
|
||||
};
|
||||
if (!res.headersSent) {
|
||||
return attachment;
|
||||
}
|
||||
res.write(`event: attachment\ndata: ${JSON.stringify(attachment)}\n\n`);
|
||||
return attachment;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a memory callback for handling memory artifacts
|
||||
* @param params - The parameters object
|
||||
* @param params.res - The server response object
|
||||
* @param params.artifactPromises - Array to collect artifact promises
|
||||
* @returns The memory callback function
|
||||
*/
|
||||
export function createMemoryCallback({
|
||||
res,
|
||||
artifactPromises,
|
||||
}: {
|
||||
res: ServerResponse;
|
||||
artifactPromises: Promise<Partial<TAttachment> | null>[];
|
||||
}): ToolEndCallback {
|
||||
return async (data: ToolEndData, metadata?: Record<string, unknown>) => {
|
||||
const output = data?.output;
|
||||
const memoryArtifact = output?.artifact?.[Tools.memory] as MemoryArtifact;
|
||||
if (memoryArtifact == null) {
|
||||
return;
|
||||
}
|
||||
artifactPromises.push(
|
||||
handleMemoryArtifact({ res, data, metadata }).catch((error) => {
|
||||
logger.error('Error processing memory artifact content:', error);
|
||||
return null;
|
||||
}),
|
||||
);
|
||||
};
|
||||
}
|
||||
990
packages/api/src/agents/resources.test.ts
Normal file
990
packages/api/src/agents/resources.test.ts
Normal file
|
|
@ -0,0 +1,990 @@
|
|||
import { primeResources } from './resources';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import { EModelEndpoint, EToolResources, AgentCapabilities } from 'librechat-data-provider';
|
||||
import type { Request as ServerRequest } from 'express';
|
||||
import type { TFile } from 'librechat-data-provider';
|
||||
import type { TGetFiles } from './resources';
|
||||
|
||||
// Mock logger
|
||||
jest.mock('@librechat/data-schemas', () => ({
|
||||
logger: {
|
||||
error: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
describe('primeResources', () => {
|
||||
let mockReq: ServerRequest;
|
||||
let mockGetFiles: jest.MockedFunction<TGetFiles>;
|
||||
let requestFileSet: Set<string>;
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset mocks
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Setup mock request
|
||||
mockReq = {
|
||||
app: {
|
||||
locals: {
|
||||
[EModelEndpoint.agents]: {
|
||||
capabilities: [AgentCapabilities.ocr],
|
||||
},
|
||||
},
|
||||
},
|
||||
} as unknown as ServerRequest;
|
||||
|
||||
// Setup mock getFiles function
|
||||
mockGetFiles = jest.fn();
|
||||
|
||||
// Setup request file set
|
||||
requestFileSet = new Set(['file1', 'file2', 'file3']);
|
||||
});
|
||||
|
||||
describe('when OCR is enabled and tool_resources has OCR file_ids', () => {
|
||||
it('should fetch OCR files and include them in attachments', async () => {
|
||||
const mockOcrFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'ocr-file-1',
|
||||
filename: 'document.pdf',
|
||||
filepath: '/uploads/document.pdf',
|
||||
object: 'file',
|
||||
type: 'application/pdf',
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
mockGetFiles.mockResolvedValue(mockOcrFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr-file-1'],
|
||||
},
|
||||
};
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments: undefined,
|
||||
tool_resources,
|
||||
});
|
||||
|
||||
expect(mockGetFiles).toHaveBeenCalledWith({ file_id: { $in: ['ocr-file-1'] } }, {}, {});
|
||||
expect(result.attachments).toEqual(mockOcrFiles);
|
||||
expect(result.tool_resources).toEqual(tool_resources);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when OCR is disabled', () => {
|
||||
it('should not fetch OCR files even if tool_resources has OCR file_ids', async () => {
|
||||
(mockReq.app as ServerRequest['app']).locals[EModelEndpoint.agents].capabilities = [];
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr-file-1'],
|
||||
},
|
||||
};
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments: undefined,
|
||||
tool_resources,
|
||||
});
|
||||
|
||||
expect(mockGetFiles).not.toHaveBeenCalled();
|
||||
expect(result.attachments).toBeUndefined();
|
||||
expect(result.tool_resources).toEqual(tool_resources);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when attachments are provided', () => {
|
||||
it('should process files with fileIdentifier as execute_code resources', async () => {
|
||||
const mockFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'file1',
|
||||
filename: 'script.py',
|
||||
filepath: '/uploads/script.py',
|
||||
object: 'file',
|
||||
type: 'text/x-python',
|
||||
bytes: 512,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
metadata: {
|
||||
fileIdentifier: 'python-script',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const attachments = Promise.resolve(mockFiles);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: {},
|
||||
});
|
||||
|
||||
expect(result.attachments).toEqual(mockFiles);
|
||||
expect(result.tool_resources?.[EToolResources.execute_code]?.files).toEqual(mockFiles);
|
||||
});
|
||||
|
||||
it('should process embedded files as file_search resources', async () => {
|
||||
const mockFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'file2',
|
||||
filename: 'document.txt',
|
||||
filepath: '/uploads/document.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 256,
|
||||
embedded: true,
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
const attachments = Promise.resolve(mockFiles);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: {},
|
||||
});
|
||||
|
||||
expect(result.attachments).toEqual(mockFiles);
|
||||
expect(result.tool_resources?.[EToolResources.file_search]?.files).toEqual(mockFiles);
|
||||
});
|
||||
|
||||
it('should process image files in requestFileSet as image_edit resources', async () => {
|
||||
const mockFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'file1',
|
||||
filename: 'image.png',
|
||||
filepath: '/uploads/image.png',
|
||||
object: 'file',
|
||||
type: 'image/png',
|
||||
bytes: 2048,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
height: 800,
|
||||
width: 600,
|
||||
},
|
||||
];
|
||||
|
||||
const attachments = Promise.resolve(mockFiles);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: {},
|
||||
});
|
||||
|
||||
expect(result.attachments).toEqual(mockFiles);
|
||||
expect(result.tool_resources?.[EToolResources.image_edit]?.files).toEqual(mockFiles);
|
||||
});
|
||||
|
||||
it('should not process image files not in requestFileSet', async () => {
|
||||
const mockFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'file-not-in-set',
|
||||
filename: 'image.png',
|
||||
filepath: '/uploads/image.png',
|
||||
object: 'file',
|
||||
type: 'image/png',
|
||||
bytes: 2048,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
height: 800,
|
||||
width: 600,
|
||||
},
|
||||
];
|
||||
|
||||
const attachments = Promise.resolve(mockFiles);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: {},
|
||||
});
|
||||
|
||||
expect(result.attachments).toEqual(mockFiles);
|
||||
expect(result.tool_resources?.[EToolResources.image_edit]).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not process image files without height and width', async () => {
|
||||
const mockFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'file1',
|
||||
filename: 'image.png',
|
||||
filepath: '/uploads/image.png',
|
||||
object: 'file',
|
||||
type: 'image/png',
|
||||
bytes: 2048,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
// Missing height and width
|
||||
},
|
||||
];
|
||||
|
||||
const attachments = Promise.resolve(mockFiles);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: {},
|
||||
});
|
||||
|
||||
expect(result.attachments).toEqual(mockFiles);
|
||||
expect(result.tool_resources?.[EToolResources.image_edit]).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should filter out null files from attachments', async () => {
|
||||
const mockFiles: Array<TFile | null> = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'file1',
|
||||
filename: 'valid.txt',
|
||||
filepath: '/uploads/valid.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
null,
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'file2',
|
||||
filename: 'valid2.txt',
|
||||
filepath: '/uploads/valid2.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 128,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
const attachments = Promise.resolve(mockFiles);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: {},
|
||||
});
|
||||
|
||||
expect(result.attachments).toHaveLength(2);
|
||||
expect(result.attachments?.[0]?.file_id).toBe('file1');
|
||||
expect(result.attachments?.[1]?.file_id).toBe('file2');
|
||||
});
|
||||
|
||||
it('should merge existing tool_resources with new files', async () => {
|
||||
const mockFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'file1',
|
||||
filename: 'script.py',
|
||||
filepath: '/uploads/script.py',
|
||||
object: 'file',
|
||||
type: 'text/x-python',
|
||||
bytes: 512,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
metadata: {
|
||||
fileIdentifier: 'python-script',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const existingToolResources = {
|
||||
[EToolResources.execute_code]: {
|
||||
files: [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'existing-file',
|
||||
filename: 'existing.py',
|
||||
filepath: '/uploads/existing.py',
|
||||
object: 'file' as const,
|
||||
type: 'text/x-python',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
const attachments = Promise.resolve(mockFiles);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: existingToolResources,
|
||||
});
|
||||
|
||||
expect(result.tool_resources?.[EToolResources.execute_code]?.files).toHaveLength(2);
|
||||
expect(result.tool_resources?.[EToolResources.execute_code]?.files?.[0]?.file_id).toBe(
|
||||
'existing-file',
|
||||
);
|
||||
expect(result.tool_resources?.[EToolResources.execute_code]?.files?.[1]?.file_id).toBe(
|
||||
'file1',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when both OCR and attachments are provided', () => {
|
||||
it('should include both OCR files and attachment files', async () => {
|
||||
const mockOcrFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'ocr-file-1',
|
||||
filename: 'document.pdf',
|
||||
filepath: '/uploads/document.pdf',
|
||||
object: 'file',
|
||||
type: 'application/pdf',
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
const mockAttachmentFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'file1',
|
||||
filename: 'attachment.txt',
|
||||
filepath: '/uploads/attachment.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
mockGetFiles.mockResolvedValue(mockOcrFiles);
|
||||
const attachments = Promise.resolve(mockAttachmentFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr-file-1'],
|
||||
},
|
||||
};
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources,
|
||||
});
|
||||
|
||||
expect(result.attachments).toHaveLength(2);
|
||||
expect(result.attachments?.[0]?.file_id).toBe('ocr-file-1');
|
||||
expect(result.attachments?.[1]?.file_id).toBe('file1');
|
||||
});
|
||||
|
||||
it('should prevent duplicate files when same file exists in OCR and attachments', async () => {
|
||||
const sharedFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'shared-file-id',
|
||||
filename: 'document.pdf',
|
||||
filepath: '/uploads/document.pdf',
|
||||
object: 'file',
|
||||
type: 'application/pdf',
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
const mockOcrFiles: TFile[] = [sharedFile];
|
||||
const mockAttachmentFiles: TFile[] = [
|
||||
sharedFile,
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'unique-file',
|
||||
filename: 'other.txt',
|
||||
filepath: '/uploads/other.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
mockGetFiles.mockResolvedValue(mockOcrFiles);
|
||||
const attachments = Promise.resolve(mockAttachmentFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['shared-file-id'],
|
||||
},
|
||||
};
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources,
|
||||
});
|
||||
|
||||
// Should only have 2 files, not 3 (no duplicate)
|
||||
expect(result.attachments).toHaveLength(2);
|
||||
expect(result.attachments?.filter((f) => f?.file_id === 'shared-file-id')).toHaveLength(1);
|
||||
expect(result.attachments?.find((f) => f?.file_id === 'unique-file')).toBeDefined();
|
||||
});
|
||||
|
||||
it('should still categorize duplicate files for tool_resources', async () => {
|
||||
const sharedFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'shared-file-id',
|
||||
filename: 'script.py',
|
||||
filepath: '/uploads/script.py',
|
||||
object: 'file',
|
||||
type: 'text/x-python',
|
||||
bytes: 512,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
metadata: {
|
||||
fileIdentifier: 'python-script',
|
||||
},
|
||||
};
|
||||
|
||||
const mockOcrFiles: TFile[] = [sharedFile];
|
||||
const mockAttachmentFiles: TFile[] = [sharedFile];
|
||||
|
||||
mockGetFiles.mockResolvedValue(mockOcrFiles);
|
||||
const attachments = Promise.resolve(mockAttachmentFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['shared-file-id'],
|
||||
},
|
||||
};
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources,
|
||||
});
|
||||
|
||||
// File should appear only once in attachments
|
||||
expect(result.attachments).toHaveLength(1);
|
||||
expect(result.attachments?.[0]?.file_id).toBe('shared-file-id');
|
||||
|
||||
// But should still be categorized in tool_resources
|
||||
expect(result.tool_resources?.[EToolResources.execute_code]?.files).toHaveLength(1);
|
||||
expect(result.tool_resources?.[EToolResources.execute_code]?.files?.[0]?.file_id).toBe(
|
||||
'shared-file-id',
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle multiple duplicate files', async () => {
|
||||
const file1: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'file-1',
|
||||
filename: 'doc1.pdf',
|
||||
filepath: '/uploads/doc1.pdf',
|
||||
object: 'file',
|
||||
type: 'application/pdf',
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
const file2: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'file-2',
|
||||
filename: 'doc2.pdf',
|
||||
filepath: '/uploads/doc2.pdf',
|
||||
object: 'file',
|
||||
type: 'application/pdf',
|
||||
bytes: 2048,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
const uniqueFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'unique-file',
|
||||
filename: 'unique.txt',
|
||||
filepath: '/uploads/unique.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
const mockOcrFiles: TFile[] = [file1, file2];
|
||||
const mockAttachmentFiles: TFile[] = [file1, file2, uniqueFile];
|
||||
|
||||
mockGetFiles.mockResolvedValue(mockOcrFiles);
|
||||
const attachments = Promise.resolve(mockAttachmentFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['file-1', 'file-2'],
|
||||
},
|
||||
};
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources,
|
||||
});
|
||||
|
||||
// Should have 3 files total (2 from OCR + 1 unique from attachments)
|
||||
expect(result.attachments).toHaveLength(3);
|
||||
|
||||
// Each file should appear only once
|
||||
const fileIds = result.attachments?.map((f) => f?.file_id);
|
||||
expect(fileIds).toContain('file-1');
|
||||
expect(fileIds).toContain('file-2');
|
||||
expect(fileIds).toContain('unique-file');
|
||||
|
||||
// Check no duplicates
|
||||
const uniqueFileIds = new Set(fileIds);
|
||||
expect(uniqueFileIds.size).toBe(fileIds?.length);
|
||||
});
|
||||
|
||||
it('should handle files without file_id gracefully', async () => {
|
||||
const fileWithoutId: Partial<TFile> = {
|
||||
user: 'user1',
|
||||
filename: 'no-id.txt',
|
||||
filepath: '/uploads/no-id.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
const normalFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'normal-file',
|
||||
filename: 'normal.txt',
|
||||
filepath: '/uploads/normal.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 512,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
const mockOcrFiles: TFile[] = [normalFile];
|
||||
const mockAttachmentFiles = [fileWithoutId as TFile, normalFile];
|
||||
|
||||
mockGetFiles.mockResolvedValue(mockOcrFiles);
|
||||
const attachments = Promise.resolve(mockAttachmentFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['normal-file'],
|
||||
},
|
||||
};
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources,
|
||||
});
|
||||
|
||||
// Should include file without ID and one instance of normal file
|
||||
expect(result.attachments).toHaveLength(2);
|
||||
expect(result.attachments?.filter((f) => f?.file_id === 'normal-file')).toHaveLength(1);
|
||||
expect(result.attachments?.some((f) => !f?.file_id)).toBe(true);
|
||||
});
|
||||
|
||||
it('should prevent duplicates from existing tool_resources', async () => {
|
||||
const existingFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'existing-file',
|
||||
filename: 'existing.py',
|
||||
filepath: '/uploads/existing.py',
|
||||
object: 'file',
|
||||
type: 'text/x-python',
|
||||
bytes: 512,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
metadata: {
|
||||
fileIdentifier: 'python-script',
|
||||
},
|
||||
};
|
||||
|
||||
const newFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'new-file',
|
||||
filename: 'new.py',
|
||||
filepath: '/uploads/new.py',
|
||||
object: 'file',
|
||||
type: 'text/x-python',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
metadata: {
|
||||
fileIdentifier: 'python-script',
|
||||
},
|
||||
};
|
||||
|
||||
const existingToolResources = {
|
||||
[EToolResources.execute_code]: {
|
||||
files: [existingFile],
|
||||
},
|
||||
};
|
||||
|
||||
const attachments = Promise.resolve([existingFile, newFile]);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: existingToolResources,
|
||||
});
|
||||
|
||||
// Should only add the new file to attachments
|
||||
expect(result.attachments).toHaveLength(1);
|
||||
expect(result.attachments?.[0]?.file_id).toBe('new-file');
|
||||
|
||||
// Should not duplicate the existing file in tool_resources
|
||||
expect(result.tool_resources?.[EToolResources.execute_code]?.files).toHaveLength(2);
|
||||
const fileIds = result.tool_resources?.[EToolResources.execute_code]?.files?.map(
|
||||
(f) => f.file_id,
|
||||
);
|
||||
expect(fileIds).toEqual(['existing-file', 'new-file']);
|
||||
});
|
||||
|
||||
it('should handle duplicates within attachments array', async () => {
|
||||
const duplicatedFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'dup-file',
|
||||
filename: 'duplicate.txt',
|
||||
filepath: '/uploads/duplicate.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
const uniqueFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'unique-file',
|
||||
filename: 'unique.txt',
|
||||
filepath: '/uploads/unique.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 128,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
// Same file appears multiple times in attachments
|
||||
const attachments = Promise.resolve([
|
||||
duplicatedFile,
|
||||
duplicatedFile,
|
||||
uniqueFile,
|
||||
duplicatedFile,
|
||||
]);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: {},
|
||||
});
|
||||
|
||||
// Should only have 2 unique files
|
||||
expect(result.attachments).toHaveLength(2);
|
||||
const fileIds = result.attachments?.map((f) => f?.file_id);
|
||||
expect(fileIds).toContain('dup-file');
|
||||
expect(fileIds).toContain('unique-file');
|
||||
|
||||
// Verify no duplicates
|
||||
expect(fileIds?.filter((id) => id === 'dup-file')).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should prevent duplicates across different tool_resource categories', async () => {
|
||||
const multiPurposeFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'multi-file',
|
||||
filename: 'data.txt',
|
||||
filepath: '/uploads/data.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 512,
|
||||
embedded: true, // Will be categorized as file_search
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
const existingToolResources = {
|
||||
[EToolResources.file_search]: {
|
||||
files: [multiPurposeFile],
|
||||
},
|
||||
};
|
||||
|
||||
// Try to add the same file again
|
||||
const attachments = Promise.resolve([multiPurposeFile]);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: existingToolResources,
|
||||
});
|
||||
|
||||
// Should not add to attachments (already exists)
|
||||
expect(result.attachments).toHaveLength(0);
|
||||
|
||||
// Should not duplicate in file_search
|
||||
expect(result.tool_resources?.[EToolResources.file_search]?.files).toHaveLength(1);
|
||||
expect(result.tool_resources?.[EToolResources.file_search]?.files?.[0]?.file_id).toBe(
|
||||
'multi-file',
|
||||
);
|
||||
});
|
||||
|
||||
it('should handle complex scenario with OCR, existing tool_resources, and attachments', async () => {
|
||||
const ocrFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'ocr-file',
|
||||
filename: 'scan.pdf',
|
||||
filepath: '/uploads/scan.pdf',
|
||||
object: 'file',
|
||||
type: 'application/pdf',
|
||||
bytes: 2048,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
const existingFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'existing-file',
|
||||
filename: 'code.py',
|
||||
filepath: '/uploads/code.py',
|
||||
object: 'file',
|
||||
type: 'text/x-python',
|
||||
bytes: 512,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
metadata: {
|
||||
fileIdentifier: 'python-script',
|
||||
},
|
||||
};
|
||||
|
||||
const newFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'new-file',
|
||||
filename: 'image.png',
|
||||
filepath: '/uploads/image.png',
|
||||
object: 'file',
|
||||
type: 'image/png',
|
||||
bytes: 4096,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
height: 800,
|
||||
width: 600,
|
||||
};
|
||||
|
||||
mockGetFiles.mockResolvedValue([ocrFile, existingFile]); // OCR returns both files
|
||||
const attachments = Promise.resolve([existingFile, ocrFile, newFile]); // Attachments has duplicates
|
||||
|
||||
const existingToolResources = {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr-file', 'existing-file'],
|
||||
},
|
||||
[EToolResources.execute_code]: {
|
||||
files: [existingFile],
|
||||
},
|
||||
};
|
||||
|
||||
requestFileSet.add('new-file'); // Only new-file is in request set
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources: existingToolResources,
|
||||
});
|
||||
|
||||
// Should have 3 unique files total
|
||||
expect(result.attachments).toHaveLength(3);
|
||||
const attachmentIds = result.attachments?.map((f) => f?.file_id).sort();
|
||||
expect(attachmentIds).toEqual(['existing-file', 'new-file', 'ocr-file']);
|
||||
|
||||
// Check tool_resources
|
||||
expect(result.tool_resources?.[EToolResources.execute_code]?.files).toHaveLength(1);
|
||||
expect(result.tool_resources?.[EToolResources.image_edit]?.files).toHaveLength(1);
|
||||
expect(result.tool_resources?.[EToolResources.image_edit]?.files?.[0]?.file_id).toBe(
|
||||
'new-file',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
  describe('error handling', () => {
    // Verifies that primeResources never rethrows: failures are logged via the
    // shared logger and the function falls back to returning its raw inputs.
    it('should handle errors gracefully and log them', async () => {
      const mockFiles: TFile[] = [
        {
          user: 'user1',
          file_id: 'file1',
          filename: 'test.txt',
          filepath: '/uploads/test.txt',
          object: 'file',
          type: 'text/plain',
          bytes: 256,
          embedded: false,
          usage: 0,
        },
      ];

      const attachments = Promise.resolve(mockFiles);
      const error = new Error('Test error');

      // Mock getFiles to throw an error when called for OCR
      mockGetFiles.mockRejectedValue(error);

      const tool_resources = {
        [EToolResources.ocr]: {
          file_ids: ['ocr-file-1'],
        },
      };

      const result = await primeResources({
        req: mockReq,
        getFiles: mockGetFiles,
        requestFileSet,
        attachments,
        tool_resources,
      });

      // The OCR fetch failure is logged, and the resolvable attachments promise
      // is still honored in the catch-block fallback path.
      expect(logger.error).toHaveBeenCalledWith('Error priming resources', error);
      expect(result.attachments).toEqual(mockFiles);
      expect(result.tool_resources).toEqual(tool_resources);
    });

    it('should handle promise rejection in attachments', async () => {
      const error = new Error('Attachment error');
      const attachments = Promise.reject(error);

      // The function should now handle rejected attachment promises gracefully
      const result = await primeResources({
        req: mockReq,
        getFiles: mockGetFiles,
        requestFileSet,
        attachments,
        tool_resources: {},
      });

      // Should log both the main error and the attachment error
      expect(logger.error).toHaveBeenCalledWith('Error priming resources', error);
      expect(logger.error).toHaveBeenCalledWith(
        'Error resolving attachments in catch block',
        error,
      );

      // Should return empty array when attachments promise is rejected
      expect(result.attachments).toEqual([]);
      expect(result.tool_resources).toEqual({});
    });
  });
|
||||
|
||||
  describe('edge cases', () => {
    // Covers degenerate inputs: a request object with no `app`, undefined
    // tool_resources, and an empty request-file set.
    it('should handle missing app.locals gracefully', async () => {
      // `req.app` is absent, so reading `req.app.locals` inside primeResources
      // throws; the catch block is expected to absorb it.
      const reqWithoutLocals = {} as ServerRequest;

      const result = await primeResources({
        req: reqWithoutLocals,
        getFiles: mockGetFiles,
        requestFileSet,
        attachments: undefined,
        tool_resources: {
          [EToolResources.ocr]: {
            file_ids: ['ocr-file-1'],
          },
        },
      });

      expect(mockGetFiles).not.toHaveBeenCalled();
      // When app.locals is missing and there's an error accessing properties,
      // the function falls back to the catch block which returns an empty array
      expect(result.attachments).toEqual([]);
    });

    it('should handle undefined tool_resources', async () => {
      const result = await primeResources({
        req: mockReq,
        getFiles: mockGetFiles,
        requestFileSet,
        attachments: undefined,
        tool_resources: undefined,
      });

      // Undefined tool_resources are normalized to an empty object, and with no
      // attachments promise the attachments result stays undefined.
      expect(result.tool_resources).toEqual({});
      expect(result.attachments).toBeUndefined();
    });

    it('should handle empty requestFileSet', async () => {
      const mockFiles: TFile[] = [
        {
          user: 'user1',
          file_id: 'file1',
          filename: 'image.png',
          filepath: '/uploads/image.png',
          object: 'file',
          type: 'image/png',
          bytes: 2048,
          embedded: false,
          usage: 0,
          height: 800,
          width: 600,
        },
      ];

      const attachments = Promise.resolve(mockFiles);
      const emptyRequestFileSet = new Set<string>();

      const result = await primeResources({
        req: mockReq,
        getFiles: mockGetFiles,
        requestFileSet: emptyRequestFileSet,
        attachments,
        tool_resources: {},
      });

      // Image files not present in the request file set are kept as plain
      // attachments and never categorized under image_edit.
      expect(result.attachments).toEqual(mockFiles);
      expect(result.tool_resources?.[EToolResources.image_edit]).toBeUndefined();
    });
  });
|
||||
});
|
||||
282
packages/api/src/agents/resources.ts
Normal file
282
packages/api/src/agents/resources.ts
Normal file
|
|
@ -0,0 +1,282 @@
|
|||
import { logger } from '@librechat/data-schemas';
|
||||
import { EModelEndpoint, EToolResources, AgentCapabilities } from 'librechat-data-provider';
|
||||
import type { AgentToolResources, TFile, AgentBaseResource } from 'librechat-data-provider';
|
||||
import type { FilterQuery, QueryOptions, ProjectionType } from 'mongoose';
|
||||
import type { IMongoFile } from '@librechat/data-schemas';
|
||||
import type { Request as ServerRequest } from 'express';
|
||||
|
||||
/**
 * Function type for retrieving files from the database
 * @param filter - MongoDB filter query for files
 * @param _sortOptions - Sorting options (currently unused)
 *   NOTE(review): typed as `ProjectionType` while `selectFields` is typed as
 *   `QueryOptions` — the second and third generic choices look swapped; confirm
 *   against the concrete `getFiles` implementation before relying on them.
 * @param selectFields - Field selection options
 * @returns Promise resolving to array of files
 */
export type TGetFiles = (
  filter: FilterQuery<IMongoFile>,
  _sortOptions: ProjectionType<IMongoFile> | null | undefined,
  selectFields: QueryOptions<IMongoFile> | null | undefined,
) => Promise<Array<TFile>>;
|
||||
|
||||
/**
|
||||
* Helper function to add a file to a specific tool resource category
|
||||
* Prevents duplicate files within the same resource category
|
||||
* @param params - Parameters object
|
||||
* @param params.file - The file to add to the resource
|
||||
* @param params.resourceType - The type of tool resource (e.g., execute_code, file_search, image_edit)
|
||||
* @param params.tool_resources - The agent's tool resources object to update
|
||||
* @param params.processedResourceFiles - Set tracking processed files per resource type
|
||||
*/
|
||||
const addFileToResource = ({
|
||||
file,
|
||||
resourceType,
|
||||
tool_resources,
|
||||
processedResourceFiles,
|
||||
}: {
|
||||
file: TFile;
|
||||
resourceType: EToolResources;
|
||||
tool_resources: AgentToolResources;
|
||||
processedResourceFiles: Set<string>;
|
||||
}): void => {
|
||||
if (!file.file_id) {
|
||||
return;
|
||||
}
|
||||
|
||||
const resourceKey = `${resourceType}:${file.file_id}`;
|
||||
if (processedResourceFiles.has(resourceKey)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const resource = tool_resources[resourceType as keyof AgentToolResources] ?? {};
|
||||
if (!resource.files) {
|
||||
(tool_resources[resourceType as keyof AgentToolResources] as AgentBaseResource) = {
|
||||
...resource,
|
||||
files: [],
|
||||
};
|
||||
}
|
||||
|
||||
// Check if already exists in the files array
|
||||
const resourceFiles = tool_resources[resourceType as keyof AgentToolResources]?.files;
|
||||
const alreadyExists = resourceFiles?.some((f: TFile) => f.file_id === file.file_id);
|
||||
|
||||
if (!alreadyExists) {
|
||||
resourceFiles?.push(file);
|
||||
processedResourceFiles.add(resourceKey);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Categorizes a file into the appropriate tool resource based on its properties
|
||||
* Files are categorized as:
|
||||
* - execute_code: Files with fileIdentifier metadata
|
||||
* - file_search: Files marked as embedded
|
||||
* - image_edit: Image files in the request file set with dimensions
|
||||
* @param params - Parameters object
|
||||
* @param params.file - The file to categorize
|
||||
* @param params.tool_resources - The agent's tool resources to update
|
||||
* @param params.requestFileSet - Set of file IDs from the current request
|
||||
* @param params.processedResourceFiles - Set tracking processed files per resource type
|
||||
*/
|
||||
const categorizeFileForToolResources = ({
|
||||
file,
|
||||
tool_resources,
|
||||
requestFileSet,
|
||||
processedResourceFiles,
|
||||
}: {
|
||||
file: TFile;
|
||||
tool_resources: AgentToolResources;
|
||||
requestFileSet: Set<string>;
|
||||
processedResourceFiles: Set<string>;
|
||||
}): void => {
|
||||
if (file.metadata?.fileIdentifier) {
|
||||
addFileToResource({
|
||||
file,
|
||||
resourceType: EToolResources.execute_code,
|
||||
tool_resources,
|
||||
processedResourceFiles,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (file.embedded === true) {
|
||||
addFileToResource({
|
||||
file,
|
||||
resourceType: EToolResources.file_search,
|
||||
tool_resources,
|
||||
processedResourceFiles,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
requestFileSet.has(file.file_id) &&
|
||||
file.type.startsWith('image') &&
|
||||
file.height &&
|
||||
file.width
|
||||
) {
|
||||
addFileToResource({
|
||||
file,
|
||||
resourceType: EToolResources.image_edit,
|
||||
tool_resources,
|
||||
processedResourceFiles,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Primes resources for agent execution by processing attachments and tool resources
 * This function:
 * 1. Fetches OCR files if OCR is enabled
 * 2. Processes attachment files
 * 3. Categorizes files into appropriate tool resources
 * 4. Prevents duplicate files across all sources
 *
 * On any error it logs and falls back to the raw inputs rather than throwing.
 *
 * @param params - Parameters object
 * @param params.req - Express request object containing app configuration
 * @param params.getFiles - Function to retrieve files from database
 * @param params.requestFileSet - Set of file IDs from the current request
 * @param params.attachments - Promise resolving to array of attachment files
 * @param params.tool_resources - Existing tool resources for the agent
 * @returns Promise resolving to processed attachments and updated tool resources
 */
export const primeResources = async ({
  req,
  getFiles,
  requestFileSet,
  attachments: _attachments,
  tool_resources: _tool_resources,
}: {
  req: ServerRequest;
  requestFileSet: Set<string>;
  attachments: Promise<Array<TFile | null>> | undefined;
  tool_resources: AgentToolResources | undefined;
  getFiles: TGetFiles;
}): Promise<{
  attachments: Array<TFile | undefined> | undefined;
  tool_resources: AgentToolResources | undefined;
}> => {
  try {
    /**
     * Array to collect all unique files that will be returned as attachments
     * Files are added from OCR results and attachment promises, with duplicates prevented
     */
    const attachments: Array<TFile> = [];
    /**
     * Set of file IDs already added to the attachments array
     * Used to prevent duplicate files from being added multiple times
     * Pre-populated with files from non-OCR tool_resources to prevent re-adding them
     */
    const attachmentFileIds = new Set<string>();
    /**
     * Set tracking which files have been added to specific tool resource categories
     * Format: "resourceType:fileId" (e.g., "execute_code:file123")
     * Prevents the same file from being added multiple times to the same resource
     */
    const processedResourceFiles = new Set<string>();
    /**
     * The agent's tool resources object that will be updated with categorized files
     * Initialized from input parameter or empty object if not provided
     */
    const tool_resources = _tool_resources ?? {};

    // Track existing files in tool_resources to prevent duplicates within resources
    for (const [resourceType, resource] of Object.entries(tool_resources)) {
      if (resource?.files && Array.isArray(resource.files)) {
        for (const file of resource.files) {
          if (file?.file_id) {
            processedResourceFiles.add(`${resourceType}:${file.file_id}`);
            // Files from non-OCR resources should not be added to attachments from _attachments
            if (resourceType !== EToolResources.ocr) {
              attachmentFileIds.add(file.file_id);
            }
          }
        }
      }
    }

    // NOTE: if `req.app` is absent this access throws; the catch block below
    // converts that into the safe empty-attachments fallback.
    const isOCREnabled = (req.app.locals?.[EModelEndpoint.agents]?.capabilities ?? []).includes(
      AgentCapabilities.ocr,
    );

    if (tool_resources[EToolResources.ocr]?.file_ids && isOCREnabled) {
      const context = await getFiles(
        {
          file_id: { $in: tool_resources.ocr.file_ids },
        },
        {},
        {},
      );

      for (const file of context) {
        if (!file?.file_id) {
          continue;
        }

        // Clear from attachmentFileIds if it was pre-added, so OCR files always
        // make it into the attachments array exactly once
        attachmentFileIds.delete(file.file_id);

        // Add to attachments
        attachments.push(file);
        attachmentFileIds.add(file.file_id);

        // Categorize for tool resources
        categorizeFileForToolResources({
          file,
          tool_resources,
          requestFileSet,
          processedResourceFiles,
        });
      }
    }

    if (!_attachments) {
      // No attachments promise: return undefined (not []) when nothing was collected
      return { attachments: attachments.length > 0 ? attachments : undefined, tool_resources };
    }

    const files = await _attachments;

    for (const file of files) {
      if (!file) {
        continue;
      }

      // Categorization runs even for files that are skipped as duplicate
      // attachments below, so they still land in their tool_resources bucket
      categorizeFileForToolResources({
        file,
        tool_resources,
        requestFileSet,
        processedResourceFiles,
      });

      if (file.file_id && attachmentFileIds.has(file.file_id)) {
        continue;
      }

      attachments.push(file);
      if (file.file_id) {
        attachmentFileIds.add(file.file_id);
      }
    }

    return { attachments: attachments.length > 0 ? attachments : [], tool_resources };
  } catch (error) {
    logger.error('Error priming resources', error);

    // Safely try to get attachments without rethrowing
    let safeAttachments: Array<TFile | undefined> = [];
    if (_attachments) {
      try {
        const attachmentFiles = await _attachments;
        safeAttachments = (attachmentFiles?.filter((file) => !!file) ?? []) as Array<TFile>;
      } catch (attachmentError) {
        // If attachments promise is also rejected, just use empty array
        logger.error('Error resolving attachments in catch block', attachmentError);
        safeAttachments = [];
      }
    }

    // Fall back to the ORIGINAL input tool_resources (possibly undefined),
    // not the partially-updated local copy
    return {
      attachments: safeAttachments,
      tool_resources: _tool_resources,
    };
  }
};
|
||||
107
packages/api/src/agents/run.ts
Normal file
107
packages/api/src/agents/run.ts
Normal file
|
|
@ -0,0 +1,107 @@
|
|||
import { Run, Providers } from '@librechat/agents';
|
||||
import { providerEndpointMap, KnownEndpoints } from 'librechat-data-provider';
|
||||
import type {
|
||||
OpenAIClientOptions,
|
||||
StandardGraphConfig,
|
||||
EventHandler,
|
||||
GenericTool,
|
||||
GraphEvents,
|
||||
IState,
|
||||
} from '@librechat/agents';
|
||||
import type { Agent } from 'librechat-data-provider';
|
||||
import type * as t from '~/types';
|
||||
|
||||
/**
 * Providers whose OpenAI-compatible APIs need token usage reported via the
 * `usage` config flag instead of stream-usage events; `createRun` below sets
 * `streamUsage: false` / `usage: true` for these.
 */
const customProviders = new Set([
  Providers.XAI,
  Providers.OLLAMA,
  Providers.DEEPSEEK,
  Providers.OPENROUTER,
]);
|
||||
|
||||
/**
|
||||
* Creates a new Run instance with custom handlers and configuration.
|
||||
*
|
||||
* @param options - The options for creating the Run instance.
|
||||
* @param options.agent - The agent for this run.
|
||||
* @param options.signal - The signal for this run.
|
||||
* @param options.req - The server request.
|
||||
* @param options.runId - Optional run ID; otherwise, a new run ID will be generated.
|
||||
* @param options.customHandlers - Custom event handlers.
|
||||
* @param options.streaming - Whether to use streaming.
|
||||
* @param options.streamUsage - Whether to stream usage information.
|
||||
* @returns {Promise<Run<IState>>} A promise that resolves to a new Run instance.
|
||||
*/
|
||||
export async function createRun({
|
||||
runId,
|
||||
agent,
|
||||
signal,
|
||||
customHandlers,
|
||||
streaming = true,
|
||||
streamUsage = true,
|
||||
}: {
|
||||
agent: Omit<Agent, 'tools'> & { tools?: GenericTool[] };
|
||||
signal: AbortSignal;
|
||||
runId?: string;
|
||||
streaming?: boolean;
|
||||
streamUsage?: boolean;
|
||||
customHandlers?: Record<GraphEvents, EventHandler>;
|
||||
}): Promise<Run<IState>> {
|
||||
const provider =
|
||||
(providerEndpointMap[
|
||||
agent.provider as keyof typeof providerEndpointMap
|
||||
] as unknown as Providers) ?? agent.provider;
|
||||
|
||||
const llmConfig: t.RunLLMConfig = Object.assign(
|
||||
{
|
||||
provider,
|
||||
streaming,
|
||||
streamUsage,
|
||||
},
|
||||
agent.model_parameters,
|
||||
);
|
||||
|
||||
/** Resolves issues with new OpenAI usage field */
|
||||
if (
|
||||
customProviders.has(agent.provider) ||
|
||||
(agent.provider === Providers.OPENAI && agent.endpoint !== agent.provider)
|
||||
) {
|
||||
llmConfig.streamUsage = false;
|
||||
llmConfig.usage = true;
|
||||
}
|
||||
|
||||
let reasoningKey: 'reasoning_content' | 'reasoning' | undefined;
|
||||
if (provider === Providers.GOOGLE) {
|
||||
reasoningKey = 'reasoning';
|
||||
} else if (
|
||||
llmConfig.configuration?.baseURL?.includes(KnownEndpoints.openrouter) ||
|
||||
(agent.endpoint && agent.endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
|
||||
) {
|
||||
reasoningKey = 'reasoning';
|
||||
} else if (
|
||||
(llmConfig as OpenAIClientOptions).useResponsesApi === true &&
|
||||
(provider === Providers.OPENAI || provider === Providers.AZURE)
|
||||
) {
|
||||
reasoningKey = 'reasoning';
|
||||
}
|
||||
|
||||
const graphConfig: StandardGraphConfig = {
|
||||
signal,
|
||||
llmConfig,
|
||||
reasoningKey,
|
||||
tools: agent.tools,
|
||||
instructions: agent.instructions,
|
||||
additional_instructions: agent.additional_instructions,
|
||||
// toolEnd: agent.end_after_tools,
|
||||
};
|
||||
|
||||
// TEMPORARY FOR TESTING
|
||||
if (agent.provider === Providers.ANTHROPIC || agent.provider === Providers.BEDROCK) {
|
||||
graphConfig.streamBuffer = 2000;
|
||||
}
|
||||
|
||||
return Run.create({
|
||||
runId,
|
||||
graphConfig,
|
||||
customHandlers,
|
||||
});
|
||||
}
|
||||
129
packages/api/src/crypto/encryption.ts
Normal file
129
packages/api/src/crypto/encryption.ts
Normal file
|
|
@ -0,0 +1,129 @@
|
|||
import 'dotenv/config';
|
||||
import crypto from 'node:crypto';
|
||||
const { webcrypto } = crypto;
|
||||
|
||||
// Use hex decoding for both key and IV for legacy methods.
// NOTE(review): falls back to an empty buffer when CREDS_KEY / CREDS_IV are
// unset, so failures surface at call time (importKey/createCipheriv), not at load.
const key = Buffer.from(process.env.CREDS_KEY ?? '', 'hex');
const iv = Buffer.from(process.env.CREDS_IV ?? '', 'hex');
// WebCrypto algorithm identifier shared by the legacy v1/v2 helpers below.
const algorithm = 'AES-CBC';

// --- Legacy v1/v2 Setup: AES-CBC with fixed key and IV ---
|
||||
|
||||
export async function encrypt(value: string) {
|
||||
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
||||
'encrypt',
|
||||
]);
|
||||
const encoder = new TextEncoder();
|
||||
const data = encoder.encode(value);
|
||||
const encryptedBuffer = await webcrypto.subtle.encrypt(
|
||||
{ name: algorithm, iv: iv },
|
||||
cryptoKey,
|
||||
data,
|
||||
);
|
||||
return Buffer.from(encryptedBuffer).toString('hex');
|
||||
}
|
||||
|
||||
export async function decrypt(encryptedValue: string) {
|
||||
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
||||
'decrypt',
|
||||
]);
|
||||
const encryptedBuffer = Buffer.from(encryptedValue, 'hex');
|
||||
const decryptedBuffer = await webcrypto.subtle.decrypt(
|
||||
{ name: algorithm, iv: iv },
|
||||
cryptoKey,
|
||||
encryptedBuffer,
|
||||
);
|
||||
const decoder = new TextDecoder();
|
||||
return decoder.decode(decryptedBuffer);
|
||||
}
|
||||
|
||||
// --- v2: AES-CBC with a random IV per encryption ---
|
||||
|
||||
export async function encryptV2(value: string) {
|
||||
const gen_iv = webcrypto.getRandomValues(new Uint8Array(16));
|
||||
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
||||
'encrypt',
|
||||
]);
|
||||
const encoder = new TextEncoder();
|
||||
const data = encoder.encode(value);
|
||||
const encryptedBuffer = await webcrypto.subtle.encrypt(
|
||||
{ name: algorithm, iv: gen_iv },
|
||||
cryptoKey,
|
||||
data,
|
||||
);
|
||||
return Buffer.from(gen_iv).toString('hex') + ':' + Buffer.from(encryptedBuffer).toString('hex');
|
||||
}
|
||||
|
||||
export async function decryptV2(encryptedValue: string) {
|
||||
const parts = encryptedValue.split(':');
|
||||
if (parts.length === 1) {
|
||||
return parts[0];
|
||||
}
|
||||
const gen_iv = Buffer.from(parts.shift() ?? '', 'hex');
|
||||
const encrypted = parts.join(':');
|
||||
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
|
||||
'decrypt',
|
||||
]);
|
||||
const encryptedBuffer = Buffer.from(encrypted, 'hex');
|
||||
const decryptedBuffer = await webcrypto.subtle.decrypt(
|
||||
{ name: algorithm, iv: gen_iv },
|
||||
cryptoKey,
|
||||
encryptedBuffer,
|
||||
);
|
||||
const decoder = new TextDecoder();
|
||||
return decoder.decode(decryptedBuffer);
|
||||
}
|
||||
|
||||
// --- v3: AES-256-CTR using Node's crypto functions ---
|
||||
const algorithm_v3 = 'aes-256-ctr';
|
||||
|
||||
/**
|
||||
* Encrypts a value using AES-256-CTR.
|
||||
* Note: AES-256 requires a 32-byte key. Ensure that process.env.CREDS_KEY is a 64-character hex string.
|
||||
*
|
||||
* @param value - The plaintext to encrypt.
|
||||
* @returns The encrypted string with a "v3:" prefix.
|
||||
*/
|
||||
export function encryptV3(value: string) {
|
||||
if (key.length !== 32) {
|
||||
throw new Error(`Invalid key length: expected 32 bytes, got ${key.length} bytes`);
|
||||
}
|
||||
const iv_v3 = crypto.randomBytes(16);
|
||||
const cipher = crypto.createCipheriv(algorithm_v3, key, iv_v3);
|
||||
const encrypted = Buffer.concat([cipher.update(value, 'utf8'), cipher.final()]);
|
||||
return `v3:${iv_v3.toString('hex')}:${encrypted.toString('hex')}`;
|
||||
}
|
||||
|
||||
export function decryptV3(encryptedValue: string) {
|
||||
const parts = encryptedValue.split(':');
|
||||
if (parts[0] !== 'v3') {
|
||||
throw new Error('Not a v3 encrypted value');
|
||||
}
|
||||
const iv_v3 = Buffer.from(parts[1], 'hex');
|
||||
const encryptedText = Buffer.from(parts.slice(2).join(':'), 'hex');
|
||||
const decipher = crypto.createDecipheriv(algorithm_v3, key, iv_v3);
|
||||
const decrypted = Buffer.concat([decipher.update(encryptedText), decipher.final()]);
|
||||
return decrypted.toString('utf8');
|
||||
}
|
||||
|
||||
export async function getRandomValues(length: number) {
|
||||
if (!Number.isInteger(length) || length <= 0) {
|
||||
throw new Error('Length must be a positive integer');
|
||||
}
|
||||
const randomValues = new Uint8Array(length);
|
||||
webcrypto.getRandomValues(randomValues);
|
||||
return Buffer.from(randomValues).toString('hex');
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes SHA-256 hash for the given input.
|
||||
* @param input - The input to hash.
|
||||
* @returns The SHA-256 hash of the input.
|
||||
*/
|
||||
export async function hashBackupCode(input: string) {
|
||||
const encoder = new TextEncoder();
|
||||
const data = encoder.encode(input);
|
||||
const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
|
||||
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
||||
return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
|
||||
}
|
||||
1
packages/api/src/crypto/index.ts
Normal file
1
packages/api/src/crypto/index.ts
Normal file
|
|
@ -0,0 +1 @@
|
|||
export * from './encryption';
|
||||
1
packages/api/src/endpoints/google/index.ts
Normal file
1
packages/api/src/endpoints/google/index.ts
Normal file
|
|
@ -0,0 +1 @@
|
|||
export * from './llm';
|
||||
207
packages/api/src/endpoints/google/llm.ts
Normal file
207
packages/api/src/endpoints/google/llm.ts
Normal file
|
|
@ -0,0 +1,207 @@
|
|||
import { Providers } from '@librechat/agents';
|
||||
import { googleSettings, AuthKeys } from 'librechat-data-provider';
|
||||
import type { GoogleClientOptions, VertexAIClientOptions } from '@librechat/agents';
|
||||
import type { GoogleAIToolType } from '@langchain/google-common';
|
||||
import type * as t from '~/types';
|
||||
import { isEnabled } from '~/utils';
|
||||
|
||||
function getThresholdMapping(model: string) {
|
||||
const gemini1Pattern = /gemini-(1\.0|1\.5|pro$|1\.0-pro|1\.5-pro|1\.5-flash-001)/;
|
||||
const restrictedPattern = /(gemini-(1\.5-flash-8b|2\.0|exp)|learnlm)/;
|
||||
|
||||
if (gemini1Pattern.test(model)) {
|
||||
return (value: string) => {
|
||||
if (value === 'OFF') {
|
||||
return 'BLOCK_NONE';
|
||||
}
|
||||
return value;
|
||||
};
|
||||
}
|
||||
|
||||
if (restrictedPattern.test(model)) {
|
||||
return (value: string) => {
|
||||
if (value === 'OFF' || value === 'HARM_BLOCK_THRESHOLD_UNSPECIFIED') {
|
||||
return 'BLOCK_NONE';
|
||||
}
|
||||
return value;
|
||||
};
|
||||
}
|
||||
|
||||
return (value: string) => value;
|
||||
}
|
||||
|
||||
export function getSafetySettings(
|
||||
model?: string,
|
||||
): Array<{ category: string; threshold: string }> | undefined {
|
||||
if (isEnabled(process.env.GOOGLE_EXCLUDE_SAFETY_SETTINGS)) {
|
||||
return undefined;
|
||||
}
|
||||
const mapThreshold = getThresholdMapping(model ?? '');
|
||||
|
||||
return [
|
||||
{
|
||||
category: 'HARM_CATEGORY_SEXUALLY_EXPLICIT',
|
||||
threshold: mapThreshold(
|
||||
process.env.GOOGLE_SAFETY_SEXUALLY_EXPLICIT || 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',
|
||||
),
|
||||
},
|
||||
{
|
||||
category: 'HARM_CATEGORY_HATE_SPEECH',
|
||||
threshold: mapThreshold(
|
||||
process.env.GOOGLE_SAFETY_HATE_SPEECH || 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',
|
||||
),
|
||||
},
|
||||
{
|
||||
category: 'HARM_CATEGORY_HARASSMENT',
|
||||
threshold: mapThreshold(
|
||||
process.env.GOOGLE_SAFETY_HARASSMENT || 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',
|
||||
),
|
||||
},
|
||||
{
|
||||
category: 'HARM_CATEGORY_DANGEROUS_CONTENT',
|
||||
threshold: mapThreshold(
|
||||
process.env.GOOGLE_SAFETY_DANGEROUS_CONTENT || 'HARM_BLOCK_THRESHOLD_UNSPECIFIED',
|
||||
),
|
||||
},
|
||||
{
|
||||
category: 'HARM_CATEGORY_CIVIC_INTEGRITY',
|
||||
threshold: mapThreshold(process.env.GOOGLE_SAFETY_CIVIC_INTEGRITY || 'BLOCK_NONE'),
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
/**
 * Replicates core logic from GoogleClient's constructor and setOptions, plus client determination.
 * Returns an object with the provider label and the final options that would be passed to createLLM.
 *
 * @param credentials - Either a JSON string or an object containing Google keys
 * @param options - The same shape as the "GoogleClient" constructor options
 * @returns `{ tools, provider, llmConfig }` — `provider` is `Providers.VERTEXAI`
 *   when a GCP project id is found in the service key, else `Providers.GOOGLE`.
 * @throws Error when the credentials string cannot be parsed, or when neither
 *   an API key nor service-account credentials are provided.
 */

export function getGoogleConfig(
  credentials: string | t.GoogleCredentials | undefined,
  options: t.GoogleConfigOptions = {},
) {
  // Credentials may arrive serialized (e.g. from storage) or already parsed.
  let creds: t.GoogleCredentials = {};
  if (typeof credentials === 'string') {
    try {
      creds = JSON.parse(credentials);
    } catch (err: unknown) {
      throw new Error(
        `Error parsing string credentials: ${err instanceof Error ? err.message : 'Unknown error'}`,
      );
    }
  } else if (credentials && typeof credentials === 'object') {
    creds = credentials;
  }

  // The service key itself may be a nested JSON string; parse if so.
  const serviceKeyRaw = creds[AuthKeys.GOOGLE_SERVICE_KEY] ?? {};
  const serviceKey =
    typeof serviceKeyRaw === 'string' ? JSON.parse(serviceKeyRaw) : (serviceKeyRaw ?? {});

  // An explicit API key takes precedence; the project id (Vertex AI) is only
  // considered when no API key is present.
  const apiKey = creds[AuthKeys.GOOGLE_API_KEY] ?? null;
  const project_id = !apiKey ? (serviceKey?.project_id ?? null) : null;

  const reverseProxyUrl = options.reverseProxyUrl;
  const authHeader = options.authHeader;

  // Pull thinking/grounding out of modelOptions; the rest passes through to the LLM config.
  const {
    grounding,
    thinking = googleSettings.thinking.default,
    thinkingBudget = googleSettings.thinkingBudget.default,
    ...modelOptions
  } = options.modelOptions || {};

  const llmConfig: GoogleClientOptions | VertexAIClientOptions = {
    ...(modelOptions || {}),
    model: modelOptions?.model ?? '',
    maxRetries: 2,
  };

  /** Used only for Safety Settings */
  llmConfig.safetySettings = getSafetySettings(llmConfig.model);

  let provider;

  // Presence of a GCP project id selects Vertex AI; otherwise Gemini API.
  if (project_id) {
    provider = Providers.VERTEXAI;
  } else {
    provider = Providers.GOOGLE;
  }

  // If we have a GCP project => Vertex AI
  if (provider === Providers.VERTEXAI) {
    (llmConfig as VertexAIClientOptions).authOptions = {
      credentials: { ...serviceKey },
      projectId: project_id,
    };
    (llmConfig as VertexAIClientOptions).location = process.env.GOOGLE_LOC || 'us-central1';
  } else if (apiKey && provider === Providers.GOOGLE) {
    llmConfig.apiKey = apiKey;
  } else {
    // Neither a usable API key nor service-account credentials were supplied.
    throw new Error(
      `Invalid credentials provided. Please provide either a valid API key or service account credentials for Google Cloud.`,
    );
  }

  // Thinking is enabled when toggled on and the budget is positive (or -1,
  // presumably meaning "unlimited/auto" — confirm against provider docs).
  const shouldEnableThinking =
    thinking && thinkingBudget != null && (thinkingBudget > 0 || thinkingBudget === -1);

  if (shouldEnableThinking && provider === Providers.GOOGLE) {
    (llmConfig as GoogleClientOptions).thinkingConfig = {
      thinkingBudget: thinking ? thinkingBudget : googleSettings.thinkingBudget.default,
      includeThoughts: Boolean(thinking),
    };
  } else if (shouldEnableThinking && provider === Providers.VERTEXAI) {
    // Vertex AI takes the same settings as top-level fields rather than a nested config.
    (llmConfig as VertexAIClientOptions).thinkingBudget = thinking
      ? thinkingBudget
      : googleSettings.thinkingBudget.default;
    (llmConfig as VertexAIClientOptions).includeThoughts = Boolean(thinking);
  }

  /*
  let legacyOptions = {};
  // Filter out any "examples" that are empty
  legacyOptions.examples = (legacyOptions.examples ?? [])
    .filter(Boolean)
    .filter((obj) => obj?.input?.content !== '' && obj?.output?.content !== '');

  // If user has "examples" from legacyOptions, push them onto llmConfig
  if (legacyOptions.examples?.length) {
    llmConfig.examples = legacyOptions.examples.map((ex) => {
      const { input, output } = ex;
      if (!input?.content || !output?.content) {return undefined;}
      return {
        input: new HumanMessage(input.content),
        output: new AIMessage(output.content),
      };
    }).filter(Boolean);
  }
  */

  if (reverseProxyUrl) {
    (llmConfig as GoogleClientOptions).baseUrl = reverseProxyUrl;
  }

  // NOTE(review): the Authorization header is built from `apiKey`, which is
  // null on the Vertex AI path — confirm `authHeader` is only expected for
  // API-key usage.
  if (authHeader) {
    (llmConfig as GoogleClientOptions).customHeaders = {
      Authorization: `Bearer ${apiKey}`,
    };
  }

  const tools: GoogleAIToolType[] = [];

  // Grounding enables Google Search as a provider-side tool.
  if (grounding) {
    tools.push({ googleSearch: {} });
  }

  // Return the final shape
  return {
    /** @type {GoogleAIToolType[]} */
    tools,
    /** @type {Providers.GOOGLE | Providers.VERTEXAI} */
    provider,
    /** @type {GoogleClientOptions | VertexAIClientOptions} */
    llmConfig,
  };
}
|
||||
2
packages/api/src/endpoints/index.ts
Normal file
2
packages/api/src/endpoints/index.ts
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
export * from './google';
|
||||
export * from './openai';
|
||||
2
packages/api/src/endpoints/openai/index.ts
Normal file
2
packages/api/src/endpoints/openai/index.ts
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
export * from './llm';
|
||||
export * from './initialize';
|
||||
175
packages/api/src/endpoints/openai/initialize.ts
Normal file
175
packages/api/src/endpoints/openai/initialize.ts
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
import { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } from 'librechat-data-provider';
|
||||
import type {
|
||||
UserKeyValues,
|
||||
OpenAIOptionsResult,
|
||||
OpenAIConfigOptions,
|
||||
InitializeOpenAIOptionsParams,
|
||||
} from '~/types';
|
||||
import { createHandleLLMNewToken } from '~/utils/generators';
|
||||
import { getAzureCredentials } from '~/utils/azure';
|
||||
import { isUserProvided } from '~/utils/common';
|
||||
import { resolveHeaders } from '~/utils/env';
|
||||
import { getOpenAIConfig } from './llm';
|
||||
|
||||
/**
 * Initializes OpenAI options for agent usage. This function always returns configuration
 * options and never creates a client instance (equivalent to optionsOnly=true behavior).
 *
 * Resolves the API key and base URL from environment variables or user-provided
 * credentials, applies Azure-specific model-group mapping when the endpoint is
 * Azure OpenAI, and attaches a token stream-rate callback when configured.
 *
 * @param params - Configuration parameters
 * @returns Promise resolving to OpenAI configuration options
 * @throws Error if API key is missing or user key has expired
 */
export const initializeOpenAI = async ({
  req,
  overrideModel,
  endpointOption,
  overrideEndpoint,
  getUserKeyValues,
  checkUserKeyExpiry,
}: InitializeOpenAIOptionsParams): Promise<OpenAIOptionsResult> => {
  const { PROXY, OPENAI_API_KEY, AZURE_API_KEY, OPENAI_REVERSE_PROXY, AZURE_OPENAI_BASEURL } =
    process.env;

  // `key` in the request body is the user key's expiry timestamp, not the key itself.
  const { key: expiresAt } = req.body;
  const modelName = overrideModel ?? req.body.model;
  const endpoint = overrideEndpoint ?? req.body.endpoint;

  if (!endpoint) {
    throw new Error('Endpoint is required');
  }

  // Per-endpoint environment fallbacks for credentials and base URLs.
  const credentials = {
    [EModelEndpoint.openAI]: OPENAI_API_KEY,
    [EModelEndpoint.azureOpenAI]: AZURE_API_KEY,
  };

  const baseURLOptions = {
    [EModelEndpoint.openAI]: OPENAI_REVERSE_PROXY,
    [EModelEndpoint.azureOpenAI]: AZURE_OPENAI_BASEURL,
  };

  // True when the env value marks the credential/URL as supplied by the end user.
  const userProvidesKey = isUserProvided(credentials[endpoint as keyof typeof credentials]);
  const userProvidesURL = isUserProvided(baseURLOptions[endpoint as keyof typeof baseURLOptions]);

  // Only fetch stored user values when the user supplies the key/URL and a
  // (still-valid) expiry accompanies the request.
  let userValues: UserKeyValues | null = null;
  if (expiresAt && (userProvidesKey || userProvidesURL)) {
    checkUserKeyExpiry(expiresAt, endpoint);
    userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
  }

  let apiKey = userProvidesKey
    ? userValues?.apiKey
    : credentials[endpoint as keyof typeof credentials];
  const baseURL = userProvidesURL
    ? userValues?.baseURL
    : baseURLOptions[endpoint as keyof typeof baseURLOptions];

  const clientOptions: OpenAIConfigOptions = {
    proxy: PROXY ?? undefined,
    reverseProxyUrl: baseURL || undefined,
    streaming: true,
  };

  const isAzureOpenAI = endpoint === EModelEndpoint.azureOpenAI;
  // App-level Azure config (model groups) takes precedence over user-provided Azure creds.
  const azureConfig = isAzureOpenAI && req.app.locals[EModelEndpoint.azureOpenAI];

  if (isAzureOpenAI && azureConfig) {
    const { modelGroupMap, groupMap } = azureConfig;
    const {
      azureOptions,
      baseURL: configBaseURL,
      headers = {},
      serverless,
    } = mapModelToAzureConfig({
      modelName: modelName || '',
      modelGroupMap,
      groupMap,
    });

    clientOptions.reverseProxyUrl = configBaseURL ?? clientOptions.reverseProxyUrl;
    // Resolve env-var placeholders in headers; request headers win over group headers.
    clientOptions.headers = resolveHeaders(
      { ...headers, ...(clientOptions.headers ?? {}) },
      req.user,
    );

    // Per-group parameter additions/removals for this model.
    const groupName = modelGroupMap[modelName || '']?.group;
    if (groupName && groupMap[groupName]) {
      clientOptions.addParams = groupMap[groupName]?.addParams;
      clientOptions.dropParams = groupMap[groupName]?.dropParams;
    }

    apiKey = azureOptions.azureOpenAIApiKey;
    clientOptions.azure = !serverless ? azureOptions : undefined;

    // Serverless deployments authenticate via an `api-key` header and
    // `api-version` query instead of the azure client options.
    if (serverless === true) {
      clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
        ? { 'api-version': azureOptions.azureOpenAIApiVersion }
        : undefined;

      if (!clientOptions.headers) {
        clientOptions.headers = {};
      }
      clientOptions.headers['api-key'] = apiKey;
    }
  } else if (isAzureOpenAI) {
    // No app-level config: user-provided Azure creds are stored as JSON.
    clientOptions.azure =
      userProvidesKey && userValues?.apiKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
    apiKey = clientOptions.azure?.azureOpenAIApiKey;
  }

  if (userProvidesKey && !apiKey) {
    // Structured error so the client can prompt the user for their key.
    throw new Error(
      JSON.stringify({
        type: ErrorTypes.NO_USER_KEY,
      }),
    );
  }

  if (!apiKey) {
    throw new Error(`${endpoint} API Key not provided.`);
  }

  const modelOptions = {
    ...endpointOption.model_parameters,
    model: modelName,
    user: req.user.id,
  };

  const finalClientOptions: OpenAIConfigOptions = {
    ...clientOptions,
    modelOptions,
  };

  const options = getOpenAIConfig(apiKey, finalClientOptions, endpoint);

  const openAIConfig = req.app.locals[EModelEndpoint.openAI];
  const allConfig = req.app.locals.all;
  // Fallback Azure stream rate: 30 for gpt-4 models, else 17
  // (presumably the token emission interval — confirm units with createHandleLLMNewToken).
  const azureRate = modelName?.includes('gpt-4') ? 30 : 17;

  let streamRate: number | undefined;

  if (isAzureOpenAI && azureConfig) {
    streamRate = azureConfig.streamRate ?? azureRate;
  } else if (!isAzureOpenAI && openAIConfig) {
    streamRate = openAIConfig.streamRate;
  }

  // A global ("all") stream rate overrides endpoint-specific values.
  if (allConfig?.streamRate) {
    streamRate = allConfig.streamRate;
  }

  if (streamRate) {
    options.llmConfig.callbacks = [
      {
        handleLLMNewToken: createHandleLLMNewToken(streamRate),
      },
    ];
  }

  const result: OpenAIOptionsResult = {
    ...options,
    streamRate,
  };

  return result;
};
|
||||
230
packages/api/src/endpoints/openai/llm.ts
Normal file
230
packages/api/src/endpoints/openai/llm.ts
Normal file
|
|
@ -0,0 +1,230 @@
|
|||
import { ProxyAgent } from 'undici';
|
||||
import { KnownEndpoints, removeNullishValues } from 'librechat-data-provider';
|
||||
import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
|
||||
import type { AzureOpenAIInput } from '@langchain/openai';
|
||||
import type { OpenAI } from 'openai';
|
||||
import type * as t from '~/types';
|
||||
import { sanitizeModelName, constructAzureURL } from '~/utils/azure';
|
||||
import { isEnabled } from '~/utils/common';
|
||||
|
||||
function hasReasoningParams({
|
||||
reasoning_effort,
|
||||
reasoning_summary,
|
||||
}: {
|
||||
reasoning_effort?: string | null;
|
||||
reasoning_summary?: string | null;
|
||||
}): boolean {
|
||||
return (
|
||||
(reasoning_effort != null && reasoning_effort !== '') ||
|
||||
(reasoning_summary != null && reasoning_summary !== '')
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Generates configuration options for creating a language model (LLM) instance.
 * Handles OpenRouter detection, proxy routing, Azure option mapping (including
 * the Responses API variant), reasoning parameters, and parameter drop lists.
 *
 * @param apiKey - The API key for authentication.
 * @param options - Additional options for configuring the LLM.
 * @param endpoint - The endpoint name
 * @returns Configuration options for creating an LLM instance.
 */
export function getOpenAIConfig(
  apiKey: string,
  options: t.OpenAIConfigOptions = {},
  endpoint?: string | null,
): t.LLMConfigResult {
  const {
    modelOptions: _modelOptions = {},
    reverseProxyUrl,
    defaultQuery,
    headers,
    proxy,
    azure,
    streaming = true,
    addParams,
    dropParams,
  } = options;
  // Reasoning params are handled separately below; everything else passes through.
  const { reasoning_effort, reasoning_summary, ...modelOptions } = _modelOptions;
  const llmConfig: Partial<t.ClientOptions> &
    Partial<t.OpenAIParameters> &
    Partial<AzureOpenAIInput> = Object.assign(
    {
      streaming,
      model: modelOptions.model ?? '',
    },
    modelOptions,
  );

  // addParams are merged last so they can override model options.
  if (addParams && typeof addParams === 'object') {
    Object.assign(llmConfig, addParams);
  }

  let useOpenRouter = false;
  const configOptions: t.OpenAIConfiguration = {};

  // OpenRouter detection: either the proxy URL or the endpoint name mentions it.
  if (
    (reverseProxyUrl && reverseProxyUrl.includes(KnownEndpoints.openrouter)) ||
    (endpoint && endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
  ) {
    useOpenRouter = true;
    llmConfig.include_reasoning = true;
    configOptions.baseURL = reverseProxyUrl;
    // OpenRouter attribution headers; caller-supplied headers can override them.
    configOptions.defaultHeaders = Object.assign(
      {
        'HTTP-Referer': 'https://librechat.ai',
        'X-Title': 'LibreChat',
      },
      headers,
    );
  } else if (reverseProxyUrl) {
    configOptions.baseURL = reverseProxyUrl;
    if (headers) {
      configOptions.defaultHeaders = headers;
    }
  }

  if (defaultQuery) {
    configOptions.defaultQuery = defaultQuery;
  }

  // Route requests through an HTTP(S) proxy when configured.
  if (proxy) {
    const proxyAgent = new ProxyAgent(proxy);
    configOptions.fetchOptions = {
      dispatcher: proxyAgent,
    };
  }

  if (azure) {
    // Optionally use the (sanitized) model name as the Azure deployment name.
    const useModelName = isEnabled(process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME);
    const updatedAzure = { ...azure };
    updatedAzure.azureOpenAIApiDeploymentName = useModelName
      ? sanitizeModelName(llmConfig.model || '')
      : azure.azureOpenAIApiDeploymentName;

    if (process.env.AZURE_OPENAI_DEFAULT_MODEL) {
      llmConfig.model = process.env.AZURE_OPENAI_DEFAULT_MODEL;
    }

    // Derive azureOpenAIBasePath from a custom base URL, trimming at the deployment segment.
    const constructBaseURL = () => {
      if (!configOptions.baseURL) {
        return;
      }
      const azureURL = constructAzureURL({
        baseURL: configOptions.baseURL,
        azureOptions: updatedAzure,
      });
      updatedAzure.azureOpenAIBasePath = azureURL.split(
        `/${updatedAzure.azureOpenAIApiDeploymentName}`,
      )[0];
    };

    constructBaseURL();
    Object.assign(llmConfig, updatedAzure);

    // Responses API on Azure uses a plain apiKey + `api-key` header/`api-version`
    // query instead of the azureOpenAI* client fields, which are removed here.
    const constructAzureResponsesApi = () => {
      if (!llmConfig.useResponsesApi) {
        return;
      }

      configOptions.baseURL = constructAzureURL({
        // The single-quoted `${INSTANCE_NAME}` is a literal placeholder that
        // constructAzureURL substitutes — not a template literal.
        baseURL: configOptions.baseURL || 'https://${INSTANCE_NAME}.openai.azure.com/openai/v1',
        azureOptions: llmConfig,
      });

      delete llmConfig.azureOpenAIApiDeploymentName;
      delete llmConfig.azureOpenAIApiInstanceName;
      delete llmConfig.azureOpenAIApiVersion;
      delete llmConfig.azureOpenAIBasePath;
      delete llmConfig.azureOpenAIApiKey;
      llmConfig.apiKey = apiKey;

      configOptions.defaultHeaders = {
        ...configOptions.defaultHeaders,
        'api-key': apiKey,
      };
      configOptions.defaultQuery = {
        ...configOptions.defaultQuery,
        'api-version': 'preview',
      };
    };

    constructAzureResponsesApi();

    // Azure addresses deployments, not models, so the deployment name is the model.
    llmConfig.model = updatedAzure.azureOpenAIApiDeploymentName;
  } else {
    llmConfig.apiKey = apiKey;
  }

  // NOTE(review): organization is only applied on the Azure branch here —
  // confirm this is intended rather than the non-Azure OpenAI case.
  if (process.env.OPENAI_ORGANIZATION && azure) {
    configOptions.organization = process.env.OPENAI_ORGANIZATION;
  }

  // Responses API / OpenRouter take a structured `reasoning` object; classic
  // Chat Completions only accept the flat `reasoning_effort` field.
  if (
    hasReasoningParams({ reasoning_effort, reasoning_summary }) &&
    (llmConfig.useResponsesApi === true || useOpenRouter)
  ) {
    llmConfig.reasoning = removeNullishValues(
      {
        effort: reasoning_effort,
        summary: reasoning_summary,
      },
      true,
    ) as OpenAI.Reasoning;
  } else if (hasReasoningParams({ reasoning_effort })) {
    llmConfig.reasoning_effort = reasoning_effort;
  }

  // Normalize snake_case max_tokens to the client's camelCase field.
  if (llmConfig.max_tokens != null) {
    llmConfig.maxTokens = llmConfig.max_tokens;
    delete llmConfig.max_tokens;
  }

  const tools: BindToolsInput[] = [];

  // Web search requires the Responses API and registers the preview tool.
  if (modelOptions.web_search) {
    llmConfig.useResponsesApi = true;
    tools.push({ type: 'web_search_preview' });
  }

  /**
   * Note: OpenAI Web Search models do not support any known parameters besides `max_tokens`
   */
  if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
    const searchExcludeParams = [
      'frequency_penalty',
      'presence_penalty',
      'reasoning',
      'reasoning_effort',
      'temperature',
      'top_p',
      'top_k',
      'stop',
      'logit_bias',
      'seed',
      'response_format',
      'n',
      'logprobs',
      'user',
    ];

    const updatedDropParams = dropParams || [];
    const combinedDropParams = [...new Set([...updatedDropParams, ...searchExcludeParams])];

    combinedDropParams.forEach((param) => {
      if (param in llmConfig) {
        delete llmConfig[param as keyof t.ClientOptions];
      }
    });
  } else if (dropParams && Array.isArray(dropParams)) {
    // Caller-configured parameter removals for providers that reject them.
    dropParams.forEach((param) => {
      if (param in llmConfig) {
        delete llmConfig[param as keyof t.ClientOptions];
      }
    });
  }

  return {
    llmConfig,
    configOptions,
    tools,
  };
}
|
||||
1
packages/api/src/files/index.ts
Normal file
1
packages/api/src/files/index.ts
Normal file
|
|
@ -0,0 +1 @@
|
|||
export * from './mistral/crud';
|
||||
1570
packages/api/src/files/mistral/crud.spec.ts
Normal file
1570
packages/api/src/files/mistral/crud.spec.ts
Normal file
File diff suppressed because it is too large
Load diff
647
packages/api/src/files/mistral/crud.ts
Normal file
647
packages/api/src/files/mistral/crud.ts
Normal file
|
|
@ -0,0 +1,647 @@
|
|||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import FormData from 'form-data';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import {
|
||||
FileSources,
|
||||
envVarRegex,
|
||||
extractEnvVariable,
|
||||
extractVariableName,
|
||||
} from 'librechat-data-provider';
|
||||
import type { TCustomConfig } from 'librechat-data-provider';
|
||||
import type { Request as ServerRequest } from 'express';
|
||||
import type { AxiosError } from 'axios';
|
||||
import type {
|
||||
MistralFileUploadResponse,
|
||||
MistralSignedUrlResponse,
|
||||
MistralOCRUploadResult,
|
||||
MistralOCRError,
|
||||
OCRResultPage,
|
||||
OCRResult,
|
||||
OCRImage,
|
||||
} from '~/types';
|
||||
import { logAxiosError, createAxiosInstance } from '~/utils/axios';
|
||||
import { loadServiceKey } from '~/utils/key';
|
||||
|
||||
/** Shared Axios instance for all Mistral API calls in this module. */
const axios = createAxiosInstance();
/** Default Mistral REST API root; overridable per call via `baseURL`. */
const DEFAULT_MISTRAL_BASE_URL = 'https://api.mistral.ai/v1';
/** Default OCR model used when callers do not pass `model`. */
const DEFAULT_MISTRAL_MODEL = 'mistral-ocr-latest';
|
||||
|
||||
/** Helper type for auth configuration */
// NOTE(review): not referenced in this file's visible code — possibly consumed
// elsewhere; confirm before removing.
interface AuthConfig {
  // API key for the OCR provider (sent as a Bearer token by the request helpers).
  apiKey: string;
  // Base URL of the provider's REST API.
  baseURL: string;
}

/** Helper type for Google service account */
// Shape mirrors the fields of a GCP service-account JSON key file.
interface GoogleServiceAccount {
  client_email?: string;
  private_key?: string;
  project_id?: string;
}

/** Helper type for OCR request context */
interface OCRContext {
  // Minimal slice of the Express request: the authenticated user plus
  // `app.locals.ocr` (the OCR section of the custom config).
  req: Pick<ServerRequest, 'user' | 'app'> & {
    user?: { id: string };
    app: {
      locals?: {
        ocr?: TCustomConfig['ocr'];
      };
    };
  };
  // The uploaded file (multer) to run OCR on.
  file: Express.Multer.File;
  // Resolves auth values (env or per-user) for the given fields; names in
  // `optional` may be absent from the result.
  loadAuthValues: (params: {
    userId: string;
    authFields: string[];
    optional?: Set<string>;
  }) => Promise<Record<string, string | undefined>>;
}
|
||||
|
||||
/**
|
||||
* Uploads a document to Mistral API using file streaming to avoid loading the entire file into memory
|
||||
* @param params Upload parameters
|
||||
* @param params.filePath The path to the file on disk
|
||||
* @param params.fileName Optional filename to use (defaults to the name from filePath)
|
||||
* @param params.apiKey Mistral API key
|
||||
* @param params.baseURL Mistral API base URL
|
||||
* @returns The response from Mistral API
|
||||
*/
|
||||
export async function uploadDocumentToMistral({
|
||||
apiKey,
|
||||
filePath,
|
||||
baseURL = DEFAULT_MISTRAL_BASE_URL,
|
||||
fileName = '',
|
||||
}: {
|
||||
apiKey: string;
|
||||
filePath: string;
|
||||
baseURL?: string;
|
||||
fileName?: string;
|
||||
}): Promise<MistralFileUploadResponse> {
|
||||
const form = new FormData();
|
||||
form.append('purpose', 'ocr');
|
||||
const actualFileName = fileName || path.basename(filePath);
|
||||
const fileStream = fs.createReadStream(filePath);
|
||||
form.append('file', fileStream, { filename: actualFileName });
|
||||
|
||||
return axios
|
||||
.post(`${baseURL}/files`, form, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
...form.getHeaders(),
|
||||
},
|
||||
maxBodyLength: Infinity,
|
||||
maxContentLength: Infinity,
|
||||
})
|
||||
.then((res) => res.data)
|
||||
.catch((error) => {
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
export async function getSignedUrl({
|
||||
apiKey,
|
||||
fileId,
|
||||
expiry = 24,
|
||||
baseURL = DEFAULT_MISTRAL_BASE_URL,
|
||||
}: {
|
||||
apiKey: string;
|
||||
fileId: string;
|
||||
expiry?: number;
|
||||
baseURL?: string;
|
||||
}): Promise<MistralSignedUrlResponse> {
|
||||
return axios
|
||||
.get(`${baseURL}/files/${fileId}/url?expiry=${expiry}`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
})
|
||||
.then((res) => res.data)
|
||||
.catch((error) => {
|
||||
logger.error('Error fetching signed URL:', error.message);
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Object} params
|
||||
* @param {string} params.apiKey
|
||||
* @param {string} params.url - The document or image URL
|
||||
* @param {string} [params.documentType='document_url'] - 'document_url' or 'image_url'
|
||||
* @param {string} [params.model]
|
||||
* @param {string} [params.baseURL]
|
||||
* @returns {Promise<OCRResult>}
|
||||
*/
|
||||
export async function performOCR({
|
||||
url,
|
||||
apiKey,
|
||||
model = DEFAULT_MISTRAL_MODEL,
|
||||
baseURL = DEFAULT_MISTRAL_BASE_URL,
|
||||
documentType = 'document_url',
|
||||
}: {
|
||||
url: string;
|
||||
apiKey: string;
|
||||
model?: string;
|
||||
baseURL?: string;
|
||||
documentType?: 'document_url' | 'image_url';
|
||||
}): Promise<OCRResult> {
|
||||
const documentKey = documentType === 'image_url' ? 'image_url' : 'document_url';
|
||||
return axios
|
||||
.post(
|
||||
`${baseURL}/ocr`,
|
||||
{
|
||||
model,
|
||||
image_limit: 0,
|
||||
include_image_base64: false,
|
||||
document: {
|
||||
type: documentType,
|
||||
[documentKey]: url,
|
||||
},
|
||||
},
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
},
|
||||
)
|
||||
.then((res) => res.data)
|
||||
.catch((error) => {
|
||||
logger.error('Error performing OCR:', error.message);
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if a value needs to be loaded from environment
|
||||
*/
|
||||
function needsEnvLoad(value: string): boolean {
|
||||
return envVarRegex.test(value) || !value.trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the environment variable name for a config value
|
||||
*/
|
||||
function getEnvVarName(configValue: string, defaultName: string): string {
|
||||
if (!envVarRegex.test(configValue)) {
|
||||
return defaultName;
|
||||
}
|
||||
return extractVariableName(configValue) || defaultName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a configuration value from either hardcoded or environment
|
||||
*/
|
||||
async function resolveConfigValue(
|
||||
configValue: string,
|
||||
defaultEnvName: string,
|
||||
authValues: Record<string, string | undefined>,
|
||||
defaultValue?: string,
|
||||
): Promise<string> {
|
||||
// If it's a hardcoded value (not env var and not empty), use it directly
|
||||
if (!needsEnvLoad(configValue)) {
|
||||
return configValue;
|
||||
}
|
||||
|
||||
// Otherwise, get from auth values
|
||||
const envVarName = getEnvVarName(configValue, defaultEnvName);
|
||||
return authValues[envVarName] || defaultValue || '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Loads authentication configuration from OCR config
|
||||
*/
|
||||
async function loadAuthConfig(context: OCRContext): Promise<AuthConfig> {
|
||||
const ocrConfig = context.req.app.locals?.ocr;
|
||||
const apiKeyConfig = ocrConfig?.apiKey || '';
|
||||
const baseURLConfig = ocrConfig?.baseURL || '';
|
||||
|
||||
if (!needsEnvLoad(apiKeyConfig) && !needsEnvLoad(baseURLConfig)) {
|
||||
return {
|
||||
apiKey: apiKeyConfig,
|
||||
baseURL: baseURLConfig,
|
||||
};
|
||||
}
|
||||
|
||||
const authFields: string[] = [];
|
||||
|
||||
if (needsEnvLoad(baseURLConfig)) {
|
||||
authFields.push(getEnvVarName(baseURLConfig, 'OCR_BASEURL'));
|
||||
}
|
||||
|
||||
if (needsEnvLoad(apiKeyConfig)) {
|
||||
authFields.push(getEnvVarName(apiKeyConfig, 'OCR_API_KEY'));
|
||||
}
|
||||
|
||||
const authValues = await context.loadAuthValues({
|
||||
userId: context.req.user?.id || '',
|
||||
authFields,
|
||||
optional: new Set(['OCR_BASEURL']),
|
||||
});
|
||||
|
||||
const apiKey = await resolveConfigValue(apiKeyConfig, 'OCR_API_KEY', authValues);
|
||||
const baseURL = await resolveConfigValue(
|
||||
baseURLConfig,
|
||||
'OCR_BASEURL',
|
||||
authValues,
|
||||
DEFAULT_MISTRAL_BASE_URL,
|
||||
);
|
||||
|
||||
return { apiKey, baseURL };
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the model configuration
|
||||
*/
|
||||
function getModelConfig(ocrConfig: TCustomConfig['ocr']): string {
|
||||
const modelConfig = ocrConfig?.mistralModel || '';
|
||||
|
||||
if (!modelConfig.trim()) {
|
||||
return DEFAULT_MISTRAL_MODEL;
|
||||
}
|
||||
|
||||
if (envVarRegex.test(modelConfig)) {
|
||||
return extractEnvVariable(modelConfig) || DEFAULT_MISTRAL_MODEL;
|
||||
}
|
||||
|
||||
return modelConfig.trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines document type based on file
|
||||
*/
|
||||
function getDocumentType(file: Express.Multer.File): 'image_url' | 'document_url' {
|
||||
const mimetype = (file.mimetype || '').toLowerCase();
|
||||
const originalname = file.originalname || '';
|
||||
const isImage =
|
||||
mimetype.startsWith('image') || /\.(png|jpe?g|gif|bmp|webp|tiff?)$/i.test(originalname);
|
||||
|
||||
return isImage ? 'image_url' : 'document_url';
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes OCR result pages into aggregated text and images
|
||||
*/
|
||||
function processOCRResult(ocrResult: OCRResult): { text: string; images: string[] } {
|
||||
let aggregatedText = '';
|
||||
const images: string[] = [];
|
||||
|
||||
ocrResult.pages.forEach((page: OCRResultPage, index: number) => {
|
||||
if (ocrResult.pages.length > 1) {
|
||||
aggregatedText += `# PAGE ${index + 1}\n`;
|
||||
}
|
||||
|
||||
aggregatedText += page.markdown + '\n\n';
|
||||
|
||||
if (!page.images || page.images.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
page.images.forEach((image: OCRImage) => {
|
||||
if (image.image_base64) {
|
||||
images.push(image.image_base64);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return { text: aggregatedText, images };
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an error message for OCR operations
|
||||
*/
|
||||
function createOCRError(error: unknown, baseMessage: string): Error {
|
||||
const axiosError = error as AxiosError<MistralOCRError>;
|
||||
const detail = axiosError?.response?.data?.detail;
|
||||
const message = detail || baseMessage;
|
||||
|
||||
const responseMessage = axiosError?.response?.data?.message;
|
||||
const errorLog = logAxiosError({ error: axiosError, message });
|
||||
const fullMessage = responseMessage ? `${errorLog} - ${responseMessage}` : errorLog;
|
||||
|
||||
return new Error(fullMessage);
|
||||
}
|
||||
|
||||
/**
 * Uploads a file to the Mistral OCR API and processes the OCR result.
 *
 * Flow: upload the file to Mistral's file store, obtain a temporary signed URL
 * for it, run OCR against that URL, then aggregate the page results.
 *
 * @param params - The params object.
 * @param params.req - The request object from Express. It should have a `user` property with an `id`
 * representing the user
 * @param params.file - The file object, which is part of the request. The file object should
 * have a `mimetype` property that tells us the file type
 * @param params.loadAuthValues - Function to load authentication values
 * @returns - The result object containing the processed `text` and `images` (not currently used),
 * along with the `filename` and `bytes` properties.
 * @throws Error (via createOCRError) when any step fails or OCR returns no pages.
 */
export const uploadMistralOCR = async (context: OCRContext): Promise<MistralOCRUploadResult> => {
  try {
    const { apiKey, baseURL } = await loadAuthConfig(context);
    const model = getModelConfig(context.req.app.locals?.ocr);

    const mistralFile = await uploadDocumentToMistral({
      filePath: context.file.path,
      fileName: context.file.originalname,
      apiKey,
      baseURL,
    });

    // OCR consumes the uploaded file by URL rather than by file ID
    const signedUrlResponse = await getSignedUrl({
      apiKey,
      baseURL,
      fileId: mistralFile.id,
    });

    const documentType = getDocumentType(context.file);
    const ocrResult = await performOCR({
      apiKey,
      baseURL,
      model,
      url: signedUrlResponse.url,
      documentType,
    });

    if (!ocrResult || !ocrResult.pages || ocrResult.pages.length === 0) {
      throw new Error(
        'No OCR result returned from service, may be down or the file is not supported.',
      );
    }
    const { text, images } = processOCRResult(ocrResult);

    return {
      filename: context.file.originalname,
      // NOTE(review): rough size estimate (4 bytes per extracted character) — TODO confirm intent
      bytes: text.length * 4,
      filepath: FileSources.mistral_ocr,
      text,
      images,
    };
  } catch (error) {
    throw createOCRError(error, 'Error uploading document to Mistral OCR API:');
  }
};
|
||||
|
||||
/**
 * Use Azure Mistral OCR API to process the OCR result.
 *
 * Unlike `uploadMistralOCR`, the file is sent inline as a base64 data URI
 * instead of being uploaded to a file store first.
 *
 * @param params - The params object.
 * @param params.req - The request object from Express. It should have a `user` property with an `id`
 * representing the user
 * @param params.file - The file object, which is part of the request. The file object should
 * have a `mimetype` property that tells us the file type
 * @param params.loadAuthValues - Function to load authentication values
 * @returns - The result object containing the processed `text` and `images` (not currently used),
 * along with the `filename` and `bytes` properties.
 * @throws Error (via createOCRError) when any step fails or OCR returns no pages.
 */
export const uploadAzureMistralOCR = async (
  context: OCRContext,
): Promise<MistralOCRUploadResult> => {
  try {
    const { apiKey, baseURL } = await loadAuthConfig(context);
    const model = getModelConfig(context.req.app.locals?.ocr);

    // The entire file is read into memory here for base64 inlining
    const buffer = fs.readFileSync(context.file.path);
    const base64 = buffer.toString('base64');
    /** Uses actual mimetype of the file, 'image/jpeg' as fallback since it seems to be accepted regardless of mismatch */
    const base64Prefix = `data:${context.file.mimetype || 'image/jpeg'};base64,`;

    const documentType = getDocumentType(context.file);
    const ocrResult = await performOCR({
      apiKey,
      baseURL,
      model,
      url: `${base64Prefix}${base64}`,
      documentType,
    });

    if (!ocrResult || !ocrResult.pages || ocrResult.pages.length === 0) {
      throw new Error(
        'No OCR result returned from service, may be down or the file is not supported.',
      );
    }

    const { text, images } = processOCRResult(ocrResult);

    return {
      filename: context.file.originalname,
      // NOTE(review): rough size estimate (4 bytes per extracted character) — TODO confirm intent
      bytes: text.length * 4,
      filepath: FileSources.azure_mistral_ocr,
      text,
      images,
    };
  } catch (error) {
    throw createOCRError(error, 'Error uploading document to Azure Mistral OCR API:');
  }
};
|
||||
|
||||
/**
 * Loads Google service account configuration.
 *
 * Reads the service-account key file (path from GOOGLE_SERVICE_KEY_FILE_PATH,
 * else a default location relative to this package), validates the fields
 * required for signing, and exchanges a self-signed JWT for an OAuth2 access token.
 *
 * @returns The parsed service account plus a bearer access token.
 * @throws Error when the key file is missing/unloadable or lacks required fields.
 */
async function loadGoogleAuthConfig(): Promise<{
  serviceAccount: GoogleServiceAccount;
  accessToken: string;
}> {
  /** Path from environment variable or default location */
  const serviceKeyPath =
    process.env.GOOGLE_SERVICE_KEY_FILE_PATH ||
    path.join(__dirname, '..', '..', '..', 'api', 'data', 'auth.json');

  const serviceKey = await loadServiceKey(serviceKeyPath);

  if (!serviceKey) {
    throw new Error(
      `Google service account not found or could not be loaded from ${serviceKeyPath}`,
    );
  }

  // All three fields are required: email + key sign the JWT, project id builds the API URL
  if (!serviceKey.client_email || !serviceKey.private_key || !serviceKey.project_id) {
    throw new Error('Invalid Google service account configuration');
  }

  const jwt = await createJWT(serviceKey as GoogleServiceAccount);
  const accessToken = await exchangeJWTForAccessToken(jwt);

  return {
    serviceAccount: serviceKey as GoogleServiceAccount,
    accessToken,
  };
}
|
||||
|
||||
/**
|
||||
* Creates a JWT token manually
|
||||
*/
|
||||
async function createJWT(serviceKey: GoogleServiceAccount): Promise<string> {
|
||||
const crypto = await import('crypto');
|
||||
|
||||
const header = {
|
||||
alg: 'RS256',
|
||||
typ: 'JWT',
|
||||
};
|
||||
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const payload = {
|
||||
iss: serviceKey.client_email,
|
||||
scope: 'https://www.googleapis.com/auth/cloud-platform',
|
||||
aud: 'https://oauth2.googleapis.com/token',
|
||||
exp: now + 3600,
|
||||
iat: now,
|
||||
};
|
||||
|
||||
const encodedHeader = Buffer.from(JSON.stringify(header)).toString('base64url');
|
||||
const encodedPayload = Buffer.from(JSON.stringify(payload)).toString('base64url');
|
||||
|
||||
const signatureInput = `${encodedHeader}.${encodedPayload}`;
|
||||
|
||||
const sign = crypto.createSign('RSA-SHA256');
|
||||
sign.update(signatureInput);
|
||||
sign.end();
|
||||
|
||||
const signature = sign.sign(serviceKey.private_key!, 'base64url');
|
||||
|
||||
return `${signatureInput}.${signature}`;
|
||||
}
|
||||
|
||||
/**
 * Exchanges JWT for access token.
 *
 * Posts the signed assertion to Google's token endpoint using the OAuth2
 * JWT-bearer grant and returns the resulting bearer access token.
 *
 * @param jwt - Compact-serialized, RS256-signed assertion from createJWT
 * @throws Error when the response contains no access token.
 */
async function exchangeJWTForAccessToken(jwt: string): Promise<string> {
  const response = await axios.post(
    'https://oauth2.googleapis.com/token',
    // URLSearchParams body produces the form-urlencoded payload the endpoint expects
    new URLSearchParams({
      grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer',
      assertion: jwt,
    }),
    {
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
      },
    },
  );

  if (!response.data?.access_token) {
    throw new Error('No access token in response');
  }

  return response.data.access_token;
}
|
||||
|
||||
/**
 * Performs OCR using Google Vertex AI.
 *
 * Builds the rawPredict endpoint for the Mistral OCR publisher model in the
 * configured location and posts the document (typically a base64 data URI).
 *
 * @param params.url - Document/image URL or data URI to OCR
 * @param params.accessToken - OAuth2 bearer token for Vertex AI
 * @param params.projectId - Google Cloud project id
 * @param params.model - Model id; falls back to 'mistral-ocr-2505' when empty
 * @param params.documentType - 'document_url' or 'image_url'
 * @returns The raw OCR result from Vertex AI
 */
async function performGoogleVertexOCR({
  url,
  accessToken,
  projectId,
  model,
  documentType = 'document_url',
}: {
  url: string;
  accessToken: string;
  projectId: string;
  model: string;
  documentType?: 'document_url' | 'image_url';
}): Promise<OCRResult> {
  const location = process.env.GOOGLE_LOC || 'us-central1';
  const modelId = model || 'mistral-ocr-2505';

  // The 'global' location uses a regionless hostname; others are region-prefixed
  let baseURL: string;
  if (location === 'global') {
    baseURL = `https://aiplatform.googleapis.com/v1/projects/${projectId}/locations/global/publishers/mistralai/models/${modelId}:rawPredict`;
  } else {
    baseURL = `https://${location}-aiplatform.googleapis.com/v1/projects/${projectId}/locations/${location}/publishers/mistralai/models/${modelId}:rawPredict`;
  }

  // The request-body key must mirror the declared document type
  const documentKey = documentType === 'image_url' ? 'image_url' : 'document_url';

  const requestBody = {
    model: modelId,
    document: {
      type: documentType,
      [documentKey]: url,
    },
    include_image_base64: true,
  };

  // Log with the document URL redacted — it may be a very large data URI
  logger.debug('Sending request to Google Vertex AI:', {
    url: baseURL,
    body: {
      ...requestBody,
      document: { ...requestBody.document, [documentKey]: 'base64_data_hidden' },
    },
  });

  return axios
    .post(baseURL, requestBody, {
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${accessToken}`,
        Accept: 'application/json',
      },
    })
    .then((res) => {
      logger.debug('Google Vertex AI response received');
      return res.data;
    })
    .catch((error) => {
      if (error.response?.data) {
        logger.error('Vertex AI error response: ' + JSON.stringify(error.response.data, null, 2));
      }
      throw new Error(
        logAxiosError({
          error: error as AxiosError,
          message: 'Error calling Google Vertex AI Mistral OCR',
        }),
      );
    });
}
|
||||
|
||||
/**
 * Use Google Vertex AI Mistral OCR API to process the OCR result.
 *
 * The file is sent inline to Vertex AI as a base64 data URI; authentication
 * uses a Google service account rather than the OCR config's API key.
 *
 * @param params - The params object.
 * @param params.req - The request object from Express. It should have a `user` property with an `id`
 * representing the user
 * @param params.file - The file object, which is part of the request. The file object should
 * have a `mimetype` property that tells us the file type
 * @param params.loadAuthValues - Function to load authentication values
 * @returns - The result object containing the processed `text` and `images` (not currently used),
 * along with the `filename` and `bytes` properties.
 * @throws Error (via createOCRError) when any step fails or OCR returns no pages.
 */
export const uploadGoogleVertexMistralOCR = async (
  context: OCRContext,
): Promise<MistralOCRUploadResult> => {
  try {
    const { serviceAccount, accessToken } = await loadGoogleAuthConfig();
    const model = getModelConfig(context.req.app.locals?.ocr);

    // The entire file is read into memory here for base64 inlining
    const buffer = fs.readFileSync(context.file.path);
    const base64 = buffer.toString('base64');
    // Falls back to 'application/pdf' when multer supplies no mimetype
    const base64Prefix = `data:${context.file.mimetype || 'application/pdf'};base64,`;

    const documentType = getDocumentType(context.file);
    const ocrResult = await performGoogleVertexOCR({
      url: `${base64Prefix}${base64}`,
      accessToken,
      projectId: serviceAccount.project_id!,
      model,
      documentType,
    });

    if (!ocrResult || !ocrResult.pages || ocrResult.pages.length === 0) {
      throw new Error(
        'No OCR result returned from service, may be down or the file is not supported.',
      );
    }

    const { text, images } = processOCRResult(ocrResult);

    return {
      filename: context.file.originalname,
      // NOTE(review): rough size estimate (4 bytes per extracted character) — TODO confirm intent
      bytes: text.length * 4,
      filepath: FileSources.vertexai_mistral_ocr as string,
      text,
      images,
    };
  } catch (error) {
    throw createOCRError(error, 'Error uploading document to Google Vertex AI Mistral OCR:');
  }
};
|
||||
|
|
@ -1,8 +1,8 @@
|
|||
import { FlowStateManager } from './manager';
|
||||
import { Keyv } from 'keyv';
|
||||
import { FlowStateManager } from './manager';
|
||||
import type { FlowState } from './types';
|
||||
|
||||
// Create a mock class without extending Keyv
|
||||
/** Mock class without extending Keyv */
|
||||
class MockKeyv {
|
||||
private store: Map<string, FlowState<string>>;
|
||||
|
||||
|
|
@ -1,28 +1,18 @@
|
|||
import { Keyv } from 'keyv';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import type { StoredDataNoRaw } from 'keyv';
|
||||
import type { Logger } from 'winston';
|
||||
import type { FlowState, FlowMetadata, FlowManagerOptions } from './types';
|
||||
|
||||
export class FlowStateManager<T = unknown> {
|
||||
private keyv: Keyv;
|
||||
private ttl: number;
|
||||
private logger: Logger;
|
||||
private intervals: Set<NodeJS.Timeout>;
|
||||
|
||||
private static getDefaultLogger(): Logger {
|
||||
return {
|
||||
error: console.error,
|
||||
warn: console.warn,
|
||||
info: console.info,
|
||||
debug: console.debug,
|
||||
} as Logger;
|
||||
}
|
||||
|
||||
constructor(store: Keyv, options?: FlowManagerOptions) {
|
||||
if (!options) {
|
||||
options = { ttl: 60000 * 3 };
|
||||
}
|
||||
const { ci = false, ttl, logger } = options;
|
||||
const { ci = false, ttl } = options;
|
||||
|
||||
if (!ci && !(store instanceof Keyv)) {
|
||||
throw new Error('Invalid store provided to FlowStateManager');
|
||||
|
|
@ -30,14 +20,13 @@ export class FlowStateManager<T = unknown> {
|
|||
|
||||
this.ttl = ttl;
|
||||
this.keyv = store;
|
||||
this.logger = logger || FlowStateManager.getDefaultLogger();
|
||||
this.intervals = new Set();
|
||||
this.setupCleanupHandlers();
|
||||
}
|
||||
|
||||
private setupCleanupHandlers() {
|
||||
const cleanup = () => {
|
||||
this.logger.info('Cleaning up FlowStateManager intervals...');
|
||||
logger.info('Cleaning up FlowStateManager intervals...');
|
||||
this.intervals.forEach((interval) => clearInterval(interval));
|
||||
this.intervals.clear();
|
||||
process.exit(0);
|
||||
|
|
@ -66,7 +55,7 @@ export class FlowStateManager<T = unknown> {
|
|||
|
||||
let existingState = (await this.keyv.get(flowKey)) as FlowState<T> | undefined;
|
||||
if (existingState) {
|
||||
this.logger.debug(`[${flowKey}] Flow already exists`);
|
||||
logger.debug(`[${flowKey}] Flow already exists`);
|
||||
return this.monitorFlow(flowKey, type, signal);
|
||||
}
|
||||
|
||||
|
|
@ -74,7 +63,7 @@ export class FlowStateManager<T = unknown> {
|
|||
|
||||
existingState = (await this.keyv.get(flowKey)) as FlowState<T> | undefined;
|
||||
if (existingState) {
|
||||
this.logger.debug(`[${flowKey}] Flow exists on 2nd check`);
|
||||
logger.debug(`[${flowKey}] Flow exists on 2nd check`);
|
||||
return this.monitorFlow(flowKey, type, signal);
|
||||
}
|
||||
|
||||
|
|
@ -85,7 +74,7 @@ export class FlowStateManager<T = unknown> {
|
|||
createdAt: Date.now(),
|
||||
};
|
||||
|
||||
this.logger.debug('Creating initial flow state:', flowKey);
|
||||
logger.debug('Creating initial flow state:', flowKey);
|
||||
await this.keyv.set(flowKey, initialState, this.ttl);
|
||||
return this.monitorFlow(flowKey, type, signal);
|
||||
}
|
||||
|
|
@ -102,7 +91,7 @@ export class FlowStateManager<T = unknown> {
|
|||
if (!flowState) {
|
||||
clearInterval(intervalId);
|
||||
this.intervals.delete(intervalId);
|
||||
this.logger.error(`[${flowKey}] Flow state not found`);
|
||||
logger.error(`[${flowKey}] Flow state not found`);
|
||||
reject(new Error(`${type} Flow state not found`));
|
||||
return;
|
||||
}
|
||||
|
|
@ -110,7 +99,7 @@ export class FlowStateManager<T = unknown> {
|
|||
if (signal?.aborted) {
|
||||
clearInterval(intervalId);
|
||||
this.intervals.delete(intervalId);
|
||||
this.logger.warn(`[${flowKey}] Flow aborted`);
|
||||
logger.warn(`[${flowKey}] Flow aborted`);
|
||||
const message = `${type} flow aborted`;
|
||||
await this.keyv.delete(flowKey);
|
||||
reject(new Error(message));
|
||||
|
|
@ -120,7 +109,7 @@ export class FlowStateManager<T = unknown> {
|
|||
if (flowState.status !== 'PENDING') {
|
||||
clearInterval(intervalId);
|
||||
this.intervals.delete(intervalId);
|
||||
this.logger.debug(`[${flowKey}] Flow completed`);
|
||||
logger.debug(`[${flowKey}] Flow completed`);
|
||||
|
||||
if (flowState.status === 'COMPLETED' && flowState.result !== undefined) {
|
||||
resolve(flowState.result);
|
||||
|
|
@ -135,17 +124,15 @@ export class FlowStateManager<T = unknown> {
|
|||
if (elapsedTime >= this.ttl) {
|
||||
clearInterval(intervalId);
|
||||
this.intervals.delete(intervalId);
|
||||
this.logger.error(
|
||||
logger.error(
|
||||
`[${flowKey}] Flow timed out | Elapsed time: ${elapsedTime} | TTL: ${this.ttl}`,
|
||||
);
|
||||
await this.keyv.delete(flowKey);
|
||||
reject(new Error(`${type} flow timed out`));
|
||||
}
|
||||
this.logger.debug(
|
||||
`[${flowKey}] Flow state elapsed time: ${elapsedTime}, checking again...`,
|
||||
);
|
||||
logger.debug(`[${flowKey}] Flow state elapsed time: ${elapsedTime}, checking again...`);
|
||||
} catch (error) {
|
||||
this.logger.error(`[${flowKey}] Error checking flow state:`, error);
|
||||
logger.error(`[${flowKey}] Error checking flow state:`, error);
|
||||
clearInterval(intervalId);
|
||||
this.intervals.delete(intervalId);
|
||||
reject(error);
|
||||
|
|
@ -224,7 +211,7 @@ export class FlowStateManager<T = unknown> {
|
|||
const flowKey = this.getFlowKey(flowId, type);
|
||||
let existingState = (await this.keyv.get(flowKey)) as FlowState<T> | undefined;
|
||||
if (existingState) {
|
||||
this.logger.debug(`[${flowKey}] Flow already exists`);
|
||||
logger.debug(`[${flowKey}] Flow already exists`);
|
||||
return this.monitorFlow(flowKey, type, signal);
|
||||
}
|
||||
|
||||
|
|
@ -232,7 +219,7 @@ export class FlowStateManager<T = unknown> {
|
|||
|
||||
existingState = (await this.keyv.get(flowKey)) as FlowState<T> | undefined;
|
||||
if (existingState) {
|
||||
this.logger.debug(`[${flowKey}] Flow exists on 2nd check`);
|
||||
logger.debug(`[${flowKey}] Flow exists on 2nd check`);
|
||||
return this.monitorFlow(flowKey, type, signal);
|
||||
}
|
||||
|
||||
|
|
@ -242,7 +229,7 @@ export class FlowStateManager<T = unknown> {
|
|||
metadata: {},
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
this.logger.debug(`[${flowKey}] Creating initial flow state`);
|
||||
logger.debug(`[${flowKey}] Creating initial flow state`);
|
||||
await this.keyv.set(flowKey, initialState, this.ttl);
|
||||
|
||||
try {
|
||||
24
packages/api/src/index.ts
Normal file
24
packages/api/src/index.ts
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
/* MCP */
|
||||
export * from './mcp/manager';
|
||||
export * from './mcp/oauth';
|
||||
export * from './mcp/auth';
|
||||
/* Utilities */
|
||||
export * from './mcp/utils';
|
||||
export * from './utils';
|
||||
/* OAuth */
|
||||
export * from './oauth';
|
||||
/* Crypto */
|
||||
export * from './crypto';
|
||||
/* Flow */
|
||||
export * from './flow/manager';
|
||||
/* Middleware */
|
||||
export * from './middleware';
|
||||
/* Agents */
|
||||
export * from './agents';
|
||||
/* Endpoints */
|
||||
export * from './endpoints';
|
||||
/* Files */
|
||||
export * from './files';
|
||||
/* types */
|
||||
export type * from './mcp/types';
|
||||
export type * from './flow/types';
|
||||
58
packages/api/src/mcp/auth.ts
Normal file
58
packages/api/src/mcp/auth.ts
Normal file
|
|
@ -0,0 +1,58 @@
|
|||
import { logger } from '@librechat/data-schemas';
|
||||
import { Constants } from 'librechat-data-provider';
|
||||
import type { PluginAuthMethods } from '@librechat/data-schemas';
|
||||
import type { GenericTool } from '@librechat/agents';
|
||||
import { getPluginAuthMap } from '~/agents/auth';
|
||||
import { mcpToolPattern } from './utils';
|
||||
|
||||
/**
 * Batch-fetches per-user custom variables for every MCP server referenced by
 * the given tools.
 *
 * @param params.userId - User whose plugin auth values are looked up
 * @param params.tools - Tools selected for the run; non-MCP tools are ignored
 * @param params.appTools - App-level tool registry; a tool must exist here to count
 * @param params.findPluginAuthsByKeys - Data-layer accessor passed through to getPluginAuthMap
 * @returns Map of MCP plugin key -> custom user variables; empty object when
 * there are no MCP tools or the fetch fails (errors are logged, not thrown).
 */
export async function getUserMCPAuthMap({
  userId,
  tools,
  appTools,
  findPluginAuthsByKeys,
}: {
  userId: string;
  tools: GenericTool[] | undefined;
  appTools: Record<string, unknown>;
  findPluginAuthsByKeys: PluginAuthMethods['findPluginAuthsByKeys'];
}) {
  if (!tools || tools.length === 0) {
    return {};
  }

  const uniqueMcpServers = new Set<string>();

  for (const tool of tools) {
    const toolKey = tool.name;
    // Only tools registered in the app and matching the MCP tool-name pattern
    if (toolKey && appTools[toolKey] && mcpToolPattern.test(toolKey)) {
      // Server name is the last delimiter-separated segment of the tool key
      const parts = toolKey.split(Constants.mcp_delimiter);
      const serverName = parts[parts.length - 1];
      uniqueMcpServers.add(`${Constants.mcp_prefix}${serverName}`);
    }
  }

  if (uniqueMcpServers.size === 0) {
    return {};
  }

  const mcpPluginKeysToFetch = Array.from(uniqueMcpServers);

  let allMcpCustomUserVars: Record<string, Record<string, string>> = {};
  try {
    allMcpCustomUserVars = await getPluginAuthMap({
      userId,
      pluginKeys: mcpPluginKeysToFetch,
      throwError: false,
      findPluginAuthsByKeys,
    });
  } catch (err) {
    // Best-effort: log and fall through with the (empty) default map
    logger.error(
      `[handleTools] Error batch fetching customUserVars for MCP tools (keys: ${mcpPluginKeysToFetch.join(
        ', ',
      )}), user ${userId}: ${err instanceof Error ? err.message : 'Unknown error'}`,
      err,
    );
  }

  return allMcpCustomUserVars;
}
|
||||
|
|
@ -1,4 +1,5 @@
|
|||
import { EventEmitter } from 'events';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
|
||||
import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js';
|
||||
import {
|
||||
|
|
@ -10,8 +11,8 @@ import { ResourceListChangedNotificationSchema } from '@modelcontextprotocol/sdk
|
|||
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js';
|
||||
import type { Transport } from '@modelcontextprotocol/sdk/shared/transport.js';
|
||||
import type { JSONRPCMessage } from '@modelcontextprotocol/sdk/types.js';
|
||||
import type { Logger } from 'winston';
|
||||
import type * as t from './types/mcp.js';
|
||||
import type { MCPOAuthTokens } from './oauth/types';
|
||||
import type * as t from './types';
|
||||
|
||||
function isStdioOptions(options: t.MCPOptions): options is t.StdioOptions {
|
||||
return 'command' in options;
|
||||
|
|
@ -67,28 +68,33 @@ export class MCPConnection extends EventEmitter {
|
|||
private isReconnecting = false;
|
||||
private isInitializing = false;
|
||||
private reconnectAttempts = 0;
|
||||
iconPath?: string;
|
||||
timeout?: number;
|
||||
private readonly userId?: string;
|
||||
private lastPingTime: number;
|
||||
private oauthTokens?: MCPOAuthTokens | null;
|
||||
private oauthRequired = false;
|
||||
iconPath?: string;
|
||||
timeout?: number;
|
||||
url?: string;
|
||||
|
||||
constructor(
|
||||
serverName: string,
|
||||
private readonly options: t.MCPOptions,
|
||||
private logger?: Logger,
|
||||
userId?: string,
|
||||
oauthTokens?: MCPOAuthTokens | null,
|
||||
) {
|
||||
super();
|
||||
this.serverName = serverName;
|
||||
this.logger = logger;
|
||||
this.userId = userId;
|
||||
this.iconPath = options.iconPath;
|
||||
this.timeout = options.timeout;
|
||||
this.lastPingTime = Date.now();
|
||||
if (oauthTokens) {
|
||||
this.oauthTokens = oauthTokens;
|
||||
}
|
||||
this.client = new Client(
|
||||
{
|
||||
name: 'librechat-mcp-client',
|
||||
version: '1.2.2',
|
||||
name: '@librechat/api-client',
|
||||
version: '1.2.3',
|
||||
},
|
||||
{
|
||||
capabilities: {},
|
||||
|
|
@ -107,11 +113,10 @@ export class MCPConnection extends EventEmitter {
|
|||
public static getInstance(
|
||||
serverName: string,
|
||||
options: t.MCPOptions,
|
||||
logger?: Logger,
|
||||
userId?: string,
|
||||
): MCPConnection {
|
||||
if (!MCPConnection.instance) {
|
||||
MCPConnection.instance = new MCPConnection(serverName, options, logger, userId);
|
||||
MCPConnection.instance = new MCPConnection(serverName, options, userId);
|
||||
}
|
||||
return MCPConnection.instance;
|
||||
}
|
||||
|
|
@ -129,7 +134,7 @@ export class MCPConnection extends EventEmitter {
|
|||
|
||||
private emitError(error: unknown, errorContext: string): void {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
this.logger?.error(`${this.getLogPrefix()} ${errorContext}: ${errorMessage}`);
|
||||
logger.error(`${this.getLogPrefix()} ${errorContext}: ${errorMessage}`);
|
||||
this.emit('error', new Error(`${errorContext}: ${errorMessage}`));
|
||||
}
|
||||
|
||||
|
|
@ -167,45 +172,52 @@ export class MCPConnection extends EventEmitter {
|
|||
if (!isWebSocketOptions(options)) {
|
||||
throw new Error('Invalid options for websocket transport.');
|
||||
}
|
||||
this.url = options.url;
|
||||
return new WebSocketClientTransport(new URL(options.url));
|
||||
|
||||
case 'sse': {
|
||||
if (!isSSEOptions(options)) {
|
||||
throw new Error('Invalid options for sse transport.');
|
||||
}
|
||||
this.url = options.url;
|
||||
const url = new URL(options.url);
|
||||
this.logger?.info(`${this.getLogPrefix()} Creating SSE transport: ${url.toString()}`);
|
||||
logger.info(`${this.getLogPrefix()} Creating SSE transport: ${url.toString()}`);
|
||||
const abortController = new AbortController();
|
||||
|
||||
/** Add OAuth token to headers if available */
|
||||
const headers = { ...options.headers };
|
||||
if (this.oauthTokens?.access_token) {
|
||||
headers['Authorization'] = `Bearer ${this.oauthTokens.access_token}`;
|
||||
}
|
||||
|
||||
const transport = new SSEClientTransport(url, {
|
||||
requestInit: {
|
||||
headers: options.headers,
|
||||
headers,
|
||||
signal: abortController.signal,
|
||||
},
|
||||
eventSourceInit: {
|
||||
fetch: (url, init) => {
|
||||
const headers = new Headers(Object.assign({}, init?.headers, options.headers));
|
||||
const fetchHeaders = new Headers(Object.assign({}, init?.headers, headers));
|
||||
return fetch(url, {
|
||||
...init,
|
||||
headers,
|
||||
headers: fetchHeaders,
|
||||
});
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
transport.onclose = () => {
|
||||
this.logger?.info(`${this.getLogPrefix()} SSE transport closed`);
|
||||
logger.info(`${this.getLogPrefix()} SSE transport closed`);
|
||||
this.emit('connectionChange', 'disconnected');
|
||||
};
|
||||
|
||||
transport.onerror = (error) => {
|
||||
this.logger?.error(`${this.getLogPrefix()} SSE transport error:`, error);
|
||||
logger.error(`${this.getLogPrefix()} SSE transport error:`, error);
|
||||
this.emitError(error, 'SSE transport error:');
|
||||
};
|
||||
|
||||
transport.onmessage = (message) => {
|
||||
this.logger?.info(
|
||||
`${this.getLogPrefix()} Message received: ${JSON.stringify(message)}`,
|
||||
);
|
||||
logger.info(`${this.getLogPrefix()} Message received: ${JSON.stringify(message)}`);
|
||||
};
|
||||
|
||||
this.setupTransportErrorHandlers(transport);
|
||||
|
|
@ -216,33 +228,38 @@ export class MCPConnection extends EventEmitter {
|
|||
if (!isStreamableHTTPOptions(options)) {
|
||||
throw new Error('Invalid options for streamable-http transport.');
|
||||
}
|
||||
this.url = options.url;
|
||||
const url = new URL(options.url);
|
||||
this.logger?.info(
|
||||
logger.info(
|
||||
`${this.getLogPrefix()} Creating streamable-http transport: ${url.toString()}`,
|
||||
);
|
||||
const abortController = new AbortController();
|
||||
|
||||
// Add OAuth token to headers if available
|
||||
const headers = { ...options.headers };
|
||||
if (this.oauthTokens?.access_token) {
|
||||
headers['Authorization'] = `Bearer ${this.oauthTokens.access_token}`;
|
||||
}
|
||||
|
||||
const transport = new StreamableHTTPClientTransport(url, {
|
||||
requestInit: {
|
||||
headers: options.headers,
|
||||
headers,
|
||||
signal: abortController.signal,
|
||||
},
|
||||
});
|
||||
|
||||
transport.onclose = () => {
|
||||
this.logger?.info(`${this.getLogPrefix()} Streamable-http transport closed`);
|
||||
logger.info(`${this.getLogPrefix()} Streamable-http transport closed`);
|
||||
this.emit('connectionChange', 'disconnected');
|
||||
};
|
||||
|
||||
transport.onerror = (error: Error | unknown) => {
|
||||
this.logger?.error(`${this.getLogPrefix()} Streamable-http transport error:`, error);
|
||||
logger.error(`${this.getLogPrefix()} Streamable-http transport error:`, error);
|
||||
this.emitError(error, 'Streamable-http transport error:');
|
||||
};
|
||||
|
||||
transport.onmessage = (message: JSONRPCMessage) => {
|
||||
this.logger?.info(
|
||||
`${this.getLogPrefix()} Message received: ${JSON.stringify(message)}`,
|
||||
);
|
||||
logger.info(`${this.getLogPrefix()} Message received: ${JSON.stringify(message)}`);
|
||||
};
|
||||
|
||||
this.setupTransportErrorHandlers(transport);
|
||||
|
|
@ -271,17 +288,17 @@ export class MCPConnection extends EventEmitter {
|
|||
/**
|
||||
* // FOR DEBUGGING
|
||||
* // this.client.setRequestHandler(PingRequestSchema, async (request, extra) => {
|
||||
* // this.logger?.info(`[MCP][${this.serverName}] PingRequest: ${JSON.stringify(request)}`);
|
||||
* // logger.info(`[MCP][${this.serverName}] PingRequest: ${JSON.stringify(request)}`);
|
||||
* // if (getEventListeners && extra.signal) {
|
||||
* // const listenerCount = getEventListeners(extra.signal, 'abort').length;
|
||||
* // this.logger?.debug(`Signal has ${listenerCount} abort listeners`);
|
||||
* // logger.debug(`Signal has ${listenerCount} abort listeners`);
|
||||
* // }
|
||||
* // return {};
|
||||
* // });
|
||||
*/
|
||||
} else if (state === 'error' && !this.isReconnecting && !this.isInitializing) {
|
||||
this.handleReconnection().catch((error) => {
|
||||
this.logger?.error(`${this.getLogPrefix()} Reconnection handler failed:`, error);
|
||||
logger.error(`${this.getLogPrefix()} Reconnection handler failed:`, error);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
|
@ -290,7 +307,15 @@ export class MCPConnection extends EventEmitter {
|
|||
}
|
||||
|
||||
private async handleReconnection(): Promise<void> {
|
||||
if (this.isReconnecting || this.shouldStopReconnecting || this.isInitializing) {
|
||||
if (
|
||||
this.isReconnecting ||
|
||||
this.shouldStopReconnecting ||
|
||||
this.isInitializing ||
|
||||
this.oauthRequired
|
||||
) {
|
||||
if (this.oauthRequired) {
|
||||
logger.info(`${this.getLogPrefix()} OAuth required, skipping reconnection attempts`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
@ -305,7 +330,7 @@ export class MCPConnection extends EventEmitter {
|
|||
this.reconnectAttempts++;
|
||||
const delay = backoffDelay(this.reconnectAttempts);
|
||||
|
||||
this.logger?.info(
|
||||
logger.info(
|
||||
`${this.getLogPrefix()} Reconnecting ${this.reconnectAttempts}/${this.MAX_RECONNECT_ATTEMPTS} (delay: ${delay}ms)`,
|
||||
);
|
||||
|
||||
|
|
@ -316,13 +341,13 @@ export class MCPConnection extends EventEmitter {
|
|||
this.reconnectAttempts = 0;
|
||||
return;
|
||||
} catch (error) {
|
||||
this.logger?.error(`${this.getLogPrefix()} Reconnection attempt failed:`, error);
|
||||
logger.error(`${this.getLogPrefix()} Reconnection attempt failed:`, error);
|
||||
|
||||
if (
|
||||
this.reconnectAttempts === this.MAX_RECONNECT_ATTEMPTS ||
|
||||
(this.shouldStopReconnecting as boolean)
|
||||
) {
|
||||
this.logger?.error(`${this.getLogPrefix()} Stopping reconnection attempts`);
|
||||
logger.error(`${this.getLogPrefix()} Stopping reconnection attempts`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
|
@ -366,18 +391,21 @@ export class MCPConnection extends EventEmitter {
|
|||
await this.client.close();
|
||||
this.transport = null;
|
||||
} catch (error) {
|
||||
this.logger?.warn(`${this.getLogPrefix()} Error closing connection:`, error);
|
||||
logger.warn(`${this.getLogPrefix()} Error closing connection:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
this.transport = this.constructTransport(this.options);
|
||||
this.setupTransportDebugHandlers();
|
||||
|
||||
const connectTimeout = this.options.initTimeout ?? 10000;
|
||||
const connectTimeout = this.options.initTimeout ?? 120000;
|
||||
await Promise.race([
|
||||
this.client.connect(this.transport),
|
||||
new Promise((_resolve, reject) =>
|
||||
setTimeout(() => reject(new Error('Connection timeout')), connectTimeout),
|
||||
setTimeout(
|
||||
() => reject(new Error(`Connection timeout after ${connectTimeout}ms`)),
|
||||
connectTimeout,
|
||||
),
|
||||
),
|
||||
]);
|
||||
|
||||
|
|
@ -385,9 +413,85 @@ export class MCPConnection extends EventEmitter {
|
|||
this.emit('connectionChange', 'connected');
|
||||
this.reconnectAttempts = 0;
|
||||
} catch (error) {
|
||||
// Check if it's an OAuth authentication error
|
||||
if (this.isOAuthError(error)) {
|
||||
logger.warn(`${this.getLogPrefix()} OAuth authentication required`);
|
||||
this.oauthRequired = true;
|
||||
const serverUrl = this.url;
|
||||
logger.debug(`${this.getLogPrefix()} Server URL for OAuth: ${serverUrl}`);
|
||||
|
||||
const oauthTimeout = this.options.initTimeout ?? 60000;
|
||||
/** Promise that will resolve when OAuth is handled */
|
||||
const oauthHandledPromise = new Promise<void>((resolve, reject) => {
|
||||
let timeoutId: NodeJS.Timeout | null = null;
|
||||
let oauthHandledListener: (() => void) | null = null;
|
||||
let oauthFailedListener: ((error: Error) => void) | null = null;
|
||||
|
||||
/** Cleanup function to remove listeners and clear timeout */
|
||||
const cleanup = () => {
|
||||
if (timeoutId) {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
if (oauthHandledListener) {
|
||||
this.off('oauthHandled', oauthHandledListener);
|
||||
}
|
||||
if (oauthFailedListener) {
|
||||
this.off('oauthFailed', oauthFailedListener);
|
||||
}
|
||||
};
|
||||
|
||||
// Success handler
|
||||
oauthHandledListener = () => {
|
||||
cleanup();
|
||||
resolve();
|
||||
};
|
||||
|
||||
// Failure handler
|
||||
oauthFailedListener = (error: Error) => {
|
||||
cleanup();
|
||||
reject(error);
|
||||
};
|
||||
|
||||
// Timeout handler
|
||||
timeoutId = setTimeout(() => {
|
||||
cleanup();
|
||||
reject(new Error(`OAuth handling timeout after ${oauthTimeout}ms`));
|
||||
}, oauthTimeout);
|
||||
|
||||
// Listen for both success and failure events
|
||||
this.once('oauthHandled', oauthHandledListener);
|
||||
this.once('oauthFailed', oauthFailedListener);
|
||||
});
|
||||
|
||||
// Emit the event
|
||||
this.emit('oauthRequired', {
|
||||
serverName: this.serverName,
|
||||
error,
|
||||
serverUrl,
|
||||
userId: this.userId,
|
||||
});
|
||||
|
||||
try {
|
||||
// Wait for OAuth to be handled
|
||||
await oauthHandledPromise;
|
||||
// Reset the oauthRequired flag
|
||||
this.oauthRequired = false;
|
||||
// Don't throw the error - just return so connection can be retried
|
||||
logger.info(
|
||||
`${this.getLogPrefix()} OAuth handled successfully, connection will be retried`,
|
||||
);
|
||||
return;
|
||||
} catch (oauthError) {
|
||||
// OAuth failed or timed out
|
||||
this.oauthRequired = false;
|
||||
logger.error(`${this.getLogPrefix()} OAuth handling failed:`, oauthError);
|
||||
// Re-throw the original authentication error
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
this.connectionState = 'error';
|
||||
this.emit('connectionChange', 'error');
|
||||
this.lastError = error instanceof Error ? error : new Error(String(error));
|
||||
throw error;
|
||||
} finally {
|
||||
this.connectPromise = null;
|
||||
|
|
@ -403,7 +507,7 @@ export class MCPConnection extends EventEmitter {
|
|||
}
|
||||
|
||||
this.transport.onmessage = (msg) => {
|
||||
this.logger?.debug(`${this.getLogPrefix()} Transport received: ${JSON.stringify(msg)}`);
|
||||
logger.debug(`${this.getLogPrefix()} Transport received: ${JSON.stringify(msg)}`);
|
||||
};
|
||||
|
||||
const originalSend = this.transport.send.bind(this.transport);
|
||||
|
|
@ -414,7 +518,7 @@ export class MCPConnection extends EventEmitter {
|
|||
}
|
||||
this.lastPingTime = Date.now();
|
||||
}
|
||||
this.logger?.debug(`${this.getLogPrefix()} Transport sending: ${JSON.stringify(msg)}`);
|
||||
logger.debug(`${this.getLogPrefix()} Transport sending: ${JSON.stringify(msg)}`);
|
||||
return originalSend(msg);
|
||||
};
|
||||
}
|
||||
|
|
@ -427,14 +531,24 @@ export class MCPConnection extends EventEmitter {
|
|||
throw new Error('Connection not established');
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger?.error(`${this.getLogPrefix()} Connection failed:`, error);
|
||||
logger.error(`${this.getLogPrefix()} Connection failed:`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private setupTransportErrorHandlers(transport: Transport): void {
|
||||
transport.onerror = (error) => {
|
||||
this.logger?.error(`${this.getLogPrefix()} Transport error:`, error);
|
||||
logger.error(`${this.getLogPrefix()} Transport error:`, error);
|
||||
|
||||
// Check if it's an OAuth authentication error
|
||||
if (error && typeof error === 'object' && 'code' in error) {
|
||||
const errorCode = (error as unknown as { code?: number }).code;
|
||||
if (errorCode === 401 || errorCode === 403) {
|
||||
logger.warn(`${this.getLogPrefix()} OAuth authentication error detected`);
|
||||
this.emit('oauthError', error);
|
||||
}
|
||||
}
|
||||
|
||||
this.emit('connectionChange', 'error');
|
||||
};
|
||||
}
|
||||
|
|
@ -562,22 +676,36 @@ export class MCPConnection extends EventEmitter {
|
|||
// }
|
||||
// }
|
||||
|
||||
// Public getters for state information
|
||||
public getConnectionState(): t.ConnectionState {
|
||||
return this.connectionState;
|
||||
}
|
||||
|
||||
public async isConnected(): Promise<boolean> {
|
||||
try {
|
||||
await this.client.ping();
|
||||
return this.connectionState === 'connected';
|
||||
} catch (error) {
|
||||
this.logger?.error(`${this.getLogPrefix()} Ping failed:`, error);
|
||||
logger.error(`${this.getLogPrefix()} Ping failed:`, error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public getLastError(): Error | null {
|
||||
return this.lastError;
|
||||
public setOAuthTokens(tokens: MCPOAuthTokens): void {
|
||||
this.oauthTokens = tokens;
|
||||
}
|
||||
|
||||
private isOAuthError(error: unknown): boolean {
|
||||
if (!error || typeof error !== 'object') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check for SSE error with 401 status
|
||||
if ('message' in error && typeof error.message === 'string') {
|
||||
return error.message.includes('401') || error.message.includes('Non-200 status code (401)');
|
||||
}
|
||||
|
||||
// Check for error code
|
||||
if ('code' in error) {
|
||||
const code = (error as { code?: number }).code;
|
||||
return code === 401 || code === 403;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
9
packages/api/src/mcp/enum.ts
Normal file
9
packages/api/src/mcp/enum.ts
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
export enum CONSTANTS {
|
||||
mcp_delimiter = '_mcp_',
|
||||
/** System user ID for app-level OAuth tokens (all zeros ObjectId) */
|
||||
SYSTEM_USER_ID = '000000000000000000000000',
|
||||
}
|
||||
|
||||
export function isSystemUserId(userId?: string): boolean {
|
||||
return userId === CONSTANTS.SYSTEM_USER_ID;
|
||||
}
|
||||
1099
packages/api/src/mcp/manager.ts
Normal file
1099
packages/api/src/mcp/manager.ts
Normal file
File diff suppressed because it is too large
Load diff
712
packages/api/src/mcp/mcp.spec.ts
Normal file
712
packages/api/src/mcp/mcp.spec.ts
Normal file
|
|
@ -0,0 +1,712 @@
|
|||
import {
|
||||
MCPOptions,
|
||||
StdioOptionsSchema,
|
||||
StreamableHTTPOptionsSchema,
|
||||
} from 'librechat-data-provider';
|
||||
import type { TUser } from 'librechat-data-provider';
|
||||
import { processMCPEnv } from '~/utils/env';
|
||||
|
||||
// Helper function to create test user objects
|
||||
function createTestUser(
|
||||
overrides: Partial<TUser> & Record<string, unknown> = {},
|
||||
): TUser & Record<string, unknown> {
|
||||
return {
|
||||
id: 'test-user-id',
|
||||
username: 'testuser',
|
||||
email: 'test@example.com',
|
||||
name: 'Test User',
|
||||
avatar: 'https://example.com/avatar.png',
|
||||
provider: 'email',
|
||||
role: 'user',
|
||||
createdAt: new Date('2021-01-01').toISOString(),
|
||||
updatedAt: new Date('2021-01-01').toISOString(),
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe('Environment Variable Extraction (MCP)', () => {
|
||||
const originalEnv = process.env;
|
||||
|
||||
beforeEach(() => {
|
||||
process.env = {
|
||||
...originalEnv,
|
||||
TEST_API_KEY: 'test-api-key-value',
|
||||
ANOTHER_SECRET: 'another-secret-value',
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
describe('StdioOptionsSchema', () => {
|
||||
it('should transform environment variables in the env field', () => {
|
||||
const options = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
API_KEY: '${TEST_API_KEY}',
|
||||
ANOTHER_KEY: '${ANOTHER_SECRET}',
|
||||
PLAIN_VALUE: 'plain-value',
|
||||
NON_EXISTENT: '${NON_EXISTENT_VAR}',
|
||||
},
|
||||
};
|
||||
|
||||
const result = StdioOptionsSchema.parse(options);
|
||||
|
||||
expect(result.env).toEqual({
|
||||
API_KEY: 'test-api-key-value',
|
||||
ANOTHER_KEY: 'another-secret-value',
|
||||
PLAIN_VALUE: 'plain-value',
|
||||
NON_EXISTENT: '${NON_EXISTENT_VAR}',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle undefined env field', () => {
|
||||
const options = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
};
|
||||
|
||||
const result = StdioOptionsSchema.parse(options);
|
||||
|
||||
expect(result.env).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('StreamableHTTPOptionsSchema', () => {
|
||||
it('should validate a valid streamable-http configuration', () => {
|
||||
const options = {
|
||||
type: 'streamable-http',
|
||||
url: 'https://example.com/api',
|
||||
headers: {
|
||||
Authorization: 'Bearer token',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
|
||||
const result = StreamableHTTPOptionsSchema.parse(options);
|
||||
|
||||
expect(result).toEqual(options);
|
||||
});
|
||||
|
||||
it('should reject websocket URLs', () => {
|
||||
const options = {
|
||||
type: 'streamable-http',
|
||||
url: 'ws://example.com/socket',
|
||||
};
|
||||
|
||||
expect(() => StreamableHTTPOptionsSchema.parse(options)).toThrow();
|
||||
});
|
||||
|
||||
it('should reject secure websocket URLs', () => {
|
||||
const options = {
|
||||
type: 'streamable-http',
|
||||
url: 'wss://example.com/socket',
|
||||
};
|
||||
|
||||
expect(() => StreamableHTTPOptionsSchema.parse(options)).toThrow();
|
||||
});
|
||||
|
||||
it('should require type field to be set explicitly', () => {
|
||||
const options = {
|
||||
url: 'https://example.com/api',
|
||||
};
|
||||
|
||||
// Type is now required, so parsing should fail
|
||||
expect(() => StreamableHTTPOptionsSchema.parse(options)).toThrow();
|
||||
|
||||
// With type provided, it should pass
|
||||
const validOptions = {
|
||||
type: 'streamable-http' as const,
|
||||
url: 'https://example.com/api',
|
||||
};
|
||||
|
||||
const result = StreamableHTTPOptionsSchema.parse(validOptions);
|
||||
expect(result.type).toBe('streamable-http');
|
||||
});
|
||||
|
||||
it('should validate headers as record of strings', () => {
|
||||
const options = {
|
||||
type: 'streamable-http',
|
||||
url: 'https://example.com/api',
|
||||
headers: {
|
||||
'X-API-Key': '123456',
|
||||
'User-Agent': 'MCP Client',
|
||||
},
|
||||
};
|
||||
|
||||
const result = StreamableHTTPOptionsSchema.parse(options);
|
||||
|
||||
expect(result.headers).toEqual(options.headers);
|
||||
});
|
||||
});
|
||||
|
||||
describe('processMCPEnv', () => {
|
||||
it('should create a deep clone of the input object', () => {
|
||||
const originalObj: MCPOptions = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
API_KEY: '${TEST_API_KEY}',
|
||||
PLAIN_VALUE: 'plain-value',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(originalObj);
|
||||
|
||||
// Verify it's not the same object reference
|
||||
expect(result).not.toBe(originalObj);
|
||||
|
||||
// Modify the result and ensure original is unchanged
|
||||
if ('env' in result && result.env) {
|
||||
result.env.API_KEY = 'modified-value';
|
||||
}
|
||||
|
||||
expect(originalObj.env?.API_KEY).toBe('${TEST_API_KEY}');
|
||||
});
|
||||
|
||||
it('should process environment variables in env field', () => {
|
||||
const obj: MCPOptions = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
API_KEY: '${TEST_API_KEY}',
|
||||
ANOTHER_KEY: '${ANOTHER_SECRET}',
|
||||
PLAIN_VALUE: 'plain-value',
|
||||
NON_EXISTENT: '${NON_EXISTENT_VAR}',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj);
|
||||
|
||||
expect('env' in result && result.env).toEqual({
|
||||
API_KEY: 'test-api-key-value',
|
||||
ANOTHER_KEY: 'another-secret-value',
|
||||
PLAIN_VALUE: 'plain-value',
|
||||
NON_EXISTENT: '${NON_EXISTENT_VAR}',
|
||||
});
|
||||
});
|
||||
|
||||
it('should process user ID in headers field', () => {
|
||||
const user = createTestUser({ id: 'test-user-123' });
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
Authorization: '${TEST_API_KEY}',
|
||||
'User-Id': '{{LIBRECHAT_USER_ID}}',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user);
|
||||
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
Authorization: 'test-api-key-value',
|
||||
'User-Id': 'test-user-123',
|
||||
'Content-Type': 'application/json',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle null or undefined input', () => {
|
||||
// @ts-ignore - Testing null/undefined handling
|
||||
expect(processMCPEnv(null)).toBeNull();
|
||||
// @ts-ignore - Testing null/undefined handling
|
||||
expect(processMCPEnv(undefined)).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not modify objects without env or headers', () => {
|
||||
const obj: MCPOptions = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
timeout: 5000,
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj);
|
||||
|
||||
expect(result).toEqual(obj);
|
||||
expect(result).not.toBe(obj); // Still a different object (deep clone)
|
||||
});
|
||||
|
||||
it('should ensure different users with same starting config get separate values', () => {
|
||||
// Create a single base configuration
|
||||
const baseConfig: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
'User-Id': '{{LIBRECHAT_USER_ID}}',
|
||||
'API-Key': '${TEST_API_KEY}',
|
||||
},
|
||||
};
|
||||
|
||||
// Process for two different users
|
||||
const user1 = createTestUser({ id: 'user-123' });
|
||||
const user2 = createTestUser({ id: 'user-456' });
|
||||
|
||||
const resultUser1 = processMCPEnv(baseConfig, user1);
|
||||
const resultUser2 = processMCPEnv(baseConfig, user2);
|
||||
|
||||
// Verify each has the correct user ID
|
||||
expect('headers' in resultUser1 && resultUser1.headers?.['User-Id']).toBe('user-123');
|
||||
expect('headers' in resultUser2 && resultUser2.headers?.['User-Id']).toBe('user-456');
|
||||
|
||||
// Verify they're different objects
|
||||
expect(resultUser1).not.toBe(resultUser2);
|
||||
|
||||
// Modify one result and ensure it doesn't affect the other
|
||||
if ('headers' in resultUser1 && resultUser1.headers) {
|
||||
resultUser1.headers['User-Id'] = 'modified-user';
|
||||
}
|
||||
|
||||
// Original config should be unchanged
|
||||
expect(baseConfig.headers?.['User-Id']).toBe('{{LIBRECHAT_USER_ID}}');
|
||||
|
||||
// Second user's config should be unchanged
|
||||
expect('headers' in resultUser2 && resultUser2.headers?.['User-Id']).toBe('user-456');
|
||||
});
|
||||
|
||||
it('should process headers in streamable-http options', () => {
|
||||
const user = createTestUser({ id: 'test-user-123' });
|
||||
const obj: MCPOptions = {
|
||||
type: 'streamable-http',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
Authorization: '${TEST_API_KEY}',
|
||||
'User-Id': '{{LIBRECHAT_USER_ID}}',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user);
|
||||
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
Authorization: 'test-api-key-value',
|
||||
'User-Id': 'test-user-123',
|
||||
'Content-Type': 'application/json',
|
||||
});
|
||||
});
|
||||
|
||||
it('should maintain streamable-http type in processed options', () => {
|
||||
const obj: MCPOptions = {
|
||||
type: 'streamable-http',
|
||||
url: 'https://example.com/api',
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj);
|
||||
|
||||
expect(result.type).toBe('streamable-http');
|
||||
});
|
||||
|
||||
it('should process dynamic user fields in headers', () => {
|
||||
const user = createTestUser({
|
||||
id: 'user-123',
|
||||
email: 'test@example.com',
|
||||
username: 'testuser',
|
||||
openidId: 'openid-123',
|
||||
googleId: 'google-456',
|
||||
emailVerified: true,
|
||||
role: 'admin',
|
||||
});
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
'User-Email': '{{LIBRECHAT_USER_EMAIL}}',
|
||||
'User-Name': '{{LIBRECHAT_USER_USERNAME}}',
|
||||
OpenID: '{{LIBRECHAT_USER_OPENIDID}}',
|
||||
'Google-ID': '{{LIBRECHAT_USER_GOOGLEID}}',
|
||||
'Email-Verified': '{{LIBRECHAT_USER_EMAILVERIFIED}}',
|
||||
'User-Role': '{{LIBRECHAT_USER_ROLE}}',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user);
|
||||
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
'User-Email': 'test@example.com',
|
||||
'User-Name': 'testuser',
|
||||
OpenID: 'openid-123',
|
||||
'Google-ID': 'google-456',
|
||||
'Email-Verified': 'true',
|
||||
'User-Role': 'admin',
|
||||
'Content-Type': 'application/json',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle missing user fields gracefully', () => {
|
||||
const user = createTestUser({
|
||||
id: 'user-123',
|
||||
email: 'test@example.com',
|
||||
username: undefined, // explicitly set to undefined to test missing field
|
||||
});
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
'User-Email': '{{LIBRECHAT_USER_EMAIL}}',
|
||||
'User-Name': '{{LIBRECHAT_USER_USERNAME}}',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user);
|
||||
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
'User-Email': 'test@example.com',
|
||||
'User-Name': '', // Empty string for missing field
|
||||
'Content-Type': 'application/json',
|
||||
});
|
||||
});
|
||||
|
||||
it('should process user fields in env variables', () => {
|
||||
const user = createTestUser({
|
||||
id: 'user-123',
|
||||
email: 'test@example.com',
|
||||
ldapId: 'ldap-user-123',
|
||||
});
|
||||
const obj: MCPOptions = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
USER_EMAIL: '{{LIBRECHAT_USER_EMAIL}}',
|
||||
LDAP_ID: '{{LIBRECHAT_USER_LDAPID}}',
|
||||
API_KEY: '${TEST_API_KEY}',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user);
|
||||
|
||||
expect('env' in result && result.env).toEqual({
|
||||
USER_EMAIL: 'test@example.com',
|
||||
LDAP_ID: 'ldap-user-123',
|
||||
API_KEY: 'test-api-key-value',
|
||||
});
|
||||
});
|
||||
|
||||
it('should process user fields in URL', () => {
|
||||
const user = createTestUser({
|
||||
id: 'user-123',
|
||||
username: 'testuser',
|
||||
});
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com/api/{{LIBRECHAT_USER_USERNAME}}/stream',
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user);
|
||||
|
||||
expect('url' in result && result.url).toBe('https://example.com/api/testuser/stream');
|
||||
});
|
||||
|
||||
it('should handle boolean user fields', () => {
|
||||
const user = createTestUser({
|
||||
id: 'user-123',
|
||||
emailVerified: true,
|
||||
twoFactorEnabled: false,
|
||||
termsAccepted: true,
|
||||
});
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
'Email-Verified': '{{LIBRECHAT_USER_EMAILVERIFIED}}',
|
||||
'Two-Factor': '{{LIBRECHAT_USER_TWOFACTORENABLED}}',
|
||||
'Terms-Accepted': '{{LIBRECHAT_USER_TERMSACCEPTED}}',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user);
|
||||
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
'Email-Verified': 'true',
|
||||
'Two-Factor': 'false',
|
||||
'Terms-Accepted': 'true',
|
||||
});
|
||||
});
|
||||
|
||||
it('should not process sensitive fields like password', () => {
|
||||
const user = createTestUser({
|
||||
id: 'user-123',
|
||||
email: 'test@example.com',
|
||||
password: 'secret-password',
|
||||
});
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
'User-Email': '{{LIBRECHAT_USER_EMAIL}}',
|
||||
'User-Password': '{{LIBRECHAT_USER_PASSWORD}}', // This should not be processed
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user);
|
||||
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
'User-Email': 'test@example.com',
|
||||
'User-Password': '{{LIBRECHAT_USER_PASSWORD}}', // Unchanged
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle multiple occurrences of the same placeholder', () => {
|
||||
const user = createTestUser({
|
||||
id: 'user-123',
|
||||
email: 'test@example.com',
|
||||
});
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
'Primary-Email': '{{LIBRECHAT_USER_EMAIL}}',
|
||||
'Secondary-Email': '{{LIBRECHAT_USER_EMAIL}}',
|
||||
'Backup-Email': '{{LIBRECHAT_USER_EMAIL}}',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user);
|
||||
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
'Primary-Email': 'test@example.com',
|
||||
'Secondary-Email': 'test@example.com',
|
||||
'Backup-Email': 'test@example.com',
|
||||
});
|
||||
});
|
||||
|
||||
it('should support both id and _id properties for LIBRECHAT_USER_ID', () => {
|
||||
// Test with 'id' property
|
||||
const userWithId = createTestUser({
|
||||
id: 'user-123',
|
||||
email: 'test@example.com',
|
||||
});
|
||||
const obj1: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
'User-Id': '{{LIBRECHAT_USER_ID}}',
|
||||
},
|
||||
};
|
||||
|
||||
const result1 = processMCPEnv(obj1, userWithId);
|
||||
expect('headers' in result1 && result1.headers?.['User-Id']).toBe('user-123');
|
||||
|
||||
// Test with '_id' property only (should not work since we only check 'id')
|
||||
const userWithUnderscore = createTestUser({
|
||||
id: undefined, // Remove default id to test _id
|
||||
_id: 'user-456',
|
||||
email: 'test@example.com',
|
||||
});
|
||||
const obj2: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
'User-Id': '{{LIBRECHAT_USER_ID}}',
|
||||
},
|
||||
};
|
||||
|
||||
const result2 = processMCPEnv(obj2, userWithUnderscore);
|
||||
// Since we don't check _id, the placeholder should remain unchanged
|
||||
expect('headers' in result2 && result2.headers?.['User-Id']).toBe('{{LIBRECHAT_USER_ID}}');
|
||||
|
||||
// Test with both properties (id takes precedence)
|
||||
const userWithBoth = createTestUser({
|
||||
id: 'user-789',
|
||||
_id: 'user-000',
|
||||
email: 'test@example.com',
|
||||
});
|
||||
const obj3: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
'User-Id': '{{LIBRECHAT_USER_ID}}',
|
||||
},
|
||||
};
|
||||
|
||||
const result3 = processMCPEnv(obj3, userWithBoth);
|
||||
expect('headers' in result3 && result3.headers?.['User-Id']).toBe('user-789');
|
||||
});
|
||||
|
||||
it('should process customUserVars in env field', () => {
|
||||
const user = createTestUser();
|
||||
const customUserVars = {
|
||||
CUSTOM_VAR_1: 'custom-value-1',
|
||||
CUSTOM_VAR_2: 'custom-value-2',
|
||||
};
|
||||
const obj: MCPOptions = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
VAR_A: '{{CUSTOM_VAR_1}}',
|
||||
VAR_B: 'Value with {{CUSTOM_VAR_2}}',
|
||||
VAR_C: '${TEST_API_KEY}',
|
||||
VAR_D: '{{LIBRECHAT_USER_EMAIL}}',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user, customUserVars);
|
||||
|
||||
expect('env' in result && result.env).toEqual({
|
||||
VAR_A: 'custom-value-1',
|
||||
VAR_B: 'Value with custom-value-2',
|
||||
VAR_C: 'test-api-key-value',
|
||||
VAR_D: 'test@example.com',
|
||||
});
|
||||
});
|
||||
|
||||
it('should process customUserVars in headers field', () => {
|
||||
const user = createTestUser();
|
||||
const customUserVars = {
|
||||
USER_TOKEN: 'user-specific-token',
|
||||
REGION: 'us-west-1',
|
||||
};
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com/api',
|
||||
headers: {
|
||||
Authorization: 'Bearer {{USER_TOKEN}}',
|
||||
'X-Region': '{{REGION}}',
|
||||
'X-System-Key': '${TEST_API_KEY}',
|
||||
'X-User-Id': '{{LIBRECHAT_USER_ID}}',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user, customUserVars);
|
||||
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
Authorization: 'Bearer user-specific-token',
|
||||
'X-Region': 'us-west-1',
|
||||
'X-System-Key': 'test-api-key-value',
|
||||
'X-User-Id': 'test-user-id',
|
||||
});
|
||||
});
|
||||
|
||||
it('should process customUserVars in URL field', () => {
|
||||
const user = createTestUser();
|
||||
const customUserVars = {
|
||||
API_VERSION: 'v2',
|
||||
TENANT_ID: 'tenant123',
|
||||
};
|
||||
const obj: MCPOptions = {
|
||||
type: 'websocket',
|
||||
url: 'wss://example.com/{{TENANT_ID}}/api/{{API_VERSION}}?user={{LIBRECHAT_USER_ID}}&key=${TEST_API_KEY}',
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user, customUserVars);
|
||||
|
||||
expect('url' in result && result.url).toBe(
|
||||
'wss://example.com/tenant123/api/v2?user=test-user-id&key=test-api-key-value',
|
||||
);
|
||||
});
|
||||
|
||||
it('should prioritize customUserVars over user fields and system env vars if placeholders are the same (though not recommended)', () => {
|
||||
// This tests the order of operations: customUserVars -> userFields -> systemEnv
|
||||
// BUt it's generally not recommended to have overlapping placeholder names.
|
||||
process.env.LIBRECHAT_USER_EMAIL = 'system-email-should-be-overridden';
|
||||
const user = createTestUser({ email: 'user-email-should-be-overridden' });
|
||||
const customUserVars = {
|
||||
LIBRECHAT_USER_EMAIL: 'custom-email-wins',
|
||||
};
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com/api',
|
||||
headers: {
|
||||
'Test-Email': '{{LIBRECHAT_USER_EMAIL}}', // Placeholder that could match custom, user, or system
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user, customUserVars);
|
||||
expect('headers' in result && result.headers?.['Test-Email']).toBe('custom-email-wins');
|
||||
|
||||
// Clean up env var
|
||||
delete process.env.LIBRECHAT_USER_EMAIL;
|
||||
});
|
||||
|
||||
it('should handle customUserVars with no matching placeholders', () => {
|
||||
const user = createTestUser();
|
||||
const customUserVars = {
|
||||
UNUSED_VAR: 'unused-value',
|
||||
};
|
||||
const obj: MCPOptions = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
API_KEY: '${TEST_API_KEY}',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user, customUserVars);
|
||||
expect('env' in result && result.env).toEqual({
|
||||
API_KEY: 'test-api-key-value',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle placeholders with no matching customUserVars (falling back to user/system vars)', () => {
|
||||
const user = createTestUser({ email: 'user-provided-email@example.com' });
|
||||
// No customUserVars provided or customUserVars is empty
|
||||
const customUserVars = {};
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com/api',
|
||||
headers: {
|
||||
'User-Email-Header': '{{LIBRECHAT_USER_EMAIL}}', // Should use user.email
|
||||
'System-Key-Header': '${TEST_API_KEY}', // Should use process.env.TEST_API_KEY
|
||||
'Non-Existent-Custom': '{{NON_EXISTENT_CUSTOM_VAR}}', // Should remain as placeholder
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, user, customUserVars);
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
'User-Email-Header': 'user-provided-email@example.com',
|
||||
'System-Key-Header': 'test-api-key-value',
|
||||
'Non-Existent-Custom': '{{NON_EXISTENT_CUSTOM_VAR}}',
|
||||
});
|
||||
});
|
||||
|
||||
it('should correctly process a mix of all variable types', () => {
|
||||
const user = createTestUser({ id: 'userXYZ', username: 'john.doe' });
|
||||
const customUserVars = {
|
||||
CUSTOM_ENDPOINT_ID: 'ep123',
|
||||
ANOTHER_CUSTOM: 'another_val',
|
||||
};
|
||||
|
||||
const obj = {
|
||||
type: 'streamable-http' as const,
|
||||
url: 'https://{{CUSTOM_ENDPOINT_ID}}.example.com/users/{{LIBRECHAT_USER_USERNAME}}',
|
||||
headers: {
|
||||
'X-Auth-Token': '{{CUSTOM_TOKEN_FROM_USER_SETTINGS}}', // Assuming this would be a custom var
|
||||
'X-User-ID': '{{LIBRECHAT_USER_ID}}',
|
||||
'X-System-Test-Key': '${TEST_API_KEY}', // Using existing env var from beforeEach
|
||||
},
|
||||
env: {
|
||||
PROCESS_MODE: '{{PROCESS_MODE_CUSTOM}}', // Another custom var
|
||||
USER_HOME_DIR: '/home/{{LIBRECHAT_USER_USERNAME}}',
|
||||
SYSTEM_PATH: '${PATH}', // Example of a system env var
|
||||
},
|
||||
};
|
||||
|
||||
// Simulate customUserVars that would be passed, including those for headers and env
|
||||
const allCustomVarsForCall = {
|
||||
...customUserVars,
|
||||
CUSTOM_TOKEN_FROM_USER_SETTINGS: 'secretToken123!',
|
||||
PROCESS_MODE_CUSTOM: 'production',
|
||||
};
|
||||
|
||||
// Cast obj to MCPOptions when calling processMCPEnv.
|
||||
// This acknowledges the object might not strictly conform to one schema in the union,
|
||||
// but we are testing the function's ability to handle these properties if present.
|
||||
const result = processMCPEnv(obj as MCPOptions, user, allCustomVarsForCall);
|
||||
|
||||
expect('url' in result && result.url).toBe('https://ep123.example.com/users/john.doe');
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
'X-Auth-Token': 'secretToken123!',
|
||||
'X-User-ID': 'userXYZ',
|
||||
'X-System-Test-Key': 'test-api-key-value', // Expecting value of TEST_API_KEY
|
||||
});
|
||||
expect('env' in result && result.env).toEqual({
|
||||
PROCESS_MODE: 'production',
|
||||
USER_HOME_DIR: '/home/john.doe',
|
||||
SYSTEM_PATH: process.env.PATH, // Actual value of PATH from the test environment
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
603
packages/api/src/mcp/oauth/handler.ts
Normal file
603
packages/api/src/mcp/oauth/handler.ts
Normal file
|
|
@ -0,0 +1,603 @@
|
|||
import { randomBytes } from 'crypto';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import {
|
||||
discoverOAuthMetadata,
|
||||
registerClient,
|
||||
startAuthorization,
|
||||
exchangeAuthorization,
|
||||
discoverOAuthProtectedResourceMetadata,
|
||||
} from '@modelcontextprotocol/sdk/client/auth.js';
|
||||
import { OAuthMetadataSchema } from '@modelcontextprotocol/sdk/shared/auth.js';
|
||||
import type { MCPOptions } from 'librechat-data-provider';
|
||||
import type { FlowStateManager } from '~/flow/manager';
|
||||
import type {
|
||||
OAuthClientInformation,
|
||||
OAuthProtectedResourceMetadata,
|
||||
MCPOAuthFlowMetadata,
|
||||
MCPOAuthTokens,
|
||||
OAuthMetadata,
|
||||
} from './types';
|
||||
|
||||
/**
 * Type for the OAuth metadata from the SDK.
 * Derived from the `metadata` field of `registerClient`'s second parameter so
 * it stays in sync with the installed `@modelcontextprotocol/sdk` version.
 */
type SDKOAuthMetadata = Parameters<typeof registerClient>[1]['metadata'];
|
||||
|
||||
export class MCPOAuthHandler {
|
||||
  /** Namespace used as the flow "type" key for all MCP OAuth flow state */
  private static readonly FLOW_TYPE = 'mcp_oauth';
  /** Flow time-to-live in ms — presumably consumed by the flow manager; not referenced in this class body */
  private static readonly FLOW_TTL = 10 * 60 * 1000; // 10 minutes
|
||||
|
||||
/**
|
||||
* Discovers OAuth metadata from the server
|
||||
*/
|
||||
private static async discoverMetadata(serverUrl: string): Promise<{
|
||||
metadata: OAuthMetadata;
|
||||
resourceMetadata?: OAuthProtectedResourceMetadata;
|
||||
authServerUrl: URL;
|
||||
}> {
|
||||
logger.debug(`[MCPOAuth] discoverMetadata called with serverUrl: ${serverUrl}`);
|
||||
|
||||
let authServerUrl = new URL(serverUrl);
|
||||
let resourceMetadata: OAuthProtectedResourceMetadata | undefined;
|
||||
|
||||
try {
|
||||
// Try to discover resource metadata first
|
||||
logger.debug(
|
||||
`[MCPOAuth] Attempting to discover protected resource metadata from ${serverUrl}`,
|
||||
);
|
||||
resourceMetadata = await discoverOAuthProtectedResourceMetadata(serverUrl);
|
||||
|
||||
if (resourceMetadata?.authorization_servers?.length) {
|
||||
authServerUrl = new URL(resourceMetadata.authorization_servers[0]);
|
||||
logger.debug(
|
||||
`[MCPOAuth] Found authorization server from resource metadata: ${authServerUrl}`,
|
||||
);
|
||||
} else {
|
||||
logger.debug(`[MCPOAuth] No authorization servers found in resource metadata`);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.debug('[MCPOAuth] Resource metadata discovery failed, continuing with server URL', {
|
||||
error,
|
||||
});
|
||||
}
|
||||
|
||||
// Discover OAuth metadata
|
||||
logger.debug(`[MCPOAuth] Discovering OAuth metadata from ${authServerUrl}`);
|
||||
const rawMetadata = await discoverOAuthMetadata(authServerUrl);
|
||||
|
||||
if (!rawMetadata) {
|
||||
logger.error(`[MCPOAuth] Failed to discover OAuth metadata from ${authServerUrl}`);
|
||||
throw new Error('Failed to discover OAuth metadata');
|
||||
}
|
||||
|
||||
logger.debug(`[MCPOAuth] OAuth metadata discovered successfully`);
|
||||
const metadata = await OAuthMetadataSchema.parseAsync(rawMetadata);
|
||||
|
||||
logger.debug(`[MCPOAuth] OAuth metadata parsed successfully`);
|
||||
return {
|
||||
metadata: metadata as unknown as OAuthMetadata,
|
||||
resourceMetadata,
|
||||
authServerUrl,
|
||||
};
|
||||
}
|
||||
|
||||
  /**
   * Registers an OAuth client dynamically (RFC 7591) against the given server.
   *
   * The requested client metadata is negotiated from what the server's
   * discovered metadata advertises: grant types, response types, token
   * endpoint auth method, and scopes.
   *
   * @param serverUrl - Authorization server base URL to register against
   * @param metadata - Discovered OAuth server metadata
   * @param resourceMetadata - Optional protected-resource metadata (its scopes take precedence)
   * @param redirectUri - Redirect URI to register; defaults to this deployment's callback
   * @returns The registered client information (client_id, optional secret, etc.)
   */
  private static async registerOAuthClient(
    serverUrl: string,
    metadata: OAuthMetadata,
    resourceMetadata?: OAuthProtectedResourceMetadata,
    redirectUri?: string,
  ): Promise<OAuthClientInformation> {
    logger.debug(`[MCPOAuth] Starting client registration for ${serverUrl}, server metadata:`, {
      grant_types_supported: metadata.grant_types_supported,
      response_types_supported: metadata.response_types_supported,
      token_endpoint_auth_methods_supported: metadata.token_endpoint_auth_methods_supported,
      scopes_supported: metadata.scopes_supported,
    });

    /** Client metadata based on what the server supports */
    const clientMetadata = {
      client_name: 'LibreChat MCP Client',
      redirect_uris: [redirectUri || this.getDefaultRedirectUri()],
      grant_types: ['authorization_code'] as string[],
      response_types: ['code'] as string[],
      token_endpoint_auth_method: 'client_secret_basic',
      scope: undefined as string | undefined,
    };

    const supportedGrantTypes = metadata.grant_types_supported || ['authorization_code'];
    const requestedGrantTypes = ['authorization_code'];

    // Request refresh_token grant only when the server advertises support for it
    if (supportedGrantTypes.includes('refresh_token')) {
      requestedGrantTypes.push('refresh_token');
      logger.debug(
        `[MCPOAuth] Server ${serverUrl} supports \`refresh_token\` grant type, adding to request`,
      );
    } else {
      logger.debug(`[MCPOAuth] Server ${serverUrl} does not support \`refresh_token\` grant type`);
    }
    clientMetadata.grant_types = requestedGrantTypes;

    clientMetadata.response_types = metadata.response_types_supported || ['code'];

    if (metadata.token_endpoint_auth_methods_supported) {
      // Prefer client_secret_basic if supported, otherwise use the first supported method
      if (metadata.token_endpoint_auth_methods_supported.includes('client_secret_basic')) {
        clientMetadata.token_endpoint_auth_method = 'client_secret_basic';
      } else if (metadata.token_endpoint_auth_methods_supported.includes('client_secret_post')) {
        clientMetadata.token_endpoint_auth_method = 'client_secret_post';
      } else if (metadata.token_endpoint_auth_methods_supported.includes('none')) {
        clientMetadata.token_endpoint_auth_method = 'none';
      } else {
        clientMetadata.token_endpoint_auth_method =
          metadata.token_endpoint_auth_methods_supported[0];
      }
    }

    // Resource-level scopes take precedence over server-level scopes
    const availableScopes = resourceMetadata?.scopes_supported || metadata.scopes_supported;
    if (availableScopes) {
      clientMetadata.scope = availableScopes.join(' ');
    }

    logger.debug(`[MCPOAuth] Registering client for ${serverUrl} with metadata:`, clientMetadata);

    const clientInfo = await registerClient(serverUrl, {
      metadata: metadata as unknown as SDKOAuthMetadata,
      clientMetadata,
    });

    logger.debug(`[MCPOAuth] Client registered successfully for ${serverUrl}:`, {
      client_id: clientInfo.client_id,
      has_client_secret: !!clientInfo.client_secret,
      grant_types: clientInfo.grant_types,
      scope: clientInfo.scope,
    });

    return clientInfo;
  }
|
||||
|
||||
  /**
   * Initiates the OAuth flow for an MCP server.
   *
   * When `config` supplies `authorization_url`, `token_url`, and `client_id`,
   * the pre-configured settings are used directly; otherwise OAuth metadata is
   * auto-discovered and a client is registered dynamically.
   *
   * @param serverName - MCP server name (part of the flow ID and default redirect URI)
   * @param serverUrl - Base URL of the MCP server
   * @param userId - ID of the user initiating the flow
   * @param config - Optional pre-configured OAuth settings
   * @returns The authorization URL to send the user to, plus the flow ID and the
   *   metadata `completeOAuthFlow` later needs to exchange the code
   * @throws When discovery, client registration, or authorization setup fails
   */
  static async initiateOAuthFlow(
    serverName: string,
    serverUrl: string,
    userId: string,
    config: MCPOptions['oauth'] | undefined,
  ): Promise<{ authorizationUrl: string; flowId: string; flowMetadata: MCPOAuthFlowMetadata }> {
    logger.debug(`[MCPOAuth] initiateOAuthFlow called for ${serverName} with URL: ${serverUrl}`);

    const flowId = this.generateFlowId(userId, serverName);
    const state = this.generateState();

    logger.debug(`[MCPOAuth] Generated flowId: ${flowId}, state: ${state}`);

    try {
      // Check if we have pre-configured OAuth settings
      if (config?.authorization_url && config?.token_url && config?.client_id) {
        logger.debug(`[MCPOAuth] Using pre-configured OAuth settings for ${serverName}`);
        /** Metadata based on pre-configured settings */
        const metadata: OAuthMetadata = {
          authorization_endpoint: config.authorization_url,
          token_endpoint: config.token_url,
          issuer: serverUrl,
          scopes_supported: config.scope?.split(' '),
        };

        const clientInfo: OAuthClientInformation = {
          client_id: config.client_id,
          client_secret: config.client_secret,
          redirect_uris: [config.redirect_uri || this.getDefaultRedirectUri(serverName)],
          scope: config.scope,
        };

        logger.debug(`[MCPOAuth] Starting authorization with pre-configured settings`);
        const { authorizationUrl, codeVerifier } = await startAuthorization(serverUrl, {
          metadata: metadata as unknown as SDKOAuthMetadata,
          clientInformation: clientInfo,
          redirectUrl: clientInfo.redirect_uris?.[0] || this.getDefaultRedirectUri(serverName),
          scope: config.scope,
        });

        /** Add state parameter with flowId to the authorization URL */
        // NOTE(review): the URL's `state` is the flowId, not the random `state`
        // stored in flowMetadata below — presumably the callback resolves flows
        // by ID; confirm against the callback route.
        authorizationUrl.searchParams.set('state', flowId);
        logger.debug(`[MCPOAuth] Added state parameter to authorization URL`);

        const flowMetadata: MCPOAuthFlowMetadata = {
          serverName,
          userId,
          serverUrl,
          state,
          codeVerifier,
          clientInfo,
          metadata,
        };

        logger.debug(`[MCPOAuth] Authorization URL generated: ${authorizationUrl.toString()}`);
        return {
          authorizationUrl: authorizationUrl.toString(),
          flowId,
          flowMetadata,
        };
      }

      logger.debug(`[MCPOAuth] Starting auto-discovery of OAuth metadata from ${serverUrl}`);
      const { metadata, resourceMetadata, authServerUrl } = await this.discoverMetadata(serverUrl);

      logger.debug(`[MCPOAuth] OAuth metadata discovered, auth server URL: ${authServerUrl}`);

      /** Dynamic client registration based on the discovered metadata */
      const redirectUri = config?.redirect_uri || this.getDefaultRedirectUri(serverName);
      logger.debug(`[MCPOAuth] Registering OAuth client with redirect URI: ${redirectUri}`);

      const clientInfo = await this.registerOAuthClient(
        authServerUrl.toString(),
        metadata,
        resourceMetadata,
        redirectUri,
      );

      logger.debug(`[MCPOAuth] Client registered with ID: ${clientInfo.client_id}`);

      /** Authorization Scope: explicit config wins, then resource, then server metadata */
      const scope =
        config?.scope ||
        resourceMetadata?.scopes_supported?.join(' ') ||
        metadata.scopes_supported?.join(' ');

      logger.debug(`[MCPOAuth] Starting authorization with scope: ${scope}`);

      let authorizationUrl: URL;
      let codeVerifier: string;

      try {
        logger.debug(`[MCPOAuth] Calling startAuthorization...`);
        const authResult = await startAuthorization(serverUrl, {
          metadata: metadata as unknown as SDKOAuthMetadata,
          clientInformation: clientInfo,
          redirectUrl: redirectUri,
          scope,
        });

        authorizationUrl = authResult.authorizationUrl;
        codeVerifier = authResult.codeVerifier;

        logger.debug(`[MCPOAuth] startAuthorization completed successfully`);
        logger.debug(`[MCPOAuth] Authorization URL: ${authorizationUrl.toString()}`);

        /** Add state parameter with flowId to the authorization URL */
        authorizationUrl.searchParams.set('state', flowId);
        logger.debug(`[MCPOAuth] Added state parameter to authorization URL`);
      } catch (error) {
        logger.error(`[MCPOAuth] startAuthorization failed:`, error);
        throw error;
      }

      const flowMetadata: MCPOAuthFlowMetadata = {
        serverName,
        userId,
        serverUrl,
        state,
        codeVerifier,
        clientInfo,
        metadata,
        resourceMetadata,
      };

      logger.debug(
        `[MCPOAuth] Authorization URL generated for ${serverName}: ${authorizationUrl.toString()}`,
      );

      const result = {
        authorizationUrl: authorizationUrl.toString(),
        flowId,
        flowMetadata,
      };

      logger.debug(
        `[MCPOAuth] Returning from initiateOAuthFlow with result ${flowId} for ${serverName}`,
        result,
      );
      return result;
    } catch (error) {
      logger.error('[MCPOAuth] Failed to initiate OAuth flow', { error, serverName, userId });
      throw error;
    }
  }
|
||||
|
||||
/**
|
||||
* Completes the OAuth flow by exchanging the authorization code for tokens
|
||||
*/
|
||||
static async completeOAuthFlow(
|
||||
flowId: string,
|
||||
authorizationCode: string,
|
||||
flowManager: FlowStateManager<MCPOAuthTokens>,
|
||||
): Promise<MCPOAuthTokens> {
|
||||
try {
|
||||
/** Flow state which contains our metadata */
|
||||
const flowState = await flowManager.getFlowState(flowId, this.FLOW_TYPE);
|
||||
if (!flowState) {
|
||||
throw new Error('OAuth flow not found');
|
||||
}
|
||||
|
||||
const flowMetadata = flowState.metadata as MCPOAuthFlowMetadata;
|
||||
if (!flowMetadata) {
|
||||
throw new Error('OAuth flow metadata not found');
|
||||
}
|
||||
|
||||
const metadata = flowMetadata;
|
||||
if (!metadata.metadata || !metadata.clientInfo || !metadata.codeVerifier) {
|
||||
throw new Error('Invalid flow metadata');
|
||||
}
|
||||
|
||||
const tokens = await exchangeAuthorization(metadata.serverUrl, {
|
||||
metadata: metadata.metadata as unknown as SDKOAuthMetadata,
|
||||
clientInformation: metadata.clientInfo,
|
||||
authorizationCode,
|
||||
codeVerifier: metadata.codeVerifier,
|
||||
redirectUri: metadata.clientInfo.redirect_uris?.[0] || this.getDefaultRedirectUri(),
|
||||
});
|
||||
|
||||
logger.debug('[MCPOAuth] Raw tokens from exchange:', {
|
||||
access_token: tokens.access_token ? '[REDACTED]' : undefined,
|
||||
refresh_token: tokens.refresh_token ? '[REDACTED]' : undefined,
|
||||
expires_in: tokens.expires_in,
|
||||
token_type: tokens.token_type,
|
||||
scope: tokens.scope,
|
||||
});
|
||||
|
||||
const mcpTokens: MCPOAuthTokens = {
|
||||
...tokens,
|
||||
obtained_at: Date.now(),
|
||||
expires_at: tokens.expires_in ? Date.now() + tokens.expires_in * 1000 : undefined,
|
||||
};
|
||||
|
||||
/** Now complete the flow with the tokens */
|
||||
await flowManager.completeFlow(flowId, this.FLOW_TYPE, mcpTokens);
|
||||
|
||||
return mcpTokens;
|
||||
} catch (error) {
|
||||
logger.error('[MCPOAuth] Failed to complete OAuth flow', { error, flowId });
|
||||
await flowManager.failFlow(flowId, this.FLOW_TYPE, error as Error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the OAuth flow metadata
|
||||
*/
|
||||
static async getFlowState(
|
||||
flowId: string,
|
||||
flowManager: FlowStateManager<MCPOAuthTokens>,
|
||||
): Promise<MCPOAuthFlowMetadata | null> {
|
||||
const flowState = await flowManager.getFlowState(flowId, this.FLOW_TYPE);
|
||||
if (!flowState) {
|
||||
return null;
|
||||
}
|
||||
return flowState.metadata as MCPOAuthFlowMetadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a flow ID for the OAuth flow
|
||||
* @returns Consistent ID so concurrent requests share the same flow
|
||||
*/
|
||||
public static generateFlowId(userId: string, serverName: string): string {
|
||||
return `${userId}:${serverName}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a secure state parameter
|
||||
*/
|
||||
private static generateState(): string {
|
||||
return randomBytes(32).toString('base64url');
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the default redirect URI for a server
|
||||
*/
|
||||
private static getDefaultRedirectUri(serverName?: string): string {
|
||||
const baseUrl = process.env.DOMAIN_SERVER || 'http://localhost:3080';
|
||||
return serverName
|
||||
? `${baseUrl}/api/mcp/${serverName}/oauth/callback`
|
||||
: `${baseUrl}/api/mcp/oauth/callback`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Refreshes OAuth tokens using a refresh token
|
||||
*/
|
||||
static async refreshOAuthTokens(
|
||||
refreshToken: string,
|
||||
metadata: { serverName: string; serverUrl?: string; clientInfo?: OAuthClientInformation },
|
||||
config?: MCPOptions['oauth'],
|
||||
): Promise<MCPOAuthTokens> {
|
||||
logger.debug(`[MCPOAuth] Refreshing tokens for ${metadata.serverName}`);
|
||||
|
||||
try {
|
||||
/** If we have stored client information from the original flow, use that first */
|
||||
if (metadata.clientInfo?.client_id) {
|
||||
logger.debug(
|
||||
`[MCPOAuth] Using stored client information for token refresh for ${metadata.serverName}`,
|
||||
);
|
||||
logger.debug(
|
||||
`[MCPOAuth] Client ID: ${metadata.clientInfo.client_id} for ${metadata.serverName}`,
|
||||
);
|
||||
logger.debug(
|
||||
`[MCPOAuth] Has client secret: ${!!metadata.clientInfo.client_secret} for ${metadata.serverName}`,
|
||||
);
|
||||
logger.debug(`[MCPOAuth] Stored client info for ${metadata.serverName}:`, {
|
||||
client_id: metadata.clientInfo.client_id,
|
||||
has_client_secret: !!metadata.clientInfo.client_secret,
|
||||
grant_types: metadata.clientInfo.grant_types,
|
||||
scope: metadata.clientInfo.scope,
|
||||
});
|
||||
|
||||
/** Use the stored client information and metadata to determine the token URL */
|
||||
let tokenUrl: string;
|
||||
if (config?.token_url) {
|
||||
tokenUrl = config.token_url;
|
||||
} else if (!metadata.serverUrl) {
|
||||
throw new Error('No token URL available for refresh');
|
||||
} else {
|
||||
/** Auto-discover OAuth configuration for refresh */
|
||||
const { metadata: oauthMetadata } = await this.discoverMetadata(metadata.serverUrl);
|
||||
if (!oauthMetadata.token_endpoint) {
|
||||
throw new Error('No token endpoint found in OAuth metadata');
|
||||
}
|
||||
tokenUrl = oauthMetadata.token_endpoint;
|
||||
}
|
||||
|
||||
const body = new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
});
|
||||
|
||||
/** Add scope if available */
|
||||
if (metadata.clientInfo.scope) {
|
||||
body.append('scope', metadata.clientInfo.scope);
|
||||
}
|
||||
|
||||
const headers: HeadersInit = {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
Accept: 'application/json',
|
||||
};
|
||||
|
||||
/** Use client_secret for authentication if available */
|
||||
if (metadata.clientInfo.client_secret) {
|
||||
const clientAuth = Buffer.from(
|
||||
`${metadata.clientInfo.client_id}:${metadata.clientInfo.client_secret}`,
|
||||
).toString('base64');
|
||||
headers['Authorization'] = `Basic ${clientAuth}`;
|
||||
} else {
|
||||
/** For public clients, client_id must be in the body */
|
||||
body.append('client_id', metadata.clientInfo.client_id);
|
||||
}
|
||||
|
||||
logger.debug(`[MCPOAuth] Refresh request to: ${tokenUrl}`, {
|
||||
body: body.toString(),
|
||||
headers,
|
||||
});
|
||||
|
||||
const response = await fetch(tokenUrl, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(
|
||||
`Token refresh failed: ${response.status} ${response.statusText} - ${errorText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const tokens = await response.json();
|
||||
|
||||
return {
|
||||
...tokens,
|
||||
obtained_at: Date.now(),
|
||||
expires_at: tokens.expires_in ? Date.now() + tokens.expires_in * 1000 : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
// Fallback: If we have pre-configured OAuth settings, use them
|
||||
if (config?.token_url && config?.client_id) {
|
||||
logger.debug(`[MCPOAuth] Using pre-configured OAuth settings for token refresh`);
|
||||
|
||||
const tokenUrl = new URL(config.token_url);
|
||||
const clientAuth = config.client_secret
|
||||
? Buffer.from(`${config.client_id}:${config.client_secret}`).toString('base64')
|
||||
: null;
|
||||
|
||||
const body = new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
});
|
||||
|
||||
if (config.scope) {
|
||||
body.append('scope', config.scope);
|
||||
}
|
||||
|
||||
const headers: HeadersInit = {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
Accept: 'application/json',
|
||||
};
|
||||
|
||||
if (clientAuth) {
|
||||
headers['Authorization'] = `Basic ${clientAuth}`;
|
||||
} else {
|
||||
// Use client_id in body for public clients
|
||||
body.append('client_id', config.client_id);
|
||||
}
|
||||
|
||||
const response = await fetch(tokenUrl, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(
|
||||
`Token refresh failed: ${response.status} ${response.statusText} - ${errorText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const tokens = await response.json();
|
||||
|
||||
return {
|
||||
...tokens,
|
||||
obtained_at: Date.now(),
|
||||
expires_at: tokens.expires_in ? Date.now() + tokens.expires_in * 1000 : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
/** For auto-discovered OAuth, we need the server URL */
|
||||
if (!metadata.serverUrl) {
|
||||
throw new Error('Server URL required for auto-discovered OAuth token refresh');
|
||||
}
|
||||
|
||||
/** Auto-discover OAuth configuration for refresh */
|
||||
const { metadata: oauthMetadata } = await this.discoverMetadata(metadata.serverUrl);
|
||||
|
||||
if (!oauthMetadata.token_endpoint) {
|
||||
throw new Error('No token endpoint found in OAuth metadata');
|
||||
}
|
||||
|
||||
const tokenUrl = new URL(oauthMetadata.token_endpoint);
|
||||
|
||||
const body = new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token: refreshToken,
|
||||
});
|
||||
|
||||
const headers: HeadersInit = {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
Accept: 'application/json',
|
||||
};
|
||||
|
||||
const response = await fetch(tokenUrl, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(
|
||||
`Token refresh failed: ${response.status} ${response.statusText} - ${errorText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const tokens = await response.json();
|
||||
|
||||
return {
|
||||
...tokens,
|
||||
obtained_at: Date.now(),
|
||||
expires_at: tokens.expires_in ? Date.now() + tokens.expires_in * 1000 : undefined,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`[MCPOAuth] Failed to refresh tokens for ${metadata.serverName}`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
3
packages/api/src/mcp/oauth/index.ts
Normal file
3
packages/api/src/mcp/oauth/index.ts
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
export * from './types';
|
||||
export * from './handler';
|
||||
export * from './tokens';
|
||||
382
packages/api/src/mcp/oauth/tokens.ts
Normal file
382
packages/api/src/mcp/oauth/tokens.ts
Normal file
|
|
@ -0,0 +1,382 @@
|
|||
import { logger } from '@librechat/data-schemas';
|
||||
import type { OAuthTokens, OAuthClientInformation } from '@modelcontextprotocol/sdk/shared/auth.js';
|
||||
import type { TokenMethods, IToken } from '@librechat/data-schemas';
|
||||
import type { MCPOAuthTokens, ExtendedOAuthTokens } from './types';
|
||||
import { encryptV2, decryptV2 } from '~/crypto';
|
||||
import { isSystemUserId } from '~/mcp/enum';
|
||||
|
||||
/** Parameters accepted by `MCPTokenStorage.storeTokens`. */
interface StoreTokensParams {
  /** ID of the owning user (may be a system-user sentinel — see `isSystemUserId`) */
  userId: string;
  /** MCP server the tokens belong to; used to build the `mcp:{serverName}` identifier */
  serverName: string;
  /** Tokens to persist, in any of the supported token shapes */
  tokens: OAuthTokens | ExtendedOAuthTokens | MCPOAuthTokens;
  /** Creates a new token record */
  createToken: TokenMethods['createToken'];
  /** Optional: with `findToken`, enables in-place updates of existing records */
  updateToken?: TokenMethods['updateToken'];
  /** Optional: used to detect an existing record before writing */
  findToken?: TokenMethods['findToken'];
  /** OAuth client registration info — presumably persisted alongside the tokens; confirm in storeTokens */
  clientInfo?: OAuthClientInformation;
  /** Optional: Pass existing token state to avoid duplicate DB calls */
  existingTokens?: {
    accessToken?: IToken | null;
    refreshToken?: IToken | null;
    clientInfoToken?: IToken | null;
  };
}
|
||||
|
||||
/** Parameters for token retrieval (consumed by a `getTokens`-style method — not visible in this chunk). */
interface GetTokensParams {
  /** ID of the owning user */
  userId: string;
  /** MCP server whose tokens are being looked up */
  serverName: string;
  /** Looks up an existing token record */
  findToken: TokenMethods['findToken'];
  /** Optional callback to refresh expired tokens; receives the decrypted refresh token */
  refreshTokens?: (
    refreshToken: string,
    metadata: { userId: string; serverName: string; identifier: string },
  ) => Promise<MCPOAuthTokens>;
  /** Optional persistence methods so refreshed tokens can be stored back */
  createToken?: TokenMethods['createToken'];
  updateToken?: TokenMethods['updateToken'];
}
|
||||
|
||||
export class MCPTokenStorage {
|
||||
static getLogPrefix(userId: string, serverName: string): string {
|
||||
return isSystemUserId(userId)
|
||||
? `[MCP][${serverName}]`
|
||||
: `[MCP][User: ${userId}][${serverName}]`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores OAuth tokens for an MCP server
|
||||
*
|
||||
* @param params.existingTokens - Optional: Pass existing token state to avoid duplicate DB calls.
|
||||
* This is useful when refreshing tokens, as getTokens() already has the token state.
|
||||
*/
|
||||
static async storeTokens({
|
||||
userId,
|
||||
serverName,
|
||||
tokens,
|
||||
createToken,
|
||||
updateToken,
|
||||
findToken,
|
||||
clientInfo,
|
||||
existingTokens,
|
||||
}: StoreTokensParams): Promise<void> {
|
||||
const logPrefix = this.getLogPrefix(userId, serverName);
|
||||
|
||||
try {
|
||||
const identifier = `mcp:${serverName}`;
|
||||
|
||||
// Encrypt and store access token
|
||||
const encryptedAccessToken = await encryptV2(tokens.access_token);
|
||||
|
||||
logger.debug(
|
||||
`${logPrefix} Token expires_in: ${'expires_in' in tokens ? tokens.expires_in : 'N/A'}, expires_at: ${'expires_at' in tokens ? tokens.expires_at : 'N/A'}`,
|
||||
);
|
||||
|
||||
// Handle both expires_in and expires_at formats
|
||||
let accessTokenExpiry: Date;
|
||||
if ('expires_at' in tokens && tokens.expires_at) {
|
||||
/** MCPOAuthTokens format - already has calculated expiry */
|
||||
logger.debug(`${logPrefix} Using expires_at: ${tokens.expires_at}`);
|
||||
accessTokenExpiry = new Date(tokens.expires_at);
|
||||
} else if (tokens.expires_in) {
|
||||
/** Standard OAuthTokens format - calculate expiry */
|
||||
logger.debug(`${logPrefix} Using expires_in: ${tokens.expires_in}`);
|
||||
accessTokenExpiry = new Date(Date.now() + tokens.expires_in * 1000);
|
||||
} else {
|
||||
/** No expiry provided - default to 1 year */
|
||||
logger.debug(`${logPrefix} No expiry provided, using default`);
|
||||
accessTokenExpiry = new Date(Date.now() + 365 * 24 * 60 * 60 * 1000);
|
||||
}
|
||||
|
||||
logger.debug(`${logPrefix} Calculated expiry date: ${accessTokenExpiry.toISOString()}`);
|
||||
logger.debug(
|
||||
`${logPrefix} Date object: ${JSON.stringify({
|
||||
time: accessTokenExpiry.getTime(),
|
||||
valid: !isNaN(accessTokenExpiry.getTime()),
|
||||
iso: accessTokenExpiry.toISOString(),
|
||||
})}`,
|
||||
);
|
||||
|
||||
// Ensure the date is valid before passing to createToken
|
||||
if (isNaN(accessTokenExpiry.getTime())) {
|
||||
logger.error(`${logPrefix} Invalid expiry date calculated, using default`);
|
||||
accessTokenExpiry = new Date(Date.now() + 365 * 24 * 60 * 60 * 1000);
|
||||
}
|
||||
|
||||
// Calculate expiresIn (seconds from now)
|
||||
const expiresIn = Math.floor((accessTokenExpiry.getTime() - Date.now()) / 1000);
|
||||
|
||||
const accessTokenData = {
|
||||
userId,
|
||||
type: 'mcp_oauth',
|
||||
identifier,
|
||||
token: encryptedAccessToken,
|
||||
expiresIn: expiresIn > 0 ? expiresIn : 365 * 24 * 60 * 60, // Default to 1 year if negative
|
||||
};
|
||||
|
||||
// Check if token already exists and update if it does
|
||||
if (findToken && updateToken) {
|
||||
// Use provided existing token state if available, otherwise look it up
|
||||
const existingToken =
|
||||
existingTokens?.accessToken !== undefined
|
||||
? existingTokens.accessToken
|
||||
: await findToken({ userId, identifier });
|
||||
|
||||
if (existingToken) {
|
||||
await updateToken({ userId, identifier }, accessTokenData);
|
||||
logger.debug(`${logPrefix} Updated existing access token`);
|
||||
} else {
|
||||
await createToken(accessTokenData);
|
||||
logger.debug(`${logPrefix} Created new access token`);
|
||||
}
|
||||
} else {
|
||||
// Create new token if it's initial store or update methods not provided
|
||||
await createToken(accessTokenData);
|
||||
logger.debug(`${logPrefix} Created access token (no update methods available)`);
|
||||
}
|
||||
|
||||
// Store refresh token if available
|
||||
if (tokens.refresh_token) {
|
||||
const encryptedRefreshToken = await encryptV2(tokens.refresh_token);
|
||||
const extendedTokens = tokens as ExtendedOAuthTokens;
|
||||
const refreshTokenExpiry = extendedTokens.refresh_token_expires_in
|
||||
? new Date(Date.now() + extendedTokens.refresh_token_expires_in * 1000)
|
||||
: new Date(Date.now() + 365 * 24 * 60 * 60 * 1000); // Default to 1 year
|
||||
|
||||
/** Calculated expiresIn for refresh token */
|
||||
const refreshExpiresIn = Math.floor((refreshTokenExpiry.getTime() - Date.now()) / 1000);
|
||||
|
||||
const refreshTokenData = {
|
||||
userId,
|
||||
type: 'mcp_oauth_refresh',
|
||||
identifier: `${identifier}:refresh`,
|
||||
token: encryptedRefreshToken,
|
||||
expiresIn: refreshExpiresIn > 0 ? refreshExpiresIn : 365 * 24 * 60 * 60,
|
||||
};
|
||||
|
||||
// Check if refresh token already exists and update if it does
|
||||
if (findToken && updateToken) {
|
||||
// Use provided existing token state if available, otherwise look it up
|
||||
const existingRefreshToken =
|
||||
existingTokens?.refreshToken !== undefined
|
||||
? existingTokens.refreshToken
|
||||
: await findToken({
|
||||
userId,
|
||||
identifier: `${identifier}:refresh`,
|
||||
});
|
||||
|
||||
if (existingRefreshToken) {
|
||||
await updateToken({ userId, identifier: `${identifier}:refresh` }, refreshTokenData);
|
||||
logger.debug(`${logPrefix} Updated existing refresh token`);
|
||||
} else {
|
||||
await createToken(refreshTokenData);
|
||||
logger.debug(`${logPrefix} Created new refresh token`);
|
||||
}
|
||||
} else {
|
||||
await createToken(refreshTokenData);
|
||||
logger.debug(`${logPrefix} Created refresh token (no update methods available)`);
|
||||
}
|
||||
}
|
||||
|
||||
/** Store client information if provided */
|
||||
if (clientInfo) {
|
||||
logger.debug(`${logPrefix} Storing client info:`, {
|
||||
client_id: clientInfo.client_id,
|
||||
has_client_secret: !!clientInfo.client_secret,
|
||||
});
|
||||
const encryptedClientInfo = await encryptV2(JSON.stringify(clientInfo));
|
||||
|
||||
const clientInfoData = {
|
||||
userId,
|
||||
type: 'mcp_oauth_client',
|
||||
identifier: `${identifier}:client`,
|
||||
token: encryptedClientInfo,
|
||||
expiresIn: 365 * 24 * 60 * 60,
|
||||
};
|
||||
|
||||
// Check if client info already exists and update if it does
|
||||
if (findToken && updateToken) {
|
||||
// Use provided existing token state if available, otherwise look it up
|
||||
const existingClientInfo =
|
||||
existingTokens?.clientInfoToken !== undefined
|
||||
? existingTokens.clientInfoToken
|
||||
: await findToken({
|
||||
userId,
|
||||
identifier: `${identifier}:client`,
|
||||
});
|
||||
|
||||
if (existingClientInfo) {
|
||||
await updateToken({ userId, identifier: `${identifier}:client` }, clientInfoData);
|
||||
logger.debug(`${logPrefix} Updated existing client info`);
|
||||
} else {
|
||||
await createToken(clientInfoData);
|
||||
logger.debug(`${logPrefix} Created new client info`);
|
||||
}
|
||||
} else {
|
||||
await createToken(clientInfoData);
|
||||
logger.debug(`${logPrefix} Created client info (no update methods available)`);
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug(`${logPrefix} Stored OAuth tokens`);
|
||||
} catch (error) {
|
||||
const logPrefix = this.getLogPrefix(userId, serverName);
|
||||
logger.error(`${logPrefix} Failed to store tokens`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves OAuth tokens for an MCP server
|
||||
*/
|
||||
static async getTokens({
  userId,
  serverName,
  findToken,
  createToken,
  updateToken,
  refreshTokens,
}: GetTokensParams): Promise<MCPOAuthTokens | null> {
  const logPrefix = this.getLogPrefix(userId, serverName);

  try {
    const identifier = `mcp:${serverName}`;

    // Get access token
    const accessTokenData = await findToken({
      userId,
      type: 'mcp_oauth',
      identifier,
    });

    /** Check if access token is missing or expired */
    const isMissing = !accessTokenData;
    const isExpired = accessTokenData?.expiresAt && new Date() >= accessTokenData.expiresAt;

    if (isMissing || isExpired) {
      logger.info(`${logPrefix} Access token ${isMissing ? 'missing' : 'expired'}`);

      /** Refresh data if we have a refresh token and refresh function */
      const refreshTokenData = await findToken({
        userId,
        type: 'mcp_oauth_refresh',
        identifier: `${identifier}:refresh`,
      });

      // No refresh token on record: nothing we can do, caller must re-authenticate
      if (!refreshTokenData) {
        logger.info(
          `${logPrefix} Access token ${isMissing ? 'missing' : 'expired'} and no refresh token available`,
        );
        return null;
      }

      // A refresh token exists, but the caller did not supply the means to use it
      if (!refreshTokens) {
        logger.warn(
          `${logPrefix} Access token ${isMissing ? 'missing' : 'expired'}, refresh token available but no \`refreshTokens\` provided`,
        );
        return null;
      }

      // `createToken` is required to persist the refreshed tokens afterwards
      if (!createToken) {
        logger.warn(
          `${logPrefix} Access token ${isMissing ? 'missing' : 'expired'}, refresh token available but no \`createToken\` function provided`,
        );
        return null;
      }

      try {
        logger.info(`${logPrefix} Attempting to refresh token`);
        const decryptedRefreshToken = await decryptV2(refreshTokenData.token);

        /** Client information if available */
        let clientInfo;
        let clientInfoData;
        try {
          clientInfoData = await findToken({
            userId,
            type: 'mcp_oauth_client',
            identifier: `${identifier}:client`,
          });
          if (clientInfoData) {
            const decryptedClientInfo = await decryptV2(clientInfoData.token);
            clientInfo = JSON.parse(decryptedClientInfo);
            logger.debug(`${logPrefix} Retrieved client info:`, {
              client_id: clientInfo.client_id,
              has_client_secret: !!clientInfo.client_secret,
            });
          }
        } catch {
          // Best-effort: missing/undecryptable client info is not fatal to the refresh
          logger.debug(`${logPrefix} No client info found`);
        }

        const metadata = {
          userId,
          serverName,
          identifier,
          clientInfo,
        };

        const newTokens = await refreshTokens(decryptedRefreshToken, metadata);

        // Store the refreshed tokens (handles both create and update)
        // Pass existing token state to avoid duplicate DB calls
        await this.storeTokens({
          userId,
          serverName,
          tokens: newTokens,
          createToken,
          updateToken,
          findToken,
          clientInfo,
          existingTokens: {
            accessToken: accessTokenData, // We know this is expired/missing
            refreshToken: refreshTokenData, // We already have this
            clientInfoToken: clientInfoData, // We already looked this up
          },
        });

        logger.info(`${logPrefix} Successfully refreshed and stored OAuth tokens`);
        return newTokens;
      } catch (refreshError) {
        logger.error(`${logPrefix} Failed to refresh tokens`, refreshError);
        // Check if it's an unauthorized_client error (refresh not supported)
        const errorMessage =
          refreshError instanceof Error ? refreshError.message : String(refreshError);
        if (errorMessage.includes('unauthorized_client')) {
          logger.info(
            `${logPrefix} Server does not support refresh tokens for this client. New authentication required.`,
          );
        }
        return null;
      }
    }

    // If we reach here, access token should exist and be valid
    // (redundant guard kept for type narrowing and defense in depth)
    if (!accessTokenData) {
      return null;
    }

    const decryptedAccessToken = await decryptV2(accessTokenData.token);

    /** Get refresh token if available */
    const refreshTokenData = await findToken({
      userId,
      type: 'mcp_oauth_refresh',
      identifier: `${identifier}:refresh`,
    });

    // Reconstitute the SDK-shaped token object from the stored record
    const tokens: MCPOAuthTokens = {
      access_token: decryptedAccessToken,
      token_type: 'Bearer',
      obtained_at: accessTokenData.createdAt.getTime(),
      expires_at: accessTokenData.expiresAt?.getTime(),
    };

    if (refreshTokenData) {
      tokens.refresh_token = await decryptV2(refreshTokenData.token);
    }

    logger.debug(`${logPrefix} Loaded existing OAuth tokens from storage`);
    return tokens;
  } catch (error) {
    // Retrieval is best-effort: callers treat `null` as "re-authentication needed"
    logger.error(`${logPrefix} Failed to retrieve tokens`, error);
    return null;
  }
}
|
||||
}
|
||||
98
packages/api/src/mcp/oauth/types.ts
Normal file
98
packages/api/src/mcp/oauth/types.ts
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
import type { OAuthTokens } from '@modelcontextprotocol/sdk/shared/auth.js';
|
||||
import type { FlowMetadata } from '~/flow/types';
|
||||
|
||||
/** OAuth 2.0 authorization server metadata (discovery document shape). */
export interface OAuthMetadata {
  /** OAuth authorization endpoint */
  authorization_endpoint: string;
  /** OAuth token endpoint */
  token_endpoint: string;
  /** OAuth issuer */
  issuer?: string;
  /** Supported scopes */
  scopes_supported?: string[];
  /** Response types supported */
  response_types_supported?: string[];
  /** Grant types supported */
  grant_types_supported?: string[];
  /** Token endpoint auth methods supported */
  token_endpoint_auth_methods_supported?: string[];
  /** Code challenge methods supported */
  code_challenge_methods_supported?: string[];
}

/** OAuth 2.0 protected resource metadata (resource discovery shape). */
export interface OAuthProtectedResourceMetadata {
  /** Resource identifier */
  resource: string;
  /** Authorization servers */
  authorization_servers?: string[];
  /** Scopes supported by the resource */
  scopes_supported?: string[];
}

/** Client registration details used when talking to the authorization server. */
export interface OAuthClientInformation {
  /** Client ID */
  client_id: string;
  /** Client secret (optional for public clients) */
  client_secret?: string;
  /** Client name */
  client_name?: string;
  /** Redirect URIs */
  redirect_uris?: string[];
  /** Grant types */
  grant_types?: string[];
  /** Response types */
  response_types?: string[];
  /** Scope */
  scope?: string;
  /** Token endpoint auth method */
  token_endpoint_auth_method?: string;
}

/** Snapshot of an in-progress MCP OAuth flow. */
export interface MCPOAuthState {
  /** Current step in the OAuth flow */
  step: 'discovery' | 'registration' | 'authorization' | 'token_exchange' | 'complete' | 'error';
  /** Server name */
  serverName: string;
  /** User ID */
  userId: string;
  /** OAuth metadata from discovery */
  metadata?: OAuthMetadata;
  /** Resource metadata */
  resourceMetadata?: OAuthProtectedResourceMetadata;
  /** Client information */
  clientInfo?: OAuthClientInformation;
  /** Authorization URL */
  authorizationUrl?: string;
  /** Code verifier for PKCE */
  codeVerifier?: string;
  /** State parameter for OAuth flow */
  state?: string;
  /** Error information */
  error?: string;
  /** Timestamp */
  timestamp: number;
}

/** Flow-manager metadata persisted while an MCP OAuth login is pending. */
export interface MCPOAuthFlowMetadata extends FlowMetadata {
  /** MCP server the flow belongs to */
  serverName: string;
  /** User performing the flow */
  userId: string;
  /** URL of the MCP server (presumably the connection target — confirm against caller) */
  serverUrl: string;
  /** State parameter for the OAuth flow */
  state: string;
  /** Code verifier for PKCE */
  codeVerifier?: string;
  /** Client information */
  clientInfo?: OAuthClientInformation;
  /** OAuth metadata from discovery */
  metadata?: OAuthMetadata;
  /** Resource metadata */
  resourceMetadata?: OAuthProtectedResourceMetadata;
}

/** SDK OAuth tokens augmented with locally-computed timing fields. */
export interface MCPOAuthTokens extends OAuthTokens {
  /** When the tokens were obtained */
  obtained_at: number;
  /** Calculated expiry time */
  expires_at?: number;
}

/** Extended OAuth tokens that may include refresh token expiry */
export interface ExtendedOAuthTokens extends OAuthTokens {
  /** Refresh token expiry in seconds (non-standard, some providers include this) */
  refresh_token_expires_in?: number;
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
import type * as t from './types/mcp';
|
||||
import type * as t from './types';
|
||||
const RECOGNIZED_PROVIDERS = new Set([
|
||||
'google',
|
||||
'anthropic',
|
||||
|
|
@ -7,6 +7,7 @@ const RECOGNIZED_PROVIDERS = new Set([
|
|||
'xai',
|
||||
'deepseek',
|
||||
'ollama',
|
||||
'bedrock',
|
||||
]);
|
||||
const CONTENT_ARRAY_PROVIDERS = new Set(['google', 'anthropic', 'openai']);
|
||||
|
||||
|
|
@ -8,14 +8,21 @@ import {
|
|||
StreamableHTTPOptionsSchema,
|
||||
} from 'librechat-data-provider';
|
||||
import type { JsonSchemaType, TPlugin } from 'librechat-data-provider';
|
||||
import { ToolSchema, ListToolsResultSchema } from '@modelcontextprotocol/sdk/types.js';
|
||||
import type * as t from '@modelcontextprotocol/sdk/types.js';
|
||||
|
||||
export type StdioOptions = z.infer<typeof StdioOptionsSchema>;
|
||||
export type WebSocketOptions = z.infer<typeof WebSocketOptionsSchema>;
|
||||
export type SSEOptions = z.infer<typeof SSEOptionsSchema>;
|
||||
export type StreamableHTTPOptions = z.infer<typeof StreamableHTTPOptionsSchema>;
|
||||
export type MCPOptions = z.infer<typeof MCPOptionsSchema>;
|
||||
export type MCPOptions = z.infer<typeof MCPOptionsSchema> & {
|
||||
customUserVars?: Record<
|
||||
string,
|
||||
{
|
||||
title: string;
|
||||
description: string;
|
||||
}
|
||||
>;
|
||||
};
|
||||
export type MCPServers = z.infer<typeof MCPServersSchema>;
|
||||
export interface MCPResource {
|
||||
uri: string;
|
||||
|
|
@ -45,8 +52,8 @@ export interface MCPPrompt {
|
|||
|
||||
export type ConnectionState = 'disconnected' | 'connecting' | 'connected' | 'error';
|
||||
|
||||
export type MCPTool = z.infer<typeof ToolSchema>;
|
||||
export type MCPToolListResponse = z.infer<typeof ListToolsResultSchema>;
|
||||
export type MCPTool = z.infer<typeof t.ToolSchema>;
|
||||
export type MCPToolListResponse = z.infer<typeof t.ListToolsResultSchema>;
|
||||
export type ToolContentPart = t.TextContent | t.ImageContent | t.EmbeddedResource | t.AudioContent;
|
||||
export type ImageContent = Extract<ToolContentPart, { type: 'image' }>;
|
||||
export type MCPToolCallResponse =
|
||||
|
|
@ -1,3 +1,6 @@
|
|||
import { Constants } from 'librechat-data-provider';
|
||||
|
||||
export const mcpToolPattern = new RegExp(`^.+${Constants.mcp_delimiter}.+$`);
|
||||
/**
|
||||
* Normalizes a server name to match the pattern ^[a-zA-Z0-9_.-]+$
|
||||
* This is required for Azure OpenAI models with Tool Calling
|
||||
141
packages/api/src/middleware/access.ts
Normal file
141
packages/api/src/middleware/access.ts
Normal file
|
|
@ -0,0 +1,141 @@
|
|||
import { logger } from '@librechat/data-schemas';
|
||||
import {
|
||||
Permissions,
|
||||
EndpointURLs,
|
||||
EModelEndpoint,
|
||||
PermissionTypes,
|
||||
isAgentsEndpoint,
|
||||
} from 'librechat-data-provider';
|
||||
import type { NextFunction, Request as ServerRequest, Response as ServerResponse } from 'express';
|
||||
import type { IRole, IUser } from '@librechat/data-schemas';
|
||||
|
||||
export function skipAgentCheck(req?: ServerRequest): boolean {
|
||||
if (!req || !req?.body?.endpoint) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (req.method !== 'POST') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!req.originalUrl?.includes(EndpointURLs[EModelEndpoint.agents])) {
|
||||
return false;
|
||||
}
|
||||
return !isAgentsEndpoint(req.body.endpoint);
|
||||
}
|
||||
|
||||
/**
|
||||
* Core function to check if a user has one or more required permissions
|
||||
* @param user - The user object
|
||||
* @param permissionType - The type of permission to check
|
||||
* @param permissions - The list of specific permissions to check
|
||||
* @param bodyProps - An optional object where keys are permissions and values are arrays of properties to check
|
||||
* @param checkObject - The object to check properties against
|
||||
* @param skipCheck - An optional function that takes the checkObject and returns true to skip permission checking
|
||||
* @returns Whether the user has the required permissions
|
||||
*/
|
||||
export const checkAccess = async ({
|
||||
req,
|
||||
user,
|
||||
permissionType,
|
||||
permissions,
|
||||
getRoleByName,
|
||||
bodyProps = {} as Record<Permissions, string[]>,
|
||||
checkObject = {},
|
||||
skipCheck,
|
||||
}: {
|
||||
user: IUser;
|
||||
req?: ServerRequest;
|
||||
permissionType: PermissionTypes;
|
||||
permissions: Permissions[];
|
||||
bodyProps?: Record<Permissions, string[]>;
|
||||
checkObject?: object;
|
||||
/** If skipCheck function is provided and returns true, skip permission checking */
|
||||
skipCheck?: (req?: ServerRequest) => boolean;
|
||||
getRoleByName: (roleName: string, fieldsToSelect?: string | string[]) => Promise<IRole | null>;
|
||||
}): Promise<boolean> => {
|
||||
if (skipCheck && skipCheck(req)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!user || !user.role) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const role = await getRoleByName(user.role);
|
||||
if (role && role.permissions && role.permissions[permissionType]) {
|
||||
const hasAnyPermission = permissions.some((permission) => {
|
||||
if (
|
||||
role.permissions?.[permissionType as keyof typeof role.permissions]?.[
|
||||
permission as keyof (typeof role.permissions)[typeof permissionType]
|
||||
]
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (bodyProps[permission] && checkObject) {
|
||||
return bodyProps[permission].some((prop) =>
|
||||
Object.prototype.hasOwnProperty.call(checkObject, prop),
|
||||
);
|
||||
}
|
||||
|
||||
return false;
|
||||
});
|
||||
|
||||
return hasAnyPermission;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
/**
|
||||
* Middleware to check if a user has one or more required permissions, optionally based on `req.body` properties.
|
||||
* @param permissionType - The type of permission to check.
|
||||
* @param permissions - The list of specific permissions to check.
|
||||
* @param bodyProps - An optional object where keys are permissions and values are arrays of `req.body` properties to check.
|
||||
* @param skipCheck - An optional function that takes req.body and returns true to skip permission checking.
|
||||
* @param getRoleByName - A function to get the role by name.
|
||||
* @returns Express middleware function.
|
||||
*/
|
||||
export const generateCheckAccess = ({
|
||||
permissionType,
|
||||
permissions,
|
||||
bodyProps = {} as Record<Permissions, string[]>,
|
||||
skipCheck,
|
||||
getRoleByName,
|
||||
}: {
|
||||
permissionType: PermissionTypes;
|
||||
permissions: Permissions[];
|
||||
bodyProps?: Record<Permissions, string[]>;
|
||||
skipCheck?: (req?: ServerRequest) => boolean;
|
||||
getRoleByName: (roleName: string, fieldsToSelect?: string | string[]) => Promise<IRole | null>;
|
||||
}): ((req: ServerRequest, res: ServerResponse, next: NextFunction) => Promise<unknown>) => {
|
||||
return async (req, res, next) => {
|
||||
try {
|
||||
const hasAccess = await checkAccess({
|
||||
req,
|
||||
user: req.user as IUser,
|
||||
permissionType,
|
||||
permissions,
|
||||
bodyProps,
|
||||
checkObject: req.body,
|
||||
skipCheck,
|
||||
getRoleByName,
|
||||
});
|
||||
|
||||
if (hasAccess) {
|
||||
return next();
|
||||
}
|
||||
|
||||
logger.warn(
|
||||
`[${permissionType}] Forbidden: "${req.originalUrl}" - Insufficient permissions for User ${req.user?.id}: ${permissions.join(', ')}`,
|
||||
);
|
||||
return res.status(403).json({ message: 'Forbidden: Insufficient permissions' });
|
||||
} catch (error) {
|
||||
logger.error(error);
|
||||
return res.status(500).json({
|
||||
message: `Server error: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
});
|
||||
}
|
||||
};
|
||||
};
|
||||
1
packages/api/src/middleware/index.ts
Normal file
1
packages/api/src/middleware/index.ts
Normal file
|
|
@ -0,0 +1 @@
|
|||
export * from './access';
|
||||
1
packages/api/src/oauth/index.ts
Normal file
1
packages/api/src/oauth/index.ts
Normal file
|
|
@ -0,0 +1 @@
|
|||
export * from './tokens';
|
||||
324
packages/api/src/oauth/tokens.ts
Normal file
324
packages/api/src/oauth/tokens.ts
Normal file
|
|
@ -0,0 +1,324 @@
|
|||
import axios from 'axios';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import { TokenExchangeMethodEnum } from 'librechat-data-provider';
|
||||
import type { TokenMethods } from '@librechat/data-schemas';
|
||||
import type { AxiosError } from 'axios';
|
||||
import { encryptV2, decryptV2 } from '~/crypto';
|
||||
import { logAxiosError } from '~/utils';
|
||||
|
||||
export function createHandleOAuthToken({
|
||||
findToken,
|
||||
updateToken,
|
||||
createToken,
|
||||
}: {
|
||||
findToken: TokenMethods['findToken'];
|
||||
updateToken: TokenMethods['updateToken'];
|
||||
createToken: TokenMethods['createToken'];
|
||||
}) {
|
||||
/**
|
||||
* Handles the OAuth token by creating or updating the token.
|
||||
* @param fields
|
||||
* @param fields.userId - The user's ID.
|
||||
* @param fields.token - The full token to store.
|
||||
* @param fields.identifier - Unique, alternative identifier for the token.
|
||||
* @param fields.expiresIn - The number of seconds until the token expires.
|
||||
* @param fields.metadata - Additional metadata to store with the token.
|
||||
* @param [fields.type="oauth"] - The type of token. Default is 'oauth'.
|
||||
*/
|
||||
return async function handleOAuthToken({
|
||||
token,
|
||||
userId,
|
||||
identifier,
|
||||
expiresIn,
|
||||
metadata,
|
||||
type = 'oauth',
|
||||
}: {
|
||||
token: string;
|
||||
userId: string;
|
||||
identifier: string;
|
||||
expiresIn?: number | string | null;
|
||||
metadata?: Record<string, unknown>;
|
||||
type?: string;
|
||||
}) {
|
||||
const encrypedToken = await encryptV2(token);
|
||||
let expiresInNumber = 3600;
|
||||
if (typeof expiresIn === 'number') {
|
||||
expiresInNumber = expiresIn;
|
||||
} else if (expiresIn != null) {
|
||||
expiresInNumber = parseInt(expiresIn, 10) || 3600;
|
||||
}
|
||||
const tokenData = {
|
||||
type,
|
||||
userId,
|
||||
metadata,
|
||||
identifier,
|
||||
token: encrypedToken,
|
||||
expiresIn: expiresInNumber,
|
||||
};
|
||||
|
||||
const existingToken = await findToken({ userId, identifier });
|
||||
if (existingToken) {
|
||||
return await updateToken({ identifier }, tokenData);
|
||||
} else {
|
||||
return await createToken(tokenData);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes the access tokens and stores them in the database.
|
||||
* @param tokenData
|
||||
* @param tokenData.access_token
|
||||
* @param tokenData.expires_in
|
||||
* @param [tokenData.refresh_token]
|
||||
* @param [tokenData.refresh_token_expires_in]
|
||||
* @param metadata
|
||||
* @param metadata.userId
|
||||
* @param metadata.identifier
|
||||
*/
|
||||
async function processAccessTokens(
|
||||
tokenData: {
|
||||
access_token: string;
|
||||
expires_in: number;
|
||||
refresh_token?: string;
|
||||
refresh_token_expires_in?: number;
|
||||
},
|
||||
{ userId, identifier }: { userId: string; identifier: string },
|
||||
{
|
||||
findToken,
|
||||
updateToken,
|
||||
createToken,
|
||||
}: {
|
||||
findToken: TokenMethods['findToken'];
|
||||
updateToken: TokenMethods['updateToken'];
|
||||
createToken: TokenMethods['createToken'];
|
||||
},
|
||||
) {
|
||||
const { access_token, expires_in = 3600, refresh_token, refresh_token_expires_in } = tokenData;
|
||||
if (!access_token) {
|
||||
logger.error('Access token not found: ', tokenData);
|
||||
throw new Error('Access token not found');
|
||||
}
|
||||
const handleOAuthToken = createHandleOAuthToken({
|
||||
findToken,
|
||||
updateToken,
|
||||
createToken,
|
||||
});
|
||||
await handleOAuthToken({
|
||||
identifier,
|
||||
token: access_token,
|
||||
expiresIn: expires_in,
|
||||
userId,
|
||||
});
|
||||
|
||||
if (refresh_token != null) {
|
||||
logger.debug('Processing refresh token');
|
||||
await handleOAuthToken({
|
||||
token: refresh_token,
|
||||
type: 'oauth_refresh',
|
||||
userId,
|
||||
identifier: `${identifier}:refresh`,
|
||||
expiresIn: refresh_token_expires_in ?? null,
|
||||
});
|
||||
}
|
||||
logger.debug('Access tokens processed');
|
||||
}
|
||||
|
||||
/**
|
||||
* Refreshes the access token using the refresh token.
|
||||
* @param fields
|
||||
* @param fields.userId - The ID of the user.
|
||||
* @param fields.client_url - The URL of the OAuth provider.
|
||||
* @param fields.identifier - The identifier for the token.
|
||||
* @param fields.refresh_token - The refresh token to use.
|
||||
* @param fields.token_exchange_method - The token exchange method ('default_post' or 'basic_auth_header').
|
||||
* @param fields.encrypted_oauth_client_id - The client ID for the OAuth provider.
|
||||
* @param fields.encrypted_oauth_client_secret - The client secret for the OAuth provider.
|
||||
*/
|
||||
export async function refreshAccessToken(
|
||||
{
|
||||
userId,
|
||||
client_url,
|
||||
identifier,
|
||||
refresh_token,
|
||||
token_exchange_method,
|
||||
encrypted_oauth_client_id,
|
||||
encrypted_oauth_client_secret,
|
||||
}: {
|
||||
userId: string;
|
||||
client_url: string;
|
||||
identifier: string;
|
||||
refresh_token: string;
|
||||
token_exchange_method: TokenExchangeMethodEnum;
|
||||
encrypted_oauth_client_id: string;
|
||||
encrypted_oauth_client_secret: string;
|
||||
},
|
||||
{
|
||||
findToken,
|
||||
updateToken,
|
||||
createToken,
|
||||
}: {
|
||||
findToken: TokenMethods['findToken'];
|
||||
updateToken: TokenMethods['updateToken'];
|
||||
createToken: TokenMethods['createToken'];
|
||||
},
|
||||
): Promise<{
|
||||
access_token: string;
|
||||
expires_in: number;
|
||||
refresh_token?: string;
|
||||
refresh_token_expires_in?: number;
|
||||
}> {
|
||||
try {
|
||||
const oauth_client_id = await decryptV2(encrypted_oauth_client_id);
|
||||
const oauth_client_secret = await decryptV2(encrypted_oauth_client_secret);
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
Accept: 'application/json',
|
||||
};
|
||||
|
||||
const params = new URLSearchParams({
|
||||
grant_type: 'refresh_token',
|
||||
refresh_token,
|
||||
});
|
||||
|
||||
if (token_exchange_method === TokenExchangeMethodEnum.BasicAuthHeader) {
|
||||
const basicAuth = Buffer.from(`${oauth_client_id}:${oauth_client_secret}`).toString('base64');
|
||||
headers['Authorization'] = `Basic ${basicAuth}`;
|
||||
} else {
|
||||
params.append('client_id', oauth_client_id);
|
||||
params.append('client_secret', oauth_client_secret);
|
||||
}
|
||||
|
||||
const response = await axios({
|
||||
method: 'POST',
|
||||
url: client_url,
|
||||
headers,
|
||||
data: params.toString(),
|
||||
});
|
||||
await processAccessTokens(
|
||||
response.data,
|
||||
{
|
||||
userId,
|
||||
identifier,
|
||||
},
|
||||
{
|
||||
findToken,
|
||||
updateToken,
|
||||
createToken,
|
||||
},
|
||||
);
|
||||
logger.debug(`Access token refreshed successfully for ${identifier}`);
|
||||
return response.data;
|
||||
} catch (error) {
|
||||
const message = 'Error refreshing OAuth tokens';
|
||||
throw new Error(
|
||||
logAxiosError({
|
||||
message,
|
||||
error: error as AxiosError,
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles the OAuth callback and exchanges the authorization code for tokens.
|
||||
* @param {object} fields
|
||||
* @param {string} fields.code - The authorization code returned by the provider.
|
||||
* @param {string} fields.userId - The ID of the user.
|
||||
* @param {string} fields.identifier - The identifier for the token.
|
||||
* @param {string} fields.client_url - The URL of the OAuth provider.
|
||||
* @param {string} fields.redirect_uri - The redirect URI for the OAuth provider.
|
||||
* @param {string} fields.token_exchange_method - The token exchange method ('default_post' or 'basic_auth_header').
|
||||
* @param {string} fields.encrypted_oauth_client_id - The client ID for the OAuth provider.
|
||||
* @param {string} fields.encrypted_oauth_client_secret - The client secret for the OAuth provider.
|
||||
*/
|
||||
export async function getAccessToken(
|
||||
{
|
||||
code,
|
||||
userId,
|
||||
identifier,
|
||||
client_url,
|
||||
redirect_uri,
|
||||
token_exchange_method,
|
||||
encrypted_oauth_client_id,
|
||||
encrypted_oauth_client_secret,
|
||||
}: {
|
||||
code: string;
|
||||
userId: string;
|
||||
identifier: string;
|
||||
client_url: string;
|
||||
redirect_uri: string;
|
||||
token_exchange_method: TokenExchangeMethodEnum;
|
||||
encrypted_oauth_client_id: string;
|
||||
encrypted_oauth_client_secret: string;
|
||||
},
|
||||
{
|
||||
findToken,
|
||||
updateToken,
|
||||
createToken,
|
||||
}: {
|
||||
findToken: TokenMethods['findToken'];
|
||||
updateToken: TokenMethods['updateToken'];
|
||||
createToken: TokenMethods['createToken'];
|
||||
},
|
||||
): Promise<{
|
||||
access_token: string;
|
||||
expires_in: number;
|
||||
refresh_token?: string;
|
||||
refresh_token_expires_in?: number;
|
||||
}> {
|
||||
const oauth_client_id = await decryptV2(encrypted_oauth_client_id);
|
||||
const oauth_client_secret = await decryptV2(encrypted_oauth_client_secret);
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
Accept: 'application/json',
|
||||
};
|
||||
|
||||
const params = new URLSearchParams({
|
||||
code,
|
||||
grant_type: 'authorization_code',
|
||||
redirect_uri,
|
||||
});
|
||||
|
||||
if (token_exchange_method === TokenExchangeMethodEnum.BasicAuthHeader) {
|
||||
const basicAuth = Buffer.from(`${oauth_client_id}:${oauth_client_secret}`).toString('base64');
|
||||
headers['Authorization'] = `Basic ${basicAuth}`;
|
||||
} else {
|
||||
params.append('client_id', oauth_client_id);
|
||||
params.append('client_secret', oauth_client_secret);
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await axios({
|
||||
method: 'POST',
|
||||
url: client_url,
|
||||
headers,
|
||||
data: params.toString(),
|
||||
});
|
||||
|
||||
await processAccessTokens(
|
||||
response.data,
|
||||
{
|
||||
userId,
|
||||
identifier,
|
||||
},
|
||||
{
|
||||
findToken,
|
||||
updateToken,
|
||||
createToken,
|
||||
},
|
||||
);
|
||||
logger.debug(`Access tokens successfully created for ${identifier}`);
|
||||
return response.data;
|
||||
} catch (error) {
|
||||
const message = 'Error exchanging OAuth code';
|
||||
throw new Error(
|
||||
logAxiosError({
|
||||
message,
|
||||
error: error as AxiosError,
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
19
packages/api/src/types/azure.ts
Normal file
19
packages/api/src/types/azure.ts
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
/**
 * Azure OpenAI configuration interface.
 * All fields are optional; they map onto the segments of the Azure OpenAI URL
 * (see `genAzureEndpoint` / `genAzureChatCompletion` in utils/azure.ts).
 */
export interface AzureOptions {
  /** API key used to authenticate against Azure OpenAI */
  azureOpenAIApiKey?: string;
  /** Azure resource (instance) name: the `<instance>` in `https://<instance>.openai.azure.com` */
  azureOpenAIApiInstanceName?: string;
  /** Deployment-name segment of the Azure OpenAI URL */
  azureOpenAIApiDeploymentName?: string;
  /** Value for the `api-version` query parameter */
  azureOpenAIApiVersion?: string;
  /** Fully-qualified base path override for the Azure OpenAI API */
  azureOpenAIBasePath?: string;
}

/**
 * Client with azure property for setting deployment name.
 * Used by `genAzureChatCompletion` to write the sanitized deployment name back
 * onto the client at runtime.
 */
export interface GenericClient {
  azure: {
    /** Deployment name assigned by callers at runtime */
    azureOpenAIApiDeploymentName?: string;
  };
}
|
||||
4
packages/api/src/types/events.ts
Normal file
4
packages/api/src/types/events.ts
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
/**
 * A single Server-Sent Event payload.
 */
export type ServerSentEvent = {
  /** Event payload: a raw string or a JSON-serializable object */
  data: string | Record<string, unknown>;
  /** Optional SSE event name */
  event?: string;
};
|
||||
24
packages/api/src/types/google.ts
Normal file
24
packages/api/src/types/google.ts
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
import { z } from 'zod';
import { AuthKeys, googleBaseSchema } from 'librechat-data-provider';

/** Model parameters accepted by Google endpoints, derived from the shared zod schema */
export type GoogleParameters = z.infer<typeof googleBaseSchema>;

/**
 * Credentials for Google endpoints; either a service key or an API key may be
 * present. Keys are the shared `AuthKeys` enum values.
 */
export type GoogleCredentials = {
  [AuthKeys.GOOGLE_SERVICE_KEY]?: string;
  [AuthKeys.GOOGLE_API_KEY]?: string;
};

/**
 * Configuration options for the getLLMConfig function
 */
export interface GoogleConfigOptions {
  /** Partial model parameters merged into the LLM config */
  modelOptions?: Partial<GoogleParameters>;
  /** Optional reverse-proxy base URL */
  reverseProxyUrl?: string;
  /** Default query parameters appended to requests */
  defaultQuery?: Record<string, string | undefined>;
  /** Extra HTTP headers */
  headers?: Record<string, string>;
  /** Proxy URL, if any */
  proxy?: string;
  /** Whether to stream responses */
  streaming?: boolean;
  /** Presumably controls sending the key via an Authorization header — confirm against consumer */
  authHeader?: boolean;
  /** Additional parameters to add to the request payload */
  addParams?: Record<string, unknown>;
  /** Parameter names to remove from the request payload */
  dropParams?: string[];
}
|
||||
6
packages/api/src/types/index.ts
Normal file
6
packages/api/src/types/index.ts
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
/** Barrel file re-exporting all shared type modules of the api package */
export * from './azure';
export * from './events';
export * from './google';
export * from './mistral';
export * from './openai';
export * from './run';
||||
82
packages/api/src/types/mistral.ts
Normal file
82
packages/api/src/types/mistral.ts
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
/**
 * Mistral OCR API Types
 * Based on https://docs.mistral.ai/api/#tag/ocr/operation/ocr_v1_ocr_post
 */

/** Response returned after uploading a file to Mistral's files API */
export interface MistralFileUploadResponse {
  id: string;
  object: string;
  /** File size in bytes */
  bytes: number;
  /** Unix timestamp of upload */
  created_at: number;
  filename: string;
  purpose: string;
}

/** Signed URL response for accessing an uploaded file */
export interface MistralSignedUrlResponse {
  url: string;
  /** Expiry of the signed URL — presumably a Unix timestamp, confirm against API docs */
  expires_at: number;
}

/** An image extracted from a page by OCR, with its bounding box */
export interface OCRImage {
  id: string;
  top_left_x: number;
  top_left_y: number;
  bottom_right_x: number;
  bottom_right_y: number;
  /** Base64-encoded image data */
  image_base64: string;
  image_annotation?: string;
}

/** Dimensions of an OCR-processed page */
export interface PageDimensions {
  dpi: number;
  height: number;
  width: number;
}

/** OCR output for a single page */
export interface OCRResultPage {
  index: number;
  /** Page text rendered as markdown */
  markdown: string;
  images: OCRImage[];
  dimensions: PageDimensions;
}

/** Usage info reported by the OCR endpoint */
export interface OCRUsageInfo {
  pages_processed: number;
  doc_size_bytes: number;
}

/** Full OCR result across all pages */
export interface OCRResult {
  pages: OCRResultPage[];
  model: string;
  document_annotation?: string | null;
  usage_info: OCRUsageInfo;
}

/** Request body for the Mistral OCR endpoint */
export interface MistralOCRRequest {
  model: string;
  image_limit?: number;
  include_image_base64?: boolean;
  /** Exactly one of `document_url` / `image_url` is expected, matching `type` */
  document: {
    type: 'document_url' | 'image_url';
    document_url?: string;
    image_url?: string;
  };
}

/** Error shapes the Mistral OCR API may return (fields vary by failure mode) */
export interface MistralOCRError {
  detail?: string;
  message?: string;
  error?: {
    message?: string;
    type?: string;
    code?: string;
  };
}

/** Normalized result produced after uploading and OCR-processing a document */
export interface MistralOCRUploadResult {
  filename: string;
  bytes: number;
  filepath: string;
  /** Extracted OCR text */
  text: string;
  images: string[];
}
|
||||
99
packages/api/src/types/openai.ts
Normal file
99
packages/api/src/types/openai.ts
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
import { z } from 'zod';
import { openAISchema, EModelEndpoint } from 'librechat-data-provider';
import type { TEndpointOption, TAzureConfig, TEndpoint } from 'librechat-data-provider';
import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
import type { OpenAIClientOptions } from '@librechat/agents';
import type { AzureOptions } from './azure';

/** OpenAI model parameters derived from the shared zod schema */
export type OpenAIParameters = z.infer<typeof openAISchema>;

/**
 * Configuration options for the getLLMConfig function
 */
export interface OpenAIConfigOptions {
  /** Partial model parameters merged into the LLM config */
  modelOptions?: Partial<OpenAIParameters>;
  /** Optional reverse-proxy base URL */
  reverseProxyUrl?: string;
  /** Default query parameters appended to requests */
  defaultQuery?: Record<string, string | undefined>;
  /** Extra HTTP headers */
  headers?: Record<string, string>;
  /** Proxy URL, if any */
  proxy?: string;
  /** Azure OpenAI options, when targeting Azure */
  azure?: AzureOptions;
  /** Whether to stream responses */
  streaming?: boolean;
  /** Additional parameters to add to the request payload */
  addParams?: Record<string, unknown>;
  /** Parameter names to remove from the request payload */
  dropParams?: string[];
}

/** The `configuration` sub-object of the agents package's OpenAI client options */
export type OpenAIConfiguration = OpenAIClientOptions['configuration'];

/** Client options extended with an extra `include_reasoning` flag */
export type ClientOptions = OpenAIClientOptions & {
  include_reasoning?: boolean;
};

/**
 * Return type for getLLMConfig function
 */
export interface LLMConfigResult {
  llmConfig: ClientOptions;
  configOptions: OpenAIConfiguration;
  /** Tools to bind to the model, when any */
  tools?: BindToolsInput[];
}

/**
 * Interface for user values retrieved from the database
 */
export interface UserKeyValues {
  apiKey?: string;
  baseURL?: string;
}

/**
 * Request interface with only the properties we need (avoids Express typing conflicts)
 */
export interface RequestData {
  user: {
    id: string;
  };
  body: {
    model?: string;
    endpoint?: string;
    key?: string;
  };
  app: {
    locals: {
      [EModelEndpoint.azureOpenAI]?: TAzureConfig;
      [EModelEndpoint.openAI]?: TEndpoint;
      all?: TEndpoint;
    };
  };
}

/**
 * Function type for getting user key values
 */
export type GetUserKeyValuesFunction = (params: {
  userId: string;
  name: string;
}) => Promise<UserKeyValues>;

/**
 * Function type for checking user key expiry
 */
export type CheckUserKeyExpiryFunction = (expiresAt: string, endpoint: string) => void;

/**
 * Parameters for the initializeOpenAI function
 */
export interface InitializeOpenAIOptionsParams {
  req: RequestData;
  /** Model override taking precedence over `req.body.model` — confirm against consumer */
  overrideModel?: string;
  /** Endpoint override taking precedence over `req.body.endpoint` — confirm against consumer */
  overrideEndpoint?: string;
  endpointOption: Partial<TEndpointOption>;
  getUserKeyValues: GetUserKeyValuesFunction;
  checkUserKeyExpiry: CheckUserKeyExpiryFunction;
}

/**
 * Extended LLM config result with stream rate handling
 */
export interface OpenAIOptionsResult extends LLMConfigResult {
  streamRate?: number;
}
|
||||
12
packages/api/src/types/run.ts
Normal file
12
packages/api/src/types/run.ts
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
import type { Providers, ClientOptions } from '@librechat/agents';
import type { AgentModelParameters } from 'librechat-data-provider';
import type { OpenAIConfiguration } from './openai';

/**
 * LLM configuration for an agent run: provider and streaming flags, combined
 * with the agent's model parameters and the provider client options.
 */
export type RunLLMConfig = {
  provider: Providers;
  streaming: boolean;
  streamUsage: boolean;
  usage?: boolean;
  /** Provider-level configuration (base URL, headers, etc.) */
  configuration?: OpenAIConfiguration;
} & AgentModelParameters &
  ClientOptions;
|
||||
131
packages/api/src/utils/axios.spec.ts
Normal file
131
packages/api/src/utils/axios.spec.ts
Normal file
|
|
@ -0,0 +1,131 @@
|
|||
import axios from 'axios';
|
||||
import { createAxiosInstance } from './axios';
|
||||
|
||||
// Unit tests for createAxiosInstance. axios is fully mocked so no network
// traffic occurs; `create` returns a stub whose `defaults.proxy` starts null.
jest.mock('axios', () => ({
  interceptors: {
    request: { use: jest.fn(), eject: jest.fn() },
    response: { use: jest.fn(), eject: jest.fn() },
  },
  create: jest.fn().mockReturnValue({
    defaults: {
      proxy: null,
    },
    get: jest.fn().mockResolvedValue({ data: {} }),
    post: jest.fn().mockResolvedValue({ data: {} }),
    put: jest.fn().mockResolvedValue({ data: {} }),
    delete: jest.fn().mockResolvedValue({ data: {} }),
  }),
  get: jest.fn().mockResolvedValue({ data: {} }),
  post: jest.fn().mockResolvedValue({ data: {} }),
  put: jest.fn().mockResolvedValue({ data: {} }),
  delete: jest.fn().mockResolvedValue({ data: {} }),
  reset: jest.fn().mockImplementation(function (this: {
    get: jest.Mock;
    post: jest.Mock;
    put: jest.Mock;
    delete: jest.Mock;
    create: jest.Mock;
  }) {
    this.get.mockClear();
    this.post.mockClear();
    this.put.mockClear();
    this.delete.mockClear();
    this.create.mockClear();
  }),
}));

describe('createAxiosInstance', () => {
  const originalEnv = process.env;

  beforeEach(() => {
    // Reset mocks
    jest.clearAllMocks();
    // Create a clean copy of process.env
    process.env = { ...originalEnv };
    // Default: no proxy
    delete process.env.proxy;
  });

  afterAll(() => {
    // Restore original process.env
    process.env = originalEnv;
  });

  test('creates an axios instance without proxy when no proxy env is set', () => {
    const instance = createAxiosInstance();

    expect(axios.create).toHaveBeenCalledTimes(1);
    expect(instance.defaults.proxy).toBeNull();
  });

  test('configures proxy correctly with hostname and protocol', () => {
    process.env.proxy = 'http://example.com';

    const instance = createAxiosInstance();

    expect(axios.create).toHaveBeenCalledTimes(1);
    // No port in the URL, so the proxy config omits `port`
    expect(instance.defaults.proxy).toEqual({
      host: 'example.com',
      protocol: 'http',
    });
  });

  test('configures proxy correctly with hostname, protocol and port', () => {
    process.env.proxy = 'https://proxy.example.com:8080';

    const instance = createAxiosInstance();

    expect(axios.create).toHaveBeenCalledTimes(1);
    expect(instance.defaults.proxy).toEqual({
      host: 'proxy.example.com',
      protocol: 'https',
      port: 8080,
    });
  });

  test('handles proxy URLs with authentication', () => {
    process.env.proxy = 'http://user:pass@proxy.example.com:3128';

    const instance = createAxiosInstance();

    expect(axios.create).toHaveBeenCalledTimes(1);
    expect(instance.defaults.proxy).toEqual({
      host: 'proxy.example.com',
      protocol: 'http',
      port: 3128,
      // Note: The current implementation doesn't handle auth - if needed, add this functionality
    });
  });

  test('throws error when proxy URL is invalid', () => {
    process.env.proxy = 'invalid-url';

    expect(() => createAxiosInstance()).toThrow('Invalid proxy URL');
    // axios.create is still called once before the URL parse fails
    expect(axios.create).toHaveBeenCalledTimes(1);
  });

  // If you want to test the actual URL parsing more thoroughly
  test('handles edge case proxy URLs correctly', () => {
    // IPv6 address: the implementation strips the WHATWG URL brackets from the host
    process.env.proxy = 'http://[::1]:8080';

    let instance = createAxiosInstance();

    expect(instance.defaults.proxy).toEqual({
      host: '::1',
      protocol: 'http',
      port: 8080,
    });

    // URL with path (which should be ignored for proxy config)
    process.env.proxy = 'http://proxy.example.com:8080/some/path';

    instance = createAxiosInstance();

    expect(instance.defaults.proxy).toEqual({
      host: 'proxy.example.com',
      protocol: 'http',
      port: 8080,
    });
  });
});
|
||||
77
packages/api/src/utils/axios.ts
Normal file
77
packages/api/src/utils/axios.ts
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
import axios from 'axios';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import type { AxiosInstance, AxiosProxyConfig, AxiosError } from 'axios';
|
||||
|
||||
/**
|
||||
* Logs Axios errors based on the error object and a custom message.
|
||||
* @param options - The options object.
|
||||
* @param options.message - The custom message to be logged.
|
||||
* @param options.error - The Axios error object.
|
||||
* @returns The log message.
|
||||
*/
|
||||
export const logAxiosError = ({ message, error }: { message: string; error: AxiosError }) => {
|
||||
let logMessage = message;
|
||||
try {
|
||||
const stack = error.stack || 'No stack trace available';
|
||||
|
||||
if (error.response?.status) {
|
||||
const { status, headers, data } = error.response;
|
||||
logMessage = `${message} The server responded with status ${status}: ${error.message}`;
|
||||
logger.error(logMessage, {
|
||||
status,
|
||||
headers,
|
||||
data,
|
||||
stack,
|
||||
});
|
||||
} else if (error.request) {
|
||||
const { method, url } = error.config || {};
|
||||
logMessage = `${message} No response received for ${method ? method.toUpperCase() : ''} ${url || ''}: ${error.message}`;
|
||||
logger.error(logMessage, {
|
||||
requestInfo: { method, url },
|
||||
stack,
|
||||
});
|
||||
} else if (error?.message?.includes("Cannot read properties of undefined (reading 'status')")) {
|
||||
logMessage = `${message} It appears the request timed out or was unsuccessful: ${error.message}`;
|
||||
logger.error(logMessage, { stack });
|
||||
} else {
|
||||
logMessage = `${message} An error occurred while setting up the request: ${error.message}`;
|
||||
logger.error(logMessage, { stack });
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
logMessage = `Error in logAxiosError: ${(err as Error).message}`;
|
||||
logger.error(logMessage, { stack: (err as Error).stack || 'No stack trace available' });
|
||||
}
|
||||
return logMessage;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates and configures an Axios instance with optional proxy settings.
|
||||
|
||||
* @returns A configured Axios instance
|
||||
* @throws If there's an issue creating the Axios instance or parsing the proxy URL
|
||||
*/
|
||||
export function createAxiosInstance(): AxiosInstance {
|
||||
const instance = axios.create();
|
||||
|
||||
if (process.env.proxy) {
|
||||
try {
|
||||
const url = new URL(process.env.proxy);
|
||||
|
||||
const proxyConfig: Partial<AxiosProxyConfig> = {
|
||||
host: url.hostname.replace(/^\[|\]$/g, ''),
|
||||
protocol: url.protocol.replace(':', ''),
|
||||
};
|
||||
|
||||
if (url.port) {
|
||||
proxyConfig.port = parseInt(url.port, 10);
|
||||
}
|
||||
|
||||
instance.defaults.proxy = proxyConfig as AxiosProxyConfig;
|
||||
} catch (error) {
|
||||
console.error('Error parsing proxy URL:', error);
|
||||
throw new Error(`Invalid proxy URL: ${process.env.proxy}`);
|
||||
}
|
||||
}
|
||||
|
||||
return instance;
|
||||
}
|
||||
269
packages/api/src/utils/azure.spec.ts
Normal file
269
packages/api/src/utils/azure.spec.ts
Normal file
|
|
@ -0,0 +1,269 @@
|
|||
import {
|
||||
genAzureChatCompletion,
|
||||
getAzureCredentials,
|
||||
constructAzureURL,
|
||||
sanitizeModelName,
|
||||
genAzureEndpoint,
|
||||
} from './azure';
|
||||
import type { GenericClient } from '~/types';
|
||||
|
||||
// Unit tests for the azure URL/credential helpers. Several tests toggle
// AZURE_USE_MODEL_AS_DEPLOYMENT_NAME; that variable is cleaned up in the
// afterEach of the genAzureChatCompletion suite.
describe('sanitizeModelName', () => {
  test('removes periods from the model name', () => {
    const sanitized = sanitizeModelName('model.name');
    expect(sanitized).toBe('modelname');
  });

  test('leaves model name unchanged if no periods are present', () => {
    const sanitized = sanitizeModelName('modelname');
    expect(sanitized).toBe('modelname');
  });
});

describe('genAzureEndpoint', () => {
  test('generates correct endpoint URL', () => {
    const url = genAzureEndpoint({
      azureOpenAIApiInstanceName: 'instanceName',
      azureOpenAIApiDeploymentName: 'deploymentName',
    });
    expect(url).toBe('https://instanceName.openai.azure.com/openai/deployments/deploymentName');
  });
});

describe('genAzureChatCompletion', () => {
  // Test with both deployment name and model name provided
  test('prefers model name over deployment name when both are provided and feature enabled', () => {
    process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'true';
    const url = genAzureChatCompletion(
      {
        azureOpenAIApiInstanceName: 'instanceName',
        azureOpenAIApiDeploymentName: 'deploymentName',
        azureOpenAIApiVersion: 'v1',
      },
      'modelName',
    );
    expect(url).toBe(
      'https://instanceName.openai.azure.com/openai/deployments/modelName/chat/completions?api-version=v1',
    );
  });

  // Test with only deployment name provided
  test('uses deployment name when model name is not provided', () => {
    const url = genAzureChatCompletion({
      azureOpenAIApiInstanceName: 'instanceName',
      azureOpenAIApiDeploymentName: 'deploymentName',
      azureOpenAIApiVersion: 'v1',
    });
    expect(url).toBe(
      'https://instanceName.openai.azure.com/openai/deployments/deploymentName/chat/completions?api-version=v1',
    );
  });

  // Test with only model name provided
  test('uses model name when deployment name is not provided and feature enabled', () => {
    process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'true';
    const url = genAzureChatCompletion(
      {
        azureOpenAIApiInstanceName: 'instanceName',
        azureOpenAIApiVersion: 'v1',
      },
      'modelName',
    );
    expect(url).toBe(
      'https://instanceName.openai.azure.com/openai/deployments/modelName/chat/completions?api-version=v1',
    );
  });

  // Test with neither deployment name nor model name provided
  test('throws error if neither deployment name nor model name is provided', () => {
    expect(() => {
      genAzureChatCompletion({
        azureOpenAIApiInstanceName: 'instanceName',
        azureOpenAIApiVersion: 'v1',
      });
    }).toThrow(
      'Either a model name with the `AZURE_USE_MODEL_AS_DEPLOYMENT_NAME` setting or a deployment name must be provided if `AZURE_OPENAI_BASEURL` is omitted.',
    );
  });

  // Test with feature disabled but model name provided
  test('ignores model name and uses deployment name when feature is disabled', () => {
    process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'false';
    const url = genAzureChatCompletion(
      {
        azureOpenAIApiInstanceName: 'instanceName',
        azureOpenAIApiDeploymentName: 'deploymentName',
        azureOpenAIApiVersion: 'v1',
      },
      'modelName',
    );
    expect(url).toBe(
      'https://instanceName.openai.azure.com/openai/deployments/deploymentName/chat/completions?api-version=v1',
    );
  });

  // Test with sanitized model name
  test('sanitizes model name when used in URL', () => {
    process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'true';
    const url = genAzureChatCompletion(
      {
        azureOpenAIApiInstanceName: 'instanceName',
        azureOpenAIApiVersion: 'v1',
      },
      'model.name',
    );
    expect(url).toBe(
      'https://instanceName.openai.azure.com/openai/deployments/modelname/chat/completions?api-version=v1',
    );
  });

  // Test with client parameter and model name
  test('updates client with sanitized model name when provided and feature enabled', () => {
    process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'true';
    const clientMock = { azure: {} } as GenericClient;
    const url = genAzureChatCompletion(
      {
        azureOpenAIApiInstanceName: 'instanceName',
        azureOpenAIApiVersion: 'v1',
      },
      'model.name',
      clientMock,
    );
    expect(url).toBe(
      'https://instanceName.openai.azure.com/openai/deployments/modelname/chat/completions?api-version=v1',
    );
    expect(clientMock.azure.azureOpenAIApiDeploymentName).toBe('modelname');
  });

  // Test with client parameter but without model name
  test('does not update client when model name is not provided', () => {
    const clientMock = { azure: {} } as GenericClient;
    const url = genAzureChatCompletion(
      {
        azureOpenAIApiInstanceName: 'instanceName',
        azureOpenAIApiDeploymentName: 'deploymentName',
        azureOpenAIApiVersion: 'v1',
      },
      undefined,
      clientMock,
    );
    expect(url).toBe(
      'https://instanceName.openai.azure.com/openai/deployments/deploymentName/chat/completions?api-version=v1',
    );
    expect(clientMock.azure.azureOpenAIApiDeploymentName).toBeUndefined();
  });

  // Test with client parameter and deployment name when feature is disabled
  test('does not update client when feature is disabled', () => {
    process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME = 'false';
    const clientMock = { azure: {} } as GenericClient;
    const url = genAzureChatCompletion(
      {
        azureOpenAIApiInstanceName: 'instanceName',
        azureOpenAIApiDeploymentName: 'deploymentName',
        azureOpenAIApiVersion: 'v1',
      },
      'modelName',
      clientMock,
    );
    expect(url).toBe(
      'https://instanceName.openai.azure.com/openai/deployments/deploymentName/chat/completions?api-version=v1',
    );
    expect(clientMock.azure.azureOpenAIApiDeploymentName).toBeUndefined();
  });

  // Reset environment variable after tests
  afterEach(() => {
    delete process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME;
  });
});

describe('getAzureCredentials', () => {
  beforeEach(() => {
    process.env.AZURE_API_KEY = 'testApiKey';
    process.env.AZURE_OPENAI_API_INSTANCE_NAME = 'instanceName';
    process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME = 'deploymentName';
    process.env.AZURE_OPENAI_API_VERSION = 'v1';
  });

  test('retrieves Azure OpenAI API credentials from environment variables', () => {
    const credentials = getAzureCredentials();
    expect(credentials).toEqual({
      azureOpenAIApiKey: 'testApiKey',
      azureOpenAIApiInstanceName: 'instanceName',
      azureOpenAIApiDeploymentName: 'deploymentName',
      azureOpenAIApiVersion: 'v1',
    });
  });
});

describe('constructAzureURL', () => {
  test('replaces both placeholders when both properties are provided', () => {
    const url = constructAzureURL({
      baseURL: 'https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}',
      azureOptions: {
        azureOpenAIApiInstanceName: 'instance1',
        azureOpenAIApiDeploymentName: 'deployment1',
      },
    });
    expect(url).toBe('https://example.com/instance1/deployment1');
  });

  test('replaces only INSTANCE_NAME when only azureOpenAIApiInstanceName is provided', () => {
    // Missing values are replaced with empty strings, not left in place
    const url = constructAzureURL({
      baseURL: 'https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}',
      azureOptions: {
        azureOpenAIApiInstanceName: 'instance2',
      },
    });
    expect(url).toBe('https://example.com/instance2/');
  });

  test('replaces only DEPLOYMENT_NAME when only azureOpenAIApiDeploymentName is provided', () => {
    const url = constructAzureURL({
      baseURL: 'https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}',
      azureOptions: {
        azureOpenAIApiDeploymentName: 'deployment2',
      },
    });
    expect(url).toBe('https://example.com//deployment2');
  });

  test('does not replace any placeholders when azure object is empty', () => {
    const url = constructAzureURL({
      baseURL: 'https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}',
      azureOptions: {},
    });
    expect(url).toBe('https://example.com//');
  });

  test('returns baseURL as is when `azureOptions` object is not provided', () => {
    const url = constructAzureURL({
      baseURL: 'https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}',
    });
    expect(url).toBe('https://example.com/${INSTANCE_NAME}/${DEPLOYMENT_NAME}');
  });

  test('returns baseURL as is when no placeholders are set', () => {
    const url = constructAzureURL({
      baseURL: 'https://example.com/my_custom_instance/my_deployment',
      azureOptions: {
        azureOpenAIApiInstanceName: 'instance1',
        azureOpenAIApiDeploymentName: 'deployment1',
      },
    });
    expect(url).toBe('https://example.com/my_custom_instance/my_deployment');
  });

  test('returns regular Azure OpenAI baseURL with placeholders set', () => {
    const baseURL =
      'https://${INSTANCE_NAME}.openai.azure.com/openai/deployments/${DEPLOYMENT_NAME}';
    const url = constructAzureURL({
      baseURL,
      azureOptions: {
        azureOpenAIApiInstanceName: 'instance1',
        azureOpenAIApiDeploymentName: 'deployment1',
      },
    });
    expect(url).toBe('https://instance1.openai.azure.com/openai/deployments/deployment1');
  });
});
|
||||
120
packages/api/src/utils/azure.ts
Normal file
120
packages/api/src/utils/azure.ts
Normal file
|
|
@ -0,0 +1,120 @@
|
|||
import { isEnabled } from './common';
|
||||
import type { AzureOptions, GenericClient } from '~/types';
|
||||
|
||||
/**
|
||||
* Sanitizes the model name to be used in the URL by removing or replacing disallowed characters.
|
||||
* @param modelName - The model name to be sanitized.
|
||||
* @returns The sanitized model name.
|
||||
*/
|
||||
export const sanitizeModelName = (modelName: string): string => {
|
||||
// Replace periods with empty strings and other disallowed characters as needed.
|
||||
return modelName.replace(/\./g, '');
|
||||
};
|
||||
|
||||
/**
|
||||
* Generates the Azure OpenAI API endpoint URL.
|
||||
* @param params - The parameters object.
|
||||
* @param params.azureOpenAIApiInstanceName - The Azure OpenAI API instance name.
|
||||
* @param params.azureOpenAIApiDeploymentName - The Azure OpenAI API deployment name.
|
||||
* @returns The complete endpoint URL for the Azure OpenAI API.
|
||||
*/
|
||||
export const genAzureEndpoint = ({
|
||||
azureOpenAIApiInstanceName,
|
||||
azureOpenAIApiDeploymentName,
|
||||
}: {
|
||||
azureOpenAIApiInstanceName: string;
|
||||
azureOpenAIApiDeploymentName: string;
|
||||
}): string => {
|
||||
return `https://${azureOpenAIApiInstanceName}.openai.azure.com/openai/deployments/${azureOpenAIApiDeploymentName}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* Generates the Azure OpenAI API chat completion endpoint URL with the API version.
|
||||
* If both deploymentName and modelName are provided, modelName takes precedence.
|
||||
* @param azureConfig - The Azure configuration object.
|
||||
* @param azureConfig.azureOpenAIApiInstanceName - The Azure OpenAI API instance name.
|
||||
* @param azureConfig.azureOpenAIApiDeploymentName - The Azure OpenAI API deployment name (optional).
|
||||
* @param azureConfig.azureOpenAIApiVersion - The Azure OpenAI API version.
|
||||
* @param modelName - The model name to be included in the deployment name (optional).
|
||||
* @param client - The API Client class for optionally setting properties (optional).
|
||||
* @returns The complete chat completion endpoint URL for the Azure OpenAI API.
|
||||
* @throws Error if neither azureOpenAIApiDeploymentName nor modelName is provided.
|
||||
*/
|
||||
export const genAzureChatCompletion = (
|
||||
{
|
||||
azureOpenAIApiInstanceName,
|
||||
azureOpenAIApiDeploymentName,
|
||||
azureOpenAIApiVersion,
|
||||
}: {
|
||||
azureOpenAIApiInstanceName: string;
|
||||
azureOpenAIApiDeploymentName?: string;
|
||||
azureOpenAIApiVersion: string;
|
||||
},
|
||||
modelName?: string,
|
||||
client?: GenericClient,
|
||||
): string => {
|
||||
// Determine the deployment segment of the URL based on provided modelName or azureOpenAIApiDeploymentName
|
||||
let deploymentSegment: string;
|
||||
if (isEnabled(process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME) && modelName) {
|
||||
const sanitizedModelName = sanitizeModelName(modelName);
|
||||
deploymentSegment = sanitizedModelName;
|
||||
if (client && typeof client === 'object') {
|
||||
client.azure.azureOpenAIApiDeploymentName = sanitizedModelName;
|
||||
}
|
||||
} else if (azureOpenAIApiDeploymentName) {
|
||||
deploymentSegment = azureOpenAIApiDeploymentName;
|
||||
} else if (!process.env.AZURE_OPENAI_BASEURL) {
|
||||
throw new Error(
|
||||
'Either a model name with the `AZURE_USE_MODEL_AS_DEPLOYMENT_NAME` setting or a deployment name must be provided if `AZURE_OPENAI_BASEURL` is omitted.',
|
||||
);
|
||||
} else {
|
||||
deploymentSegment = '';
|
||||
}
|
||||
|
||||
return `https://${azureOpenAIApiInstanceName}.openai.azure.com/openai/deployments/${deploymentSegment}/chat/completions?api-version=${azureOpenAIApiVersion}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieves the Azure OpenAI API credentials from environment variables.
|
||||
* @returns An object containing the Azure OpenAI API credentials.
|
||||
*/
|
||||
export const getAzureCredentials = (): AzureOptions => {
|
||||
return {
|
||||
azureOpenAIApiKey: process.env.AZURE_API_KEY ?? process.env.AZURE_OPENAI_API_KEY,
|
||||
azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_API_INSTANCE_NAME,
|
||||
azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME,
|
||||
azureOpenAIApiVersion: process.env.AZURE_OPENAI_API_VERSION,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Constructs a URL by replacing placeholders in the baseURL with values from the azure object.
|
||||
* It specifically looks for '${INSTANCE_NAME}' and '${DEPLOYMENT_NAME}' within the baseURL and replaces
|
||||
* them with 'azureOpenAIApiInstanceName' and 'azureOpenAIApiDeploymentName' from the azure object.
|
||||
* If the respective azure property is not provided, the placeholder is replaced with an empty string.
|
||||
*
|
||||
* @param params - The parameters object.
|
||||
* @param params.baseURL - The baseURL to inspect for replacement placeholders.
|
||||
* @param params.azureOptions - The azure options object containing the instance and deployment names.
|
||||
* @returns The complete baseURL with credentials injected for the Azure OpenAI API.
|
||||
*/
|
||||
export function constructAzureURL({
|
||||
baseURL,
|
||||
azureOptions,
|
||||
}: {
|
||||
baseURL: string;
|
||||
azureOptions?: AzureOptions;
|
||||
}): string {
|
||||
let finalURL = baseURL;
|
||||
|
||||
// Replace INSTANCE_NAME and DEPLOYMENT_NAME placeholders with actual values if available
|
||||
if (azureOptions) {
|
||||
finalURL = finalURL.replace('${INSTANCE_NAME}', azureOptions.azureOpenAIApiInstanceName ?? '');
|
||||
finalURL = finalURL.replace(
|
||||
'${DEPLOYMENT_NAME}',
|
||||
azureOptions.azureOpenAIApiDeploymentName ?? '',
|
||||
);
|
||||
}
|
||||
|
||||
return finalURL;
|
||||
}
|
||||
55
packages/api/src/utils/common.spec.ts
Normal file
55
packages/api/src/utils/common.spec.ts
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
/* eslint-disable @typescript-eslint/ban-ts-comment */
|
||||
import { isEnabled } from './common';
|
||||
|
||||
// Unit tests for `isEnabled`: only boolean `true` or a case-insensitive,
// trimmed string 'true' should count as enabled; everything else is false.
describe('isEnabled', () => {
  test('should return true when input is "true"', () => {
    expect(isEnabled('true')).toBe(true);
  });

  test('should return true when input is "TRUE"', () => {
    expect(isEnabled('TRUE')).toBe(true);
  });

  test('should return true when input is true', () => {
    expect(isEnabled(true)).toBe(true);
  });

  test('should return false when input is "false"', () => {
    expect(isEnabled('false')).toBe(false);
  });

  test('should return false when input is false', () => {
    expect(isEnabled(false)).toBe(false);
  });

  test('should return false when input is null', () => {
    expect(isEnabled(null)).toBe(false);
  });

  test('should return false when input is undefined', () => {
    expect(isEnabled()).toBe(false);
  });

  test('should return false when input is an empty string', () => {
    expect(isEnabled('')).toBe(false);
  });

  test('should return false when input is a whitespace string', () => {
    expect(isEnabled(' ')).toBe(false);
  });

  // The following cases deliberately pass types outside the declared signature
  // to verify the runtime guard, hence the @ts-expect-error suppressions.
  test('should return false when input is a number', () => {
    // @ts-expect-error
    expect(isEnabled(123)).toBe(false);
  });

  test('should return false when input is an object', () => {
    // @ts-expect-error
    expect(isEnabled({})).toBe(false);
  });

  test('should return false when input is an array', () => {
    // @ts-expect-error
    expect(isEnabled([])).toBe(false);
  });
});
|
||||
48
packages/api/src/utils/common.ts
Normal file
48
packages/api/src/utils/common.ts
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
/**
|
||||
* Checks if the given value is truthy by being either the boolean `true` or a string
|
||||
* that case-insensitively matches 'true'.
|
||||
*
|
||||
* @param value - The value to check.
|
||||
* @returns Returns `true` if the value is the boolean `true` or a case-insensitive
|
||||
* match for the string 'true', otherwise returns `false`.
|
||||
* @example
|
||||
*
|
||||
* isEnabled("True"); // returns true
|
||||
* isEnabled("TRUE"); // returns true
|
||||
* isEnabled(true); // returns true
|
||||
* isEnabled("false"); // returns false
|
||||
* isEnabled(false); // returns false
|
||||
* isEnabled(null); // returns false
|
||||
* isEnabled(); // returns false
|
||||
*/
|
||||
export function isEnabled(value?: string | boolean | null | undefined): boolean {
|
||||
if (typeof value === 'boolean') {
|
||||
return value;
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
return value.toLowerCase().trim() === 'true';
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the provided value is 'user_provided'.
|
||||
*
|
||||
* @param value - The value to check.
|
||||
* @returns - Returns true if the value is 'user_provided', otherwise false.
|
||||
*/
|
||||
export const isUserProvided = (value?: string): boolean => value === 'user_provided';
|
||||
|
||||
/**
|
||||
* @param values
|
||||
*/
|
||||
export function optionalChainWithEmptyCheck(
|
||||
...values: (string | number | undefined)[]
|
||||
): string | number | undefined {
|
||||
for (const value of values) {
|
||||
if (value !== undefined && value !== null && value !== '') {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
return values[values.length - 1];
|
||||
}
|
||||
429
packages/api/src/utils/env.spec.ts
Normal file
429
packages/api/src/utils/env.spec.ts
Normal file
|
|
@ -0,0 +1,429 @@
|
|||
import { resolveHeaders } from './env';
|
||||
import type { TUser } from 'librechat-data-provider';
|
||||
|
||||
// Helper function to create test user objects
|
||||
function createTestUser(overrides: Partial<TUser> = {}): TUser {
|
||||
return {
|
||||
id: 'test-user-id',
|
||||
username: 'testuser',
|
||||
email: 'test@example.com',
|
||||
name: 'Test User',
|
||||
avatar: 'https://example.com/avatar.png',
|
||||
provider: 'email',
|
||||
role: 'user',
|
||||
createdAt: new Date('2021-01-01').toISOString(),
|
||||
updatedAt: new Date('2021-01-01').toISOString(),
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// Unit tests for `resolveHeaders`: verifies replacement of ${ENV_VAR} system
// variables, {{LIBRECHAT_USER_*}} user-field placeholders, and {{CUSTOM}} user
// variables, plus immutability of the input headers object.
describe('resolveHeaders', () => {
  beforeEach(() => {
    // Set up test environment variables
    process.env.TEST_API_KEY = 'test-api-key-value';
    process.env.ANOTHER_SECRET = 'another-secret-value';
  });

  afterEach(() => {
    // Clean up environment variables
    delete process.env.TEST_API_KEY;
    delete process.env.ANOTHER_SECRET;
  });

  it('should return empty object when headers is undefined', () => {
    const result = resolveHeaders(undefined);
    expect(result).toEqual({});
  });

  it('should return empty object when headers is null', () => {
    const result = resolveHeaders(null as unknown as Record<string, string> | undefined);
    expect(result).toEqual({});
  });

  it('should return empty object when headers is empty', () => {
    const result = resolveHeaders({});
    expect(result).toEqual({});
  });

  it('should process environment variables in headers', () => {
    const headers = {
      Authorization: '${TEST_API_KEY}',
      'X-Secret': '${ANOTHER_SECRET}',
      'Content-Type': 'application/json',
    };

    const result = resolveHeaders(headers);

    expect(result).toEqual({
      Authorization: 'test-api-key-value',
      'X-Secret': 'another-secret-value',
      'Content-Type': 'application/json',
    });
  });

  it('should process user ID placeholder when user has id', () => {
    const user = { id: 'test-user-123' };
    const headers = {
      'User-Id': '{{LIBRECHAT_USER_ID}}',
      'Content-Type': 'application/json',
    };

    const result = resolveHeaders(headers, user);

    expect(result).toEqual({
      'User-Id': 'test-user-123',
      'Content-Type': 'application/json',
    });
  });

  it('should not process user ID placeholder when user is undefined', () => {
    const headers = {
      'User-Id': '{{LIBRECHAT_USER_ID}}',
      'Content-Type': 'application/json',
    };

    const result = resolveHeaders(headers);

    expect(result).toEqual({
      'User-Id': '{{LIBRECHAT_USER_ID}}',
      'Content-Type': 'application/json',
    });
  });

  it('should not process user ID placeholder when user has no id', () => {
    const user = { id: '' };
    const headers = {
      'User-Id': '{{LIBRECHAT_USER_ID}}',
      'Content-Type': 'application/json',
    };

    const result = resolveHeaders(headers, user);

    expect(result).toEqual({
      'User-Id': '{{LIBRECHAT_USER_ID}}',
      'Content-Type': 'application/json',
    });
  });

  it('should process full user object placeholders', () => {
    const user = createTestUser({
      id: 'user-123',
      email: 'test@example.com',
      username: 'testuser',
      name: 'Test User',
      role: 'admin',
    });

    const headers = {
      'User-Email': '{{LIBRECHAT_USER_EMAIL}}',
      'User-Name': '{{LIBRECHAT_USER_NAME}}',
      'User-Username': '{{LIBRECHAT_USER_USERNAME}}',
      'User-Role': '{{LIBRECHAT_USER_ROLE}}',
      'User-Id': '{{LIBRECHAT_USER_ID}}',
      'Content-Type': 'application/json',
    };

    const result = resolveHeaders(headers, user);

    expect(result).toEqual({
      'User-Email': 'test@example.com',
      'User-Name': 'Test User',
      'User-Username': 'testuser',
      'User-Role': 'admin',
      'User-Id': 'user-123',
      'Content-Type': 'application/json',
    });
  });

  it('should handle missing user fields gracefully', () => {
    const user = createTestUser({
      id: 'user-123',
      email: 'test@example.com',
      username: undefined, // explicitly set to undefined
    });

    const headers = {
      'User-Email': '{{LIBRECHAT_USER_EMAIL}}',
      'User-Username': '{{LIBRECHAT_USER_USERNAME}}',
      'Non-Existent': '{{LIBRECHAT_USER_NONEXISTENT}}',
    };

    const result = resolveHeaders(headers, user);

    expect(result).toEqual({
      'User-Email': 'test@example.com',
      'User-Username': '', // Empty string for missing field
      'Non-Existent': '{{LIBRECHAT_USER_NONEXISTENT}}', // Unchanged for non-existent field
    });
  });

  it('should process custom user variables', () => {
    const user = { id: 'user-123' };
    const customUserVars = {
      CUSTOM_TOKEN: 'user-specific-token',
      REGION: 'us-west-1',
    };

    const headers = {
      Authorization: 'Bearer {{CUSTOM_TOKEN}}',
      'X-Region': '{{REGION}}',
      'X-System-Key': '${TEST_API_KEY}',
      'X-User-Id': '{{LIBRECHAT_USER_ID}}',
    };

    const result = resolveHeaders(headers, user, customUserVars);

    expect(result).toEqual({
      Authorization: 'Bearer user-specific-token',
      'X-Region': 'us-west-1',
      'X-System-Key': 'test-api-key-value',
      'X-User-Id': 'user-123',
    });
  });

  it('should prioritize custom user variables over user fields', () => {
    const user = createTestUser({
      id: 'user-123',
      email: 'user-email@example.com',
    });
    const customUserVars = {
      LIBRECHAT_USER_EMAIL: 'custom-email@example.com',
    };

    const headers = {
      'Test-Email': '{{LIBRECHAT_USER_EMAIL}}',
    };

    const result = resolveHeaders(headers, user, customUserVars);

    expect(result).toEqual({
      'Test-Email': 'custom-email@example.com',
    });
  });

  it('should handle boolean user fields', () => {
    const user = createTestUser({
      id: 'user-123',
      // Note: TUser doesn't have these boolean fields, so we'll test with string fields
      role: 'admin',
    });

    const headers = {
      'User-Role': '{{LIBRECHAT_USER_ROLE}}',
      'User-Id': '{{LIBRECHAT_USER_ID}}',
    };

    const result = resolveHeaders(headers, user);

    expect(result).toEqual({
      'User-Role': 'admin',
      'User-Id': 'user-123',
    });
  });

  it('should handle multiple occurrences of the same placeholder', () => {
    const user = createTestUser({
      id: 'user-123',
      email: 'test@example.com',
    });

    const headers = {
      'Primary-Email': '{{LIBRECHAT_USER_EMAIL}}',
      'Secondary-Email': '{{LIBRECHAT_USER_EMAIL}}',
      'Backup-Email': '{{LIBRECHAT_USER_EMAIL}}',
    };

    const result = resolveHeaders(headers, user);

    expect(result).toEqual({
      'Primary-Email': 'test@example.com',
      'Secondary-Email': 'test@example.com',
      'Backup-Email': 'test@example.com',
    });
  });

  it('should handle mixed variable types in the same headers object', () => {
    const user = createTestUser({
      id: 'user-123',
      email: 'test@example.com',
    });
    const customUserVars = {
      CUSTOM_TOKEN: 'secret-token',
    };

    const headers = {
      Authorization: 'Bearer {{CUSTOM_TOKEN}}',
      'X-User-Id': '{{LIBRECHAT_USER_ID}}',
      'X-System-Key': '${TEST_API_KEY}',
      'X-User-Email': '{{LIBRECHAT_USER_EMAIL}}',
      'Content-Type': 'application/json',
    };

    const result = resolveHeaders(headers, user, customUserVars);

    expect(result).toEqual({
      Authorization: 'Bearer secret-token',
      'X-User-Id': 'user-123',
      'X-System-Key': 'test-api-key-value',
      'X-User-Email': 'test@example.com',
      'Content-Type': 'application/json',
    });
  });

  it('should not modify the original headers object', () => {
    const originalHeaders = {
      Authorization: '${TEST_API_KEY}',
      'User-Id': '{{LIBRECHAT_USER_ID}}',
    };
    const user = { id: 'user-123' };

    const result = resolveHeaders(originalHeaders, user);

    // Verify the result is processed
    expect(result).toEqual({
      Authorization: 'test-api-key-value',
      'User-Id': 'user-123',
    });

    // Verify the original object is unchanged
    expect(originalHeaders).toEqual({
      Authorization: '${TEST_API_KEY}',
      'User-Id': '{{LIBRECHAT_USER_ID}}',
    });
  });

  it('should handle special characters in custom variable names', () => {
    const user = { id: 'user-123' };
    const customUserVars = {
      'CUSTOM-VAR': 'dash-value',
      CUSTOM_VAR: 'underscore-value',
      'CUSTOM.VAR': 'dot-value',
    };

    const headers = {
      'Dash-Header': '{{CUSTOM-VAR}}',
      'Underscore-Header': '{{CUSTOM_VAR}}',
      'Dot-Header': '{{CUSTOM.VAR}}',
    };

    const result = resolveHeaders(headers, user, customUserVars);

    expect(result).toEqual({
      'Dash-Header': 'dash-value',
      'Underscore-Header': 'underscore-value',
      'Dot-Header': 'dot-value',
    });
  });

  // Additional comprehensive tests for all user field placeholders
  it('should replace all allowed user field placeholders', () => {
    const user = {
      id: 'abc',
      name: 'Test User',
      username: 'testuser',
      email: 'me@example.com',
      provider: 'google',
      role: 'admin',
      googleId: 'gid',
      facebookId: 'fbid',
      openidId: 'oid',
      samlId: 'sid',
      ldapId: 'lid',
      githubId: 'ghid',
      discordId: 'dcid',
      appleId: 'aid',
      emailVerified: true,
      twoFactorEnabled: false,
      termsAccepted: true,
    };

    const headers = {
      'X-User-ID': '{{LIBRECHAT_USER_ID}}',
      'X-User-Name': '{{LIBRECHAT_USER_NAME}}',
      'X-User-Username': '{{LIBRECHAT_USER_USERNAME}}',
      'X-User-Email': '{{LIBRECHAT_USER_EMAIL}}',
      'X-User-Provider': '{{LIBRECHAT_USER_PROVIDER}}',
      'X-User-Role': '{{LIBRECHAT_USER_ROLE}}',
      'X-User-GoogleId': '{{LIBRECHAT_USER_GOOGLEID}}',
      'X-User-FacebookId': '{{LIBRECHAT_USER_FACEBOOKID}}',
      'X-User-OpenIdId': '{{LIBRECHAT_USER_OPENIDID}}',
      'X-User-SamlId': '{{LIBRECHAT_USER_SAMLID}}',
      'X-User-LdapId': '{{LIBRECHAT_USER_LDAPID}}',
      'X-User-GithubId': '{{LIBRECHAT_USER_GITHUBID}}',
      'X-User-DiscordId': '{{LIBRECHAT_USER_DISCORDID}}',
      'X-User-AppleId': '{{LIBRECHAT_USER_APPLEID}}',
      'X-User-EmailVerified': '{{LIBRECHAT_USER_EMAILVERIFIED}}',
      'X-User-TwoFactorEnabled': '{{LIBRECHAT_USER_TWOFACTORENABLED}}',
      'X-User-TermsAccepted': '{{LIBRECHAT_USER_TERMSACCEPTED}}',
    };

    const result = resolveHeaders(headers, user);

    expect(result['X-User-ID']).toBe('abc');
    expect(result['X-User-Name']).toBe('Test User');
    expect(result['X-User-Username']).toBe('testuser');
    expect(result['X-User-Email']).toBe('me@example.com');
    expect(result['X-User-Provider']).toBe('google');
    expect(result['X-User-Role']).toBe('admin');
    expect(result['X-User-GoogleId']).toBe('gid');
    expect(result['X-User-FacebookId']).toBe('fbid');
    expect(result['X-User-OpenIdId']).toBe('oid');
    expect(result['X-User-SamlId']).toBe('sid');
    expect(result['X-User-LdapId']).toBe('lid');
    expect(result['X-User-GithubId']).toBe('ghid');
    expect(result['X-User-DiscordId']).toBe('dcid');
    expect(result['X-User-AppleId']).toBe('aid');
    expect(result['X-User-EmailVerified']).toBe('true');
    expect(result['X-User-TwoFactorEnabled']).toBe('false');
    expect(result['X-User-TermsAccepted']).toBe('true');
  });

  it('should handle multiple placeholders in one value', () => {
    const user = { id: 'abc', email: 'me@example.com' };
    const headers = {
      'X-Multi': 'User: {{LIBRECHAT_USER_ID}}, Env: ${TEST_API_KEY}, Custom: {{MY_CUSTOM}}',
    };
    const customVars = { MY_CUSTOM: 'custom-value' };
    const result = resolveHeaders(headers, user, customVars);
    expect(result['X-Multi']).toBe('User: abc, Env: test-api-key-value, Custom: custom-value');
  });

  it('should leave unknown placeholders unchanged', () => {
    const user = { id: 'abc' };
    const headers = {
      'X-Unknown': '{{SOMETHING_NOT_RECOGNIZED}}',
      'X-Known': '{{LIBRECHAT_USER_ID}}',
    };
    const result = resolveHeaders(headers, user);
    expect(result['X-Unknown']).toBe('{{SOMETHING_NOT_RECOGNIZED}}');
    expect(result['X-Known']).toBe('abc');
  });

  it('should handle a mix of all types', () => {
    const user = {
      id: 'abc',
      email: 'me@example.com',
      emailVerified: true,
      twoFactorEnabled: false,
    };
    const headers = {
      'X-User': '{{LIBRECHAT_USER_ID}}',
      'X-Env': '${TEST_API_KEY}',
      'X-Custom': '{{MY_CUSTOM}}',
      'X-Multi': 'ID: {{LIBRECHAT_USER_ID}}, ENV: ${TEST_API_KEY}, CUSTOM: {{MY_CUSTOM}}',
      'X-Unknown': '{{NOT_A_REAL_PLACEHOLDER}}',
      'X-Empty': '',
      'X-Boolean': '{{LIBRECHAT_USER_EMAILVERIFIED}}',
    };
    const customVars = { MY_CUSTOM: 'custom-value' };
    const result = resolveHeaders(headers, user, customVars);

    expect(result['X-User']).toBe('abc');
    expect(result['X-Env']).toBe('test-api-key-value');
    expect(result['X-Custom']).toBe('custom-value');
    expect(result['X-Multi']).toBe('ID: abc, ENV: test-api-key-value, CUSTOM: custom-value');
    expect(result['X-Unknown']).toBe('{{NOT_A_REAL_PLACEHOLDER}}');
    expect(result['X-Empty']).toBe('');
    expect(result['X-Boolean']).toBe('true');
  });
});
|
||||
170
packages/api/src/utils/env.ts
Normal file
170
packages/api/src/utils/env.ts
Normal file
|
|
@ -0,0 +1,170 @@
|
|||
import { extractEnvVariable } from 'librechat-data-provider';
|
||||
import type { TUser, MCPOptions } from 'librechat-data-provider';
|
||||
|
||||
/**
|
||||
* List of allowed user fields that can be used in MCP environment variables.
|
||||
* These are non-sensitive string/boolean fields from the IUser interface.
|
||||
*/
|
||||
const ALLOWED_USER_FIELDS = [
|
||||
'id',
|
||||
'name',
|
||||
'username',
|
||||
'email',
|
||||
'provider',
|
||||
'role',
|
||||
'googleId',
|
||||
'facebookId',
|
||||
'openidId',
|
||||
'samlId',
|
||||
'ldapId',
|
||||
'githubId',
|
||||
'discordId',
|
||||
'appleId',
|
||||
'emailVerified',
|
||||
'twoFactorEnabled',
|
||||
'termsAccepted',
|
||||
] as const;
|
||||
|
||||
/**
|
||||
* Processes a string value to replace user field placeholders
|
||||
* @param value - The string value to process
|
||||
* @param user - The user object
|
||||
* @returns The processed string with placeholders replaced
|
||||
*/
|
||||
function processUserPlaceholders(value: string, user?: TUser): string {
|
||||
if (!user || typeof value !== 'string') {
|
||||
return value;
|
||||
}
|
||||
|
||||
for (const field of ALLOWED_USER_FIELDS) {
|
||||
const placeholder = `{{LIBRECHAT_USER_${field.toUpperCase()}}}`;
|
||||
if (!value.includes(placeholder)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const fieldValue = user[field as keyof TUser];
|
||||
|
||||
// Skip replacement if field doesn't exist in user object
|
||||
if (!(field in user)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Special case for 'id' field: skip if undefined or empty
|
||||
if (field === 'id' && (fieldValue === undefined || fieldValue === '')) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const replacementValue = fieldValue == null ? '' : String(fieldValue);
|
||||
value = value.replace(new RegExp(placeholder, 'g'), replacementValue);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Processes a single string value by replacing various types of placeholders
|
||||
* @param originalValue - The original string value to process
|
||||
* @param customUserVars - Optional custom user variables to replace placeholders
|
||||
* @param user - Optional user object for replacing user field placeholders
|
||||
* @returns The processed string with all placeholders replaced
|
||||
*/
|
||||
function processSingleValue({
|
||||
originalValue,
|
||||
customUserVars,
|
||||
user,
|
||||
}: {
|
||||
originalValue: string;
|
||||
customUserVars?: Record<string, string>;
|
||||
user?: TUser;
|
||||
}): string {
|
||||
let value = originalValue;
|
||||
|
||||
// 1. Replace custom user variables
|
||||
if (customUserVars) {
|
||||
for (const [varName, varVal] of Object.entries(customUserVars)) {
|
||||
/** Escaped varName for use in regex to avoid issues with special characters */
|
||||
const escapedVarName = varName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
const placeholderRegex = new RegExp(`\\{\\{${escapedVarName}\\}\\}`, 'g');
|
||||
value = value.replace(placeholderRegex, varVal);
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Replace user field placeholders (e.g., {{LIBRECHAT_USER_EMAIL}}, {{LIBRECHAT_USER_ID}})
|
||||
value = processUserPlaceholders(value, user);
|
||||
|
||||
// 3. Replace system environment variables
|
||||
value = extractEnvVariable(value);
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively processes an object to replace environment variables in string values
|
||||
* @param obj - The object to process
|
||||
* @param user - The user object containing all user fields
|
||||
* @param customUserVars - vars that user set in settings
|
||||
* @returns - The processed object with environment variables replaced
|
||||
*/
|
||||
export function processMCPEnv(
|
||||
obj: Readonly<MCPOptions>,
|
||||
user?: TUser,
|
||||
customUserVars?: Record<string, string>,
|
||||
): MCPOptions {
|
||||
if (obj === null || obj === undefined) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
const newObj: MCPOptions = structuredClone(obj);
|
||||
|
||||
if ('env' in newObj && newObj.env) {
|
||||
const processedEnv: Record<string, string> = {};
|
||||
for (const [key, originalValue] of Object.entries(newObj.env)) {
|
||||
processedEnv[key] = processSingleValue({ originalValue, customUserVars, user });
|
||||
}
|
||||
newObj.env = processedEnv;
|
||||
}
|
||||
|
||||
// Process headers if they exist (for WebSocket, SSE, StreamableHTTP types)
|
||||
// Note: `env` and `headers` are on different branches of the MCPOptions union type.
|
||||
if ('headers' in newObj && newObj.headers) {
|
||||
const processedHeaders: Record<string, string> = {};
|
||||
for (const [key, originalValue] of Object.entries(newObj.headers)) {
|
||||
processedHeaders[key] = processSingleValue({ originalValue, customUserVars, user });
|
||||
}
|
||||
newObj.headers = processedHeaders;
|
||||
}
|
||||
|
||||
// Process URL if it exists (for WebSocket, SSE, StreamableHTTP types)
|
||||
if ('url' in newObj && newObj.url) {
|
||||
newObj.url = processSingleValue({ originalValue: newObj.url, customUserVars, user });
|
||||
}
|
||||
|
||||
return newObj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves header values by replacing user placeholders, custom variables, and environment variables
|
||||
* @param headers - The headers object to process
|
||||
* @param user - Optional user object for replacing user field placeholders (can be partial with just id)
|
||||
* @param customUserVars - Optional custom user variables to replace placeholders
|
||||
* @returns - The processed headers with all placeholders replaced
|
||||
*/
|
||||
export function resolveHeaders(
|
||||
headers: Record<string, string> | undefined,
|
||||
user?: Partial<TUser> | { id: string },
|
||||
customUserVars?: Record<string, string>,
|
||||
) {
|
||||
const resolvedHeaders = { ...(headers ?? {}) };
|
||||
|
||||
if (headers && typeof headers === 'object' && !Array.isArray(headers)) {
|
||||
Object.keys(headers).forEach((key) => {
|
||||
resolvedHeaders[key] = processSingleValue({
|
||||
originalValue: headers[key],
|
||||
customUserVars,
|
||||
user: user as TUser,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return resolvedHeaders;
|
||||
}
|
||||
26
packages/api/src/utils/events.ts
Normal file
26
packages/api/src/utils/events.ts
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
import type { Response as ServerResponse } from 'express';
|
||||
import type { ServerSentEvent } from '~/types';
|
||||
|
||||
/**
|
||||
* Sends message data in Server Sent Events format.
|
||||
* @param res - The server response.
|
||||
* @param event - The message event.
|
||||
* @param event.event - The type of event.
|
||||
* @param event.data - The message to be sent.
|
||||
*/
|
||||
export function sendEvent(res: ServerResponse, event: ServerSentEvent): void {
|
||||
if (typeof event.data === 'string' && event.data.length === 0) {
|
||||
return;
|
||||
}
|
||||
res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends error data in Server Sent Events format and ends the response.
|
||||
* @param res - The server response.
|
||||
* @param message - The error message.
|
||||
*/
|
||||
export function handleError(res: ServerResponse, message: string): void {
|
||||
res.write(`event: error\ndata: ${JSON.stringify(message)}\n\n`);
|
||||
res.end();
|
||||
}
|
||||
115
packages/api/src/utils/files.spec.ts
Normal file
115
packages/api/src/utils/files.spec.ts
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
import { sanitizeFilename } from './files';
|
||||
|
||||
// Pin crypto.randomBytes to a fixed value so truncation suffixes are deterministic.
jest.mock('node:crypto', () => {
  const actualModule = jest.requireActual('node:crypto');
  return {
    ...actualModule,
    randomBytes: jest.fn().mockReturnValue(Buffer.from('abc123', 'hex')),
  };
});

// Unit tests for `sanitizeFilename` with mocked (deterministic) randomness.
describe('sanitizeFilename', () => {
  test('removes directory components (1/2)', () => {
    expect(sanitizeFilename('/path/to/file.txt')).toBe('file.txt');
  });

  test('removes directory components (2/2)', () => {
    expect(sanitizeFilename('../../../../file.txt')).toBe('file.txt');
  });

  test('replaces non-alphanumeric characters', () => {
    expect(sanitizeFilename('file name@#$.txt')).toBe('file_name___.txt');
  });

  test('preserves dots and hyphens', () => {
    expect(sanitizeFilename('file-name.with.dots.txt')).toBe('file-name.with.dots.txt');
  });

  test('prepends underscore to filenames starting with a dot', () => {
    expect(sanitizeFilename('.hiddenfile')).toBe('_.hiddenfile');
  });

  test('truncates long filenames', () => {
    const longName = 'a'.repeat(300) + '.txt';
    const result = sanitizeFilename(longName);
    expect(result.length).toBe(255);
    expect(result).toMatch(/^a+-abc123\.txt$/);
  });

  test('handles filenames with no extension', () => {
    const longName = 'a'.repeat(300);
    const result = sanitizeFilename(longName);
    expect(result.length).toBe(255);
    expect(result).toMatch(/^a+-abc123$/);
  });

  test('handles empty input', () => {
    expect(sanitizeFilename('')).toBe('_');
  });

  test('handles input with only special characters', () => {
    expect(sanitizeFilename('@#$%^&*')).toBe('_______');
  });
});
|
||||
|
||||
// Same function exercised with REAL randomness: suffixes must be valid hex and
// unique across calls. Requires re-importing the module after unmocking crypto.
describe('sanitizeFilename with real crypto', () => {
  // Temporarily unmock crypto for these tests
  beforeAll(() => {
    jest.resetModules();
    jest.unmock('node:crypto');
  });

  afterAll(() => {
    // Restore the deterministic mock so later suites see the pinned value.
    jest.resetModules();
    jest.mock('node:crypto', () => {
      const actualModule = jest.requireActual('node:crypto');
      return {
        ...actualModule,
        randomBytes: jest.fn().mockReturnValue(Buffer.from('abc123', 'hex')),
      };
    });
  });

  test('truncates long filenames with real crypto', async () => {
    const { sanitizeFilename: realSanitizeFilename } = await import('./files');
    const longName = 'b'.repeat(300) + '.pdf';
    const result = realSanitizeFilename(longName);

    expect(result.length).toBe(255);
    expect(result).toMatch(/^b+-[a-f0-9]{6}\.pdf$/);
    expect(result.endsWith('.pdf')).toBe(true);
  });

  test('handles filenames with no extension with real crypto', async () => {
    const { sanitizeFilename: realSanitizeFilename } = await import('./files');
    const longName = 'c'.repeat(300);
    const result = realSanitizeFilename(longName);

    expect(result.length).toBe(255);
    expect(result).toMatch(/^c+-[a-f0-9]{6}$/);
    expect(result).not.toContain('.');
  });

  test('generates unique suffixes for identical long filenames', async () => {
    const { sanitizeFilename: realSanitizeFilename } = await import('./files');
    const longName = 'd'.repeat(300) + '.doc';
    const result1 = realSanitizeFilename(longName);
    const result2 = realSanitizeFilename(longName);

    expect(result1.length).toBe(255);
    expect(result2.length).toBe(255);
    expect(result1).not.toBe(result2); // Should be different due to random suffix
    expect(result1.endsWith('.doc')).toBe(true);
    expect(result2.endsWith('.doc')).toBe(true);
  });

  test('real crypto produces valid hex strings', async () => {
    const { sanitizeFilename: realSanitizeFilename } = await import('./files');
    const longName = 'test'.repeat(100) + '.txt';
    const result = realSanitizeFilename(longName);

    const hexMatch = result.match(/-([a-f0-9]{6})\.txt$/);
    expect(hexMatch).toBeTruthy();
    expect(hexMatch![1]).toMatch(/^[a-f0-9]{6}$/);
  });
});
|
||||
33
packages/api/src/utils/files.ts
Normal file
33
packages/api/src/utils/files.ts
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
import path from 'path';
|
||||
import crypto from 'node:crypto';
|
||||
|
||||
/**
|
||||
* Sanitize a filename by removing any directory components, replacing non-alphanumeric characters
|
||||
* @param inputName
|
||||
*/
|
||||
export function sanitizeFilename(inputName: string): string {
|
||||
// Remove any directory components
|
||||
let name = path.basename(inputName);
|
||||
|
||||
// Replace any non-alphanumeric characters except for '.' and '-'
|
||||
name = name.replace(/[^a-zA-Z0-9.-]/g, '_');
|
||||
|
||||
// Ensure the name doesn't start with a dot (hidden file in Unix-like systems)
|
||||
if (name.startsWith('.') || name === '') {
|
||||
name = '_' + name;
|
||||
}
|
||||
|
||||
// Limit the length of the filename
|
||||
const MAX_LENGTH = 255;
|
||||
if (name.length > MAX_LENGTH) {
|
||||
const ext = path.extname(name);
|
||||
const nameWithoutExt = path.basename(name, ext);
|
||||
name =
|
||||
nameWithoutExt.slice(0, MAX_LENGTH - ext.length - 7) +
|
||||
'-' +
|
||||
crypto.randomBytes(3).toString('hex') +
|
||||
ext;
|
||||
}
|
||||
|
||||
return name;
|
||||
}
|
||||
75
packages/api/src/utils/generators.ts
Normal file
75
packages/api/src/utils/generators.ts
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
import fetch from 'node-fetch';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import { GraphEvents, sleep } from '@librechat/agents';
|
||||
import type { Response as ServerResponse } from 'express';
|
||||
import type { ServerSentEvent } from '~/types';
|
||||
import { sendEvent } from './events';
|
||||
|
||||
/**
|
||||
* Makes a function to make HTTP request and logs the process.
|
||||
* @param params
|
||||
* @param params.directEndpoint - Whether to use a direct endpoint.
|
||||
* @param params.reverseProxyUrl - The reverse proxy URL to use for the request.
|
||||
* @returns A promise that resolves to the response of the fetch request.
|
||||
*/
|
||||
export function createFetch({
|
||||
directEndpoint = false,
|
||||
reverseProxyUrl = '',
|
||||
}: {
|
||||
directEndpoint?: boolean;
|
||||
reverseProxyUrl?: string;
|
||||
}) {
|
||||
/**
|
||||
* Makes an HTTP request and logs the process.
|
||||
* @param url - The URL to make the request to. Can be a string or a Request object.
|
||||
* @param init - Optional init options for the request.
|
||||
* @returns A promise that resolves to the response of the fetch request.
|
||||
*/
|
||||
return async function (
|
||||
_url: fetch.RequestInfo,
|
||||
init: fetch.RequestInit,
|
||||
): Promise<fetch.Response> {
|
||||
let url = _url;
|
||||
if (directEndpoint) {
|
||||
url = reverseProxyUrl;
|
||||
}
|
||||
logger.debug(`Making request to ${url}`);
|
||||
if (typeof Bun !== 'undefined') {
|
||||
return await fetch(url, init);
|
||||
}
|
||||
return await fetch(url, init);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates event handlers for stream events that don't capture client references
|
||||
* @param res - The response object to send events to
|
||||
* @returns Object containing handler functions
|
||||
*/
|
||||
export function createStreamEventHandlers(res: ServerResponse) {
|
||||
return {
|
||||
[GraphEvents.ON_RUN_STEP]: function (event: ServerSentEvent) {
|
||||
if (res) {
|
||||
sendEvent(res, event);
|
||||
}
|
||||
},
|
||||
[GraphEvents.ON_MESSAGE_DELTA]: function (event: ServerSentEvent) {
|
||||
if (res) {
|
||||
sendEvent(res, event);
|
||||
}
|
||||
},
|
||||
[GraphEvents.ON_REASONING_DELTA]: function (event: ServerSentEvent) {
|
||||
if (res) {
|
||||
sendEvent(res, event);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function createHandleLLMNewToken(streamRate: number) {
|
||||
return async function () {
|
||||
if (streamRate) {
|
||||
await sleep(streamRate);
|
||||
}
|
||||
};
|
||||
}
|
||||
14
packages/api/src/utils/index.ts
Normal file
14
packages/api/src/utils/index.ts
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
export * from './axios';
|
||||
export * from './azure';
|
||||
export * from './common';
|
||||
export * from './env';
|
||||
export * from './events';
|
||||
export * from './files';
|
||||
export * from './generators';
|
||||
export * from './key';
|
||||
export * from './llm';
|
||||
export * from './math';
|
||||
export * from './openid';
|
||||
export * from './tempChatRetention';
|
||||
export { default as Tokenizer } from './tokenizer';
|
||||
export * from './yaml';
|
||||
70
packages/api/src/utils/key.ts
Normal file
70
packages/api/src/utils/key.ts
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import axios from 'axios';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
|
||||
export interface GoogleServiceKey {
|
||||
type?: string;
|
||||
project_id?: string;
|
||||
private_key_id?: string;
|
||||
private_key?: string;
|
||||
client_email?: string;
|
||||
client_id?: string;
|
||||
auth_uri?: string;
|
||||
token_uri?: string;
|
||||
auth_provider_x509_cert_url?: string;
|
||||
client_x509_cert_url?: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load Google service key from file path or URL
|
||||
* @param keyPath - The path or URL to the service key file
|
||||
* @returns The parsed service key object or null if failed
|
||||
*/
|
||||
export async function loadServiceKey(keyPath: string): Promise<GoogleServiceKey | null> {
|
||||
if (!keyPath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let serviceKey: unknown;
|
||||
|
||||
// Check if it's a URL
|
||||
if (/^https?:\/\//.test(keyPath)) {
|
||||
try {
|
||||
const response = await axios.get(keyPath);
|
||||
serviceKey = response.data;
|
||||
} catch (error) {
|
||||
logger.error(`Failed to fetch the service key from URL: ${keyPath}`, error);
|
||||
return null;
|
||||
}
|
||||
} else {
|
||||
// It's a file path
|
||||
try {
|
||||
const absolutePath = path.isAbsolute(keyPath) ? keyPath : path.resolve(keyPath);
|
||||
const fileContent = fs.readFileSync(absolutePath, 'utf8');
|
||||
serviceKey = JSON.parse(fileContent);
|
||||
} catch (error) {
|
||||
logger.error(`Failed to load service key from file: ${keyPath}`, error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// If the response is a string (e.g., from a URL that returns JSON as text), parse it
|
||||
if (typeof serviceKey === 'string') {
|
||||
try {
|
||||
serviceKey = JSON.parse(serviceKey);
|
||||
} catch (parseError) {
|
||||
logger.error(`Failed to parse service key JSON from ${keyPath}`, parseError);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Validate the service key has required fields
|
||||
if (!serviceKey || typeof serviceKey !== 'object') {
|
||||
logger.error(`Invalid service key format from ${keyPath}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
return serviceKey as GoogleServiceKey;
|
||||
}
|
||||
189
packages/api/src/utils/llm.test.ts
Normal file
189
packages/api/src/utils/llm.test.ts
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
import { extractLibreChatParams } from './llm';
|
||||
|
||||
/**
 * Unit tests for extractLibreChatParams (./llm): verifies that the
 * LibreChat-only fields (resendFiles, promptPrefix, maxContextTokens,
 * modelLabel) are split out from the remaining model options, that defaults
 * apply when fields are absent, and that the input object is never mutated.
 */
describe('extractLibreChatParams', () => {
  it('should return defaults when options is undefined', () => {
    const result = extractLibreChatParams(undefined);

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should return defaults when options is null', () => {
    // NOTE(review): despite the test name, this calls with no argument
    // (undefined) — the function's parameter type does not accept null.
    const result = extractLibreChatParams();

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should extract all LibreChat params and leave model options', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'You are a helpful assistant',
      maxContextTokens: 4096,
      modelLabel: 'GPT-4',
      model: 'gpt-4',
      temperature: 0.7,
      max_tokens: 1000,
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBe('You are a helpful assistant');
    expect(result.maxContextTokens).toBe(4096);
    expect(result.modelLabel).toBe('GPT-4');
    expect(result.modelOptions).toEqual({
      model: 'gpt-4',
      temperature: 0.7,
      max_tokens: 1000,
    });
  });

  it('should handle null values for LibreChat params', () => {
    const options = {
      resendFiles: true,
      promptPrefix: null,
      maxContextTokens: 2048,
      modelLabel: null,
      model: 'claude-3',
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeNull();
    expect(result.maxContextTokens).toBe(2048);
    expect(result.modelLabel).toBeNull();
    expect(result.modelOptions).toEqual({
      model: 'claude-3',
    });
  });

  it('should use default for resendFiles when not provided', () => {
    const options = {
      promptPrefix: 'Test prefix',
      model: 'gpt-3.5-turbo',
      temperature: 0.5,
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true); // Should use default
    expect(result.promptPrefix).toBe('Test prefix');
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({
      model: 'gpt-3.5-turbo',
      temperature: 0.5,
    });
  });

  it('should handle empty options object', () => {
    const result = extractLibreChatParams({});

    expect(result.resendFiles).toBe(true); // Should use default
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should only extract known LibreChat params', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'Custom prompt',
      maxContextTokens: 8192,
      modelLabel: 'Custom Model',
      // Model options
      model: 'gpt-4',
      temperature: 0.9,
      top_p: 0.95,
      frequency_penalty: 0.5,
      presence_penalty: 0.5,
      // Unknown params should stay in modelOptions
      unknownParam: 'should remain',
      customSetting: 123,
    };

    const result = extractLibreChatParams(options);

    // LibreChat params extracted
    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBe('Custom prompt');
    expect(result.maxContextTokens).toBe(8192);
    expect(result.modelLabel).toBe('Custom Model');

    // Model options should include everything else
    expect(result.modelOptions).toEqual({
      model: 'gpt-4',
      temperature: 0.9,
      top_p: 0.95,
      frequency_penalty: 0.5,
      presence_penalty: 0.5,
      unknownParam: 'should remain',
      customSetting: 123,
    });
  });

  it('should not mutate the original options object', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'Test',
      model: 'gpt-4',
      temperature: 0.7,
    };
    const originalOptions = { ...options };

    extractLibreChatParams(options);

    // Original object should remain unchanged
    expect(options).toEqual(originalOptions);
  });

  it('should handle undefined values for optional LibreChat params', () => {
    const options = {
      resendFiles: false,
      promptPrefix: undefined,
      maxContextTokens: undefined,
      modelLabel: undefined,
      model: 'claude-2',
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({
      model: 'claude-2',
    });
  });

  it('should handle mixed null and undefined values', () => {
    const options = {
      promptPrefix: null,
      maxContextTokens: undefined,
      modelLabel: null,
      model: 'gpt-3.5-turbo',
      stop: ['\\n', '\\n\\n'],
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true); // default
    expect(result.promptPrefix).toBeNull();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeNull();
    expect(result.modelOptions).toEqual({
      model: 'gpt-3.5-turbo',
      stop: ['\\n', '\\n\\n'],
    });
  });
});
|
||||
47
packages/api/src/utils/llm.ts
Normal file
47
packages/api/src/utils/llm.ts
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
import { librechat } from 'librechat-data-provider';
|
||||
import type { DynamicSettingProps } from 'librechat-data-provider';
|
||||
|
||||
type LibreChatKeys = keyof typeof librechat;
|
||||
|
||||
type LibreChatParams = {
|
||||
modelOptions: Omit<NonNullable<DynamicSettingProps['conversation']>, LibreChatKeys>;
|
||||
resendFiles: boolean;
|
||||
promptPrefix?: string | null;
|
||||
maxContextTokens?: number;
|
||||
modelLabel?: string | null;
|
||||
};
|
||||
|
||||
/**
|
||||
* Separates LibreChat-specific parameters from model options
|
||||
* @param options - The combined options object
|
||||
*/
|
||||
export function extractLibreChatParams(
|
||||
options?: DynamicSettingProps['conversation'],
|
||||
): LibreChatParams {
|
||||
if (!options) {
|
||||
return {
|
||||
modelOptions: {} as Omit<NonNullable<DynamicSettingProps['conversation']>, LibreChatKeys>,
|
||||
resendFiles: librechat.resendFiles.default as boolean,
|
||||
};
|
||||
}
|
||||
|
||||
const modelOptions = { ...options };
|
||||
|
||||
const resendFiles =
|
||||
(delete modelOptions.resendFiles, options.resendFiles) ??
|
||||
(librechat.resendFiles.default as boolean);
|
||||
const promptPrefix = (delete modelOptions.promptPrefix, options.promptPrefix);
|
||||
const maxContextTokens = (delete modelOptions.maxContextTokens, options.maxContextTokens);
|
||||
const modelLabel = (delete modelOptions.modelLabel, options.modelLabel);
|
||||
|
||||
return {
|
||||
modelOptions: modelOptions as Omit<
|
||||
NonNullable<DynamicSettingProps['conversation']>,
|
||||
LibreChatKeys
|
||||
>,
|
||||
maxContextTokens,
|
||||
promptPrefix,
|
||||
resendFiles,
|
||||
modelLabel,
|
||||
};
|
||||
}
|
||||
45
packages/api/src/utils/math.ts
Normal file
45
packages/api/src/utils/math.ts
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
/**
|
||||
* Evaluates a mathematical expression provided as a string and returns the result.
|
||||
*
|
||||
* If the input is already a number, it returns the number as is.
|
||||
* If the input is not a string or contains invalid characters, an error is thrown.
|
||||
* If the evaluated result is not a number, an error is thrown.
|
||||
*
|
||||
* @param str - The mathematical expression to evaluate, or a number.
|
||||
* @param fallbackValue - The default value to return if the input is not a string or number, or if the evaluated result is not a number.
|
||||
*
|
||||
* @returns The result of the evaluated expression or the input number.
|
||||
*
|
||||
* @throws Throws an error if the input is not a string or number, contains invalid characters, or does not evaluate to a number.
|
||||
*/
|
||||
export function math(str: string | number, fallbackValue?: number): number {
|
||||
const fallback = typeof fallbackValue !== 'undefined' && typeof fallbackValue === 'number';
|
||||
if (typeof str !== 'string' && typeof str === 'number') {
|
||||
return str;
|
||||
} else if (typeof str !== 'string') {
|
||||
if (fallback) {
|
||||
return fallbackValue;
|
||||
}
|
||||
throw new Error(`str is ${typeof str}, but should be a string`);
|
||||
}
|
||||
|
||||
const validStr = /^[+\-\d.\s*/%()]+$/.test(str);
|
||||
|
||||
if (!validStr) {
|
||||
if (fallback) {
|
||||
return fallbackValue;
|
||||
}
|
||||
throw new Error('Invalid characters in string');
|
||||
}
|
||||
|
||||
const value = eval(str);
|
||||
|
||||
if (typeof value !== 'number') {
|
||||
if (fallback) {
|
||||
return fallbackValue;
|
||||
}
|
||||
throw new Error(`[math] str did not evaluate to a number but to a ${typeof value}`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
51
packages/api/src/utils/openid.ts
Normal file
51
packages/api/src/utils/openid.ts
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
/**
|
||||
* Helper function to safely log sensitive data when debug mode is enabled
|
||||
* @param obj - Object to stringify
|
||||
* @param maxLength - Maximum length of the stringified output
|
||||
* @returns Stringified object with sensitive data masked
|
||||
*/
|
||||
export function safeStringify(obj: unknown, maxLength = 1000): string {
|
||||
try {
|
||||
const str = JSON.stringify(obj, (key, value) => {
|
||||
// Mask sensitive values
|
||||
if (
|
||||
key === 'client_secret' ||
|
||||
key === 'Authorization' ||
|
||||
key.toLowerCase().includes('token') ||
|
||||
key.toLowerCase().includes('password')
|
||||
) {
|
||||
return typeof value === 'string' && value.length > 6
|
||||
? `${value.substring(0, 3)}...${value.substring(value.length - 3)}`
|
||||
: '***MASKED***';
|
||||
}
|
||||
return value;
|
||||
});
|
||||
|
||||
if (str && str.length > maxLength) {
|
||||
return `${str.substring(0, maxLength)}... (truncated)`;
|
||||
}
|
||||
return str;
|
||||
} catch (error) {
|
||||
return `[Error stringifying object: ${(error as Error).message}]`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to log headers without revealing sensitive information
|
||||
* @param headers - Headers object to log
|
||||
* @returns Stringified headers with sensitive data masked
|
||||
*/
|
||||
export function logHeaders(headers: Headers | undefined | null): string {
|
||||
const headerObj: Record<string, string> = {};
|
||||
if (!headers || typeof headers.entries !== 'function') {
|
||||
return 'No headers available';
|
||||
}
|
||||
for (const [key, value] of headers.entries()) {
|
||||
if (key.toLowerCase() === 'authorization' || key.toLowerCase().includes('secret')) {
|
||||
headerObj[key] = '***MASKED***';
|
||||
} else {
|
||||
headerObj[key] = value;
|
||||
}
|
||||
}
|
||||
return safeStringify(headerObj);
|
||||
}
|
||||
133
packages/api/src/utils/tempChatRetention.spec.ts
Normal file
133
packages/api/src/utils/tempChatRetention.spec.ts
Normal file
|
|
@ -0,0 +1,133 @@
|
|||
import {
|
||||
MIN_RETENTION_HOURS,
|
||||
MAX_RETENTION_HOURS,
|
||||
DEFAULT_RETENTION_HOURS,
|
||||
getTempChatRetentionHours,
|
||||
createTempChatExpirationDate,
|
||||
} from './tempChatRetention';
|
||||
import type { TCustomConfig } from 'librechat-data-provider';
|
||||
|
||||
/**
 * Unit tests for ./tempChatRetention: retention-hour resolution from config
 * and the TEMP_CHAT_RETENTION_HOURS env var (config wins), min/max clamping,
 * invalid-input handling, and expiration-date construction.
 */
describe('tempChatRetention', () => {
  // Snapshot of the real environment; each test runs against a fresh copy
  // with TEMP_CHAT_RETENTION_HOURS removed.
  const originalEnv = process.env;

  beforeEach(() => {
    jest.resetModules();
    process.env = { ...originalEnv };
    delete process.env.TEMP_CHAT_RETENTION_HOURS;
  });

  afterAll(() => {
    process.env = originalEnv;
  });

  describe('getTempChatRetentionHours', () => {
    it('should return default retention hours when no config or env var is set', () => {
      const result = getTempChatRetentionHours();
      expect(result).toBe(DEFAULT_RETENTION_HOURS);
    });

    it('should use environment variable when set', () => {
      process.env.TEMP_CHAT_RETENTION_HOURS = '48';
      const result = getTempChatRetentionHours();
      expect(result).toBe(48);
    });

    it('should use config value when set', () => {
      const config: Partial<TCustomConfig> = {
        interface: {
          temporaryChatRetention: 12,
        },
      };
      const result = getTempChatRetentionHours(config);
      expect(result).toBe(12);
    });

    it('should prioritize config over environment variable', () => {
      process.env.TEMP_CHAT_RETENTION_HOURS = '48';
      const config: Partial<TCustomConfig> = {
        interface: {
          temporaryChatRetention: 12,
        },
      };
      const result = getTempChatRetentionHours(config);
      expect(result).toBe(12);
    });

    it('should enforce minimum retention period', () => {
      const config: Partial<TCustomConfig> = {
        interface: {
          temporaryChatRetention: 0,
        },
      };
      const result = getTempChatRetentionHours(config);
      expect(result).toBe(MIN_RETENTION_HOURS);
    });

    it('should enforce maximum retention period', () => {
      const config: Partial<TCustomConfig> = {
        interface: {
          temporaryChatRetention: 10000,
        },
      };
      const result = getTempChatRetentionHours(config);
      expect(result).toBe(MAX_RETENTION_HOURS);
    });

    it('should handle invalid environment variable', () => {
      process.env.TEMP_CHAT_RETENTION_HOURS = 'invalid';
      const result = getTempChatRetentionHours();
      expect(result).toBe(DEFAULT_RETENTION_HOURS);
    });

    it('should handle invalid config value', () => {
      const config: Partial<TCustomConfig> = {
        interface: {
          temporaryChatRetention: 'invalid' as unknown as number,
        },
      };
      const result = getTempChatRetentionHours(config);
      expect(result).toBe(DEFAULT_RETENTION_HOURS);
    });
  });

  describe('createTempChatExpirationDate', () => {
    it('should create expiration date with default retention period', () => {
      const result = createTempChatExpirationDate();

      const expectedDate = new Date();
      expectedDate.setHours(expectedDate.getHours() + DEFAULT_RETENTION_HOURS);

      // Allow for small time differences in test execution
      const timeDiff = Math.abs(result.getTime() - expectedDate.getTime());
      expect(timeDiff).toBeLessThan(1000); // Less than 1 second difference
    });

    it('should create expiration date with custom retention period', () => {
      const config: Partial<TCustomConfig> = {
        interface: {
          temporaryChatRetention: 12,
        },
      };

      const result = createTempChatExpirationDate(config);

      const expectedDate = new Date();
      expectedDate.setHours(expectedDate.getHours() + 12);

      // Allow for small time differences in test execution
      const timeDiff = Math.abs(result.getTime() - expectedDate.getTime());
      expect(timeDiff).toBeLessThan(1000); // Less than 1 second difference
    });

    it('should return a Date object', () => {
      const result = createTempChatExpirationDate();
      expect(result).toBeInstanceOf(Date);
    });

    it('should return a future date', () => {
      const now = new Date();
      const result = createTempChatExpirationDate();
      expect(result.getTime()).toBeGreaterThan(now.getTime());
    });
  });
});
|
||||
77
packages/api/src/utils/tempChatRetention.ts
Normal file
77
packages/api/src/utils/tempChatRetention.ts
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
import { logger } from '@librechat/data-schemas';
|
||||
import type { TCustomConfig } from 'librechat-data-provider';
|
||||
|
||||
/**
|
||||
* Default retention period for temporary chats in hours
|
||||
*/
|
||||
export const DEFAULT_RETENTION_HOURS = 24 * 30; // 30 days
|
||||
|
||||
/**
|
||||
* Minimum allowed retention period in hours
|
||||
*/
|
||||
export const MIN_RETENTION_HOURS = 1;
|
||||
|
||||
/**
|
||||
* Maximum allowed retention period in hours (1 year = 8760 hours)
|
||||
*/
|
||||
export const MAX_RETENTION_HOURS = 8760;
|
||||
|
||||
/**
|
||||
* Gets the temporary chat retention period from environment variables or config
|
||||
* @param config - The custom configuration object
|
||||
* @returns The retention period in hours
|
||||
*/
|
||||
export function getTempChatRetentionHours(config?: Partial<TCustomConfig> | null): number {
|
||||
let retentionHours = DEFAULT_RETENTION_HOURS;
|
||||
|
||||
// Check environment variable first
|
||||
if (process.env.TEMP_CHAT_RETENTION_HOURS) {
|
||||
const envValue = parseInt(process.env.TEMP_CHAT_RETENTION_HOURS, 10);
|
||||
if (!isNaN(envValue)) {
|
||||
retentionHours = envValue;
|
||||
} else {
|
||||
logger.warn(
|
||||
`Invalid TEMP_CHAT_RETENTION_HOURS environment variable: ${process.env.TEMP_CHAT_RETENTION_HOURS}. Using default: ${DEFAULT_RETENTION_HOURS} hours.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check config file (takes precedence over environment variable)
|
||||
if (config?.interface?.temporaryChatRetention !== undefined) {
|
||||
const configValue = config.interface.temporaryChatRetention;
|
||||
if (typeof configValue === 'number' && !isNaN(configValue)) {
|
||||
retentionHours = configValue;
|
||||
} else {
|
||||
logger.warn(
|
||||
`Invalid temporaryChatRetention in config: ${configValue}. Using ${retentionHours} hours.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Validate the retention period
|
||||
if (retentionHours < MIN_RETENTION_HOURS) {
|
||||
logger.warn(
|
||||
`Temporary chat retention period ${retentionHours} is below minimum ${MIN_RETENTION_HOURS} hours. Using minimum value.`,
|
||||
);
|
||||
retentionHours = MIN_RETENTION_HOURS;
|
||||
} else if (retentionHours > MAX_RETENTION_HOURS) {
|
||||
logger.warn(
|
||||
`Temporary chat retention period ${retentionHours} exceeds maximum ${MAX_RETENTION_HOURS} hours. Using maximum value.`,
|
||||
);
|
||||
retentionHours = MAX_RETENTION_HOURS;
|
||||
}
|
||||
|
||||
return retentionHours;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an expiration date for temporary chats
|
||||
* @param config - The custom configuration object
|
||||
* @returns The expiration date
|
||||
*/
|
||||
export function createTempChatExpirationDate(config?: Partial<TCustomConfig>): Date {
|
||||
const retentionHours = getTempChatRetentionHours(config);
|
||||
const expiredAt = new Date();
|
||||
expiredAt.setHours(expiredAt.getHours() + retentionHours);
|
||||
return expiredAt;
|
||||
}
|
||||
143
packages/api/src/utils/tokenizer.spec.ts
Normal file
143
packages/api/src/utils/tokenizer.spec.ts
Normal file
|
|
@ -0,0 +1,143 @@
|
|||
/**
|
||||
* @file Tokenizer.spec.cjs
|
||||
*
|
||||
* Tests the real TokenizerSingleton (no mocking of `tiktoken`).
|
||||
* Make sure to install `tiktoken` and have it configured properly.
|
||||
*/
|
||||
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import type { Tiktoken } from 'tiktoken';
|
||||
import Tokenizer from './tokenizer';
|
||||
|
||||
// Tests for the TokenizerSingleton (./tokenizer) using the REAL tiktoken
// library: singleton identity, encoder caching, cache freeing with error
// logging, token counting with error recovery, and the 25-call auto-reset.
// Only the data-schemas logger is mocked.
jest.mock('@librechat/data-schemas', () => ({
  logger: {
    error: jest.fn(),
  },
}));

describe('Tokenizer', () => {
  it('should be a singleton (same instance)', async () => {
    const AnotherTokenizer = await import('./tokenizer'); // same path
    expect(Tokenizer).toBe(AnotherTokenizer.default);
  });

  describe('getTokenizer', () => {
    it('should create an encoder for an explicit model name (e.g., "gpt-4")', () => {
      // The real `encoding_for_model` will be called internally
      // as soon as we pass isModelName = true.
      const tokenizer = Tokenizer.getTokenizer('gpt-4', true);

      // Basic sanity checks
      expect(tokenizer).toBeDefined();
      // You can optionally check certain properties from `tiktoken` if they exist
      // e.g., expect(typeof tokenizer.encode).toBe('function');
    });

    it('should create an encoder for a known encoding (e.g., "cl100k_base")', () => {
      // The real `get_encoding` will be called internally
      // as soon as we pass isModelName = false.
      const tokenizer = Tokenizer.getTokenizer('cl100k_base', false);

      expect(tokenizer).toBeDefined();
      // e.g., expect(typeof tokenizer.encode).toBe('function');
    });

    it('should return cached tokenizer if previously fetched', () => {
      const tokenizer1 = Tokenizer.getTokenizer('cl100k_base', false);
      const tokenizer2 = Tokenizer.getTokenizer('cl100k_base', false);
      // Should be the exact same instance from the cache
      expect(tokenizer1).toBe(tokenizer2);
    });
  });

  describe('freeAndResetAllEncoders', () => {
    beforeEach(() => {
      jest.clearAllMocks();
    });

    it('should free all encoders and reset tokenizerCallsCount to 1', () => {
      // By creating two different encodings, we populate the cache
      Tokenizer.getTokenizer('cl100k_base', false);
      Tokenizer.getTokenizer('r50k_base', false);

      // Now free them
      Tokenizer.freeAndResetAllEncoders();

      // The internal cache is cleared
      expect(Tokenizer.tokenizersCache['cl100k_base']).toBeUndefined();
      expect(Tokenizer.tokenizersCache['r50k_base']).toBeUndefined();

      // tokenizerCallsCount is reset to 1
      expect(Tokenizer.tokenizerCallsCount).toBe(1);
    });

    it('should catch and log errors if freeing fails', () => {
      // Mock logger.error before the test
      const mockLoggerError = jest.spyOn(logger, 'error');

      // Set up a problematic tokenizer in the cache
      Tokenizer.tokenizersCache['cl100k_base'] = {
        free() {
          throw new Error('Intentional free error');
        },
      } as unknown as Tiktoken;

      // Should not throw uncaught errors
      Tokenizer.freeAndResetAllEncoders();

      // Verify logger.error was called with correct arguments
      expect(mockLoggerError).toHaveBeenCalledWith(
        '[Tokenizer] Free and reset encoders error',
        expect.any(Error),
      );

      // Clean up
      mockLoggerError.mockRestore();
      Tokenizer.tokenizersCache = {};
    });
  });

  describe('getTokenCount', () => {
    beforeEach(() => {
      jest.clearAllMocks();
      Tokenizer.freeAndResetAllEncoders();
    });

    it('should return the number of tokens in the given text', () => {
      const text = 'Hello, world!';
      const count = Tokenizer.getTokenCount(text, 'cl100k_base');
      expect(count).toBeGreaterThan(0);
    });

    it('should reset encoders if an error is thrown', () => {
      // We can simulate an error by temporarily overriding the selected tokenizer's `encode` method.
      const tokenizer = Tokenizer.getTokenizer('cl100k_base', false);
      const originalEncode = tokenizer.encode;
      tokenizer.encode = () => {
        throw new Error('Forced error');
      };

      // Despite the forced error, the code should catch and reset, then re-encode
      const count = Tokenizer.getTokenCount('Hello again', 'cl100k_base');
      expect(count).toBeGreaterThan(0);

      // Restore the original encode
      tokenizer.encode = originalEncode;
    });

    it('should reset tokenizers after 25 calls', () => {
      // Spy on freeAndResetAllEncoders
      const resetSpy = jest.spyOn(Tokenizer, 'freeAndResetAllEncoders');

      // Make 24 calls; should NOT reset yet
      for (let i = 0; i < 24; i++) {
        Tokenizer.getTokenCount('test text', 'cl100k_base');
      }
      expect(resetSpy).not.toHaveBeenCalled();

      // 25th call triggers the reset
      Tokenizer.getTokenCount('the 25th call!', 'cl100k_base');
      expect(resetSpy).toHaveBeenCalledTimes(1);
    });
  });
});
|
||||
78
packages/api/src/utils/tokenizer.ts
Normal file
78
packages/api/src/utils/tokenizer.ts
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
import { logger } from '@librechat/data-schemas';
|
||||
import { encoding_for_model as encodingForModel, get_encoding as getEncoding } from 'tiktoken';
|
||||
import type { Tiktoken, TiktokenModel, TiktokenEncoding } from 'tiktoken';
|
||||
|
||||
interface TokenizerOptions {
|
||||
debug?: boolean;
|
||||
}
|
||||
|
||||
class Tokenizer {
|
||||
tokenizersCache: Record<string, Tiktoken>;
|
||||
tokenizerCallsCount: number;
|
||||
private options?: TokenizerOptions;
|
||||
|
||||
constructor() {
|
||||
this.tokenizersCache = {};
|
||||
this.tokenizerCallsCount = 0;
|
||||
}
|
||||
|
||||
getTokenizer(
|
||||
encoding: TiktokenModel | TiktokenEncoding,
|
||||
isModelName = false,
|
||||
extendSpecialTokens: Record<string, number> = {},
|
||||
): Tiktoken {
|
||||
let tokenizer: Tiktoken;
|
||||
if (this.tokenizersCache[encoding]) {
|
||||
tokenizer = this.tokenizersCache[encoding];
|
||||
} else {
|
||||
if (isModelName) {
|
||||
tokenizer = encodingForModel(encoding as TiktokenModel, extendSpecialTokens);
|
||||
} else {
|
||||
tokenizer = getEncoding(encoding as TiktokenEncoding, extendSpecialTokens);
|
||||
}
|
||||
this.tokenizersCache[encoding] = tokenizer;
|
||||
}
|
||||
return tokenizer;
|
||||
}
|
||||
|
||||
freeAndResetAllEncoders(): void {
|
||||
try {
|
||||
Object.keys(this.tokenizersCache).forEach((key) => {
|
||||
if (this.tokenizersCache[key]) {
|
||||
this.tokenizersCache[key].free();
|
||||
delete this.tokenizersCache[key];
|
||||
}
|
||||
});
|
||||
this.tokenizerCallsCount = 1;
|
||||
} catch (error) {
|
||||
logger.error('[Tokenizer] Free and reset encoders error', error);
|
||||
}
|
||||
}
|
||||
|
||||
resetTokenizersIfNecessary(): void {
|
||||
if (this.tokenizerCallsCount >= 25) {
|
||||
if (this.options?.debug) {
|
||||
logger.debug('[Tokenizer] freeAndResetAllEncoders: reached 25 encodings, resetting...');
|
||||
}
|
||||
this.freeAndResetAllEncoders();
|
||||
}
|
||||
this.tokenizerCallsCount++;
|
||||
}
|
||||
|
||||
getTokenCount(text: string, encoding: TiktokenModel | TiktokenEncoding = 'cl100k_base'): number {
|
||||
this.resetTokenizersIfNecessary();
|
||||
try {
|
||||
const tokenizer = this.getTokenizer(encoding);
|
||||
return tokenizer.encode(text, 'all').length;
|
||||
} catch (error) {
|
||||
logger.error('[Tokenizer] Error getting token count:', error);
|
||||
this.freeAndResetAllEncoders();
|
||||
const tokenizer = this.getTokenizer(encoding);
|
||||
return tokenizer.encode(text, 'all').length;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const TokenizerSingleton = new Tokenizer();
|
||||
|
||||
export default TokenizerSingleton;
|
||||
11
packages/api/src/utils/yaml.ts
Normal file
11
packages/api/src/utils/yaml.ts
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
import fs from 'fs';
|
||||
import yaml from 'js-yaml';
|
||||
|
||||
export function loadYaml(filepath: string) {
|
||||
try {
|
||||
const fileContents = fs.readFileSync(filepath, 'utf8');
|
||||
return yaml.load(fileContents);
|
||||
} catch (e) {
|
||||
return e;
|
||||
}
|
||||
}
|
||||
|
|
@ -18,7 +18,10 @@
|
|||
"isolatedModules": true,
|
||||
"noEmit": true,
|
||||
"sourceMap": true,
|
||||
"baseUrl": "."
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"~/*": ["./src/*"]
|
||||
}
|
||||
},
|
||||
"ts-node": {
|
||||
"experimentalSpecifierResolution": "node",
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "librechat-data-provider",
|
||||
"version": "0.7.86",
|
||||
"version": "0.7.899",
|
||||
"description": "data services for librechat apps",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.es.js",
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
import axios from 'axios';
|
||||
import { z } from 'zod';
|
||||
import { OpenAPIV3 } from 'openapi-types';
|
||||
import axios from 'axios';
|
||||
import type { OpenAPIV3 } from 'openapi-types';
|
||||
import type { ParametersSchema } from '../src/actions';
|
||||
import type { FlowchartSchema } from './openapiSpecs';
|
||||
import {
|
||||
createURL,
|
||||
resolveRef,
|
||||
|
|
@ -15,9 +17,7 @@ import {
|
|||
scholarAIOpenapiSpec,
|
||||
swapidev,
|
||||
} from './openapiSpecs';
|
||||
import { AuthorizationTypeEnum, AuthTypeEnum } from '../src/types/assistants';
|
||||
import type { FlowchartSchema } from './openapiSpecs';
|
||||
import type { ParametersSchema } from '../src/actions';
|
||||
import { AuthorizationTypeEnum, AuthTypeEnum } from '../src/types/agents';
|
||||
|
||||
jest.mock('axios');
|
||||
const mockedAxios = axios as jest.Mocked<typeof axios>;
|
||||
|
|
@ -275,8 +275,7 @@ describe('ActionRequest', () => {
|
|||
expect(config?.headers).toEqual({
|
||||
'some-header': 'header-var',
|
||||
});
|
||||
expect(config?.params).toEqual({
|
||||
});
|
||||
expect(config?.params).toEqual({});
|
||||
expect(response.data.success).toBe(true);
|
||||
});
|
||||
|
||||
|
|
@ -285,13 +284,13 @@ describe('ActionRequest', () => {
|
|||
|
||||
const data: Record<string, unknown> = {
|
||||
'api-version': '2025-01-01',
|
||||
'message': 'a body parameter',
|
||||
message: 'a body parameter',
|
||||
'some-header': 'header-var',
|
||||
};
|
||||
|
||||
const loc: Record<string, 'query' | 'path' | 'header' | 'body'> = {
|
||||
'api-version': 'query',
|
||||
'message': 'body',
|
||||
message: 'body',
|
||||
'some-header': 'header',
|
||||
};
|
||||
|
||||
|
|
@ -326,13 +325,13 @@ describe('ActionRequest', () => {
|
|||
|
||||
const data: Record<string, unknown> = {
|
||||
'api-version': '2025-01-01',
|
||||
'message': 'a body parameter',
|
||||
message: 'a body parameter',
|
||||
'some-header': 'header-var',
|
||||
};
|
||||
|
||||
const loc: Record<string, 'query' | 'path' | 'header' | 'body'> = {
|
||||
'api-version': 'query',
|
||||
'message': 'body',
|
||||
message: 'body',
|
||||
'some-header': 'header',
|
||||
};
|
||||
|
||||
|
|
@ -367,13 +366,13 @@ describe('ActionRequest', () => {
|
|||
|
||||
const data: Record<string, unknown> = {
|
||||
'api-version': '2025-01-01',
|
||||
'message': 'a body parameter',
|
||||
message: 'a body parameter',
|
||||
'some-header': 'header-var',
|
||||
};
|
||||
|
||||
const loc: Record<string, 'query' | 'path' | 'header' | 'body'> = {
|
||||
'api-version': 'query',
|
||||
'message': 'body',
|
||||
message: 'body',
|
||||
'some-header': 'header',
|
||||
};
|
||||
|
||||
|
|
@ -443,7 +442,6 @@ describe('ActionRequest', () => {
|
|||
});
|
||||
expect(response.data.success).toBe(true);
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
it('throws an error for unsupported HTTP method', async () => {
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
/* eslint-disable jest/no-conditional-expect */
|
||||
import { ZodError, z } from 'zod';
|
||||
import { generateDynamicSchema, validateSettingDefinitions, OptionTypes } from '../src/generate';
|
||||
import type { SettingsConfiguration } from '../src/generate';
|
||||
|
|
@ -97,6 +96,37 @@ describe('generateDynamicSchema', () => {
|
|||
expect(result['data']).toEqual({ testEnum: 'option2' });
|
||||
});
|
||||
|
||||
it('should generate a schema for enum settings with empty string option', () => {
|
||||
const settings: SettingsConfiguration = [
|
||||
{
|
||||
key: 'testEnumWithEmpty',
|
||||
description: 'A test enum setting with empty string',
|
||||
type: 'enum',
|
||||
default: '',
|
||||
options: ['', 'option1', 'option2'],
|
||||
enumMappings: {
|
||||
'': 'None',
|
||||
option1: 'First Option',
|
||||
option2: 'Second Option',
|
||||
},
|
||||
component: 'slider',
|
||||
columnSpan: 2,
|
||||
label: 'Test Enum with Empty String',
|
||||
},
|
||||
];
|
||||
|
||||
const schema = generateDynamicSchema(settings);
|
||||
const result = schema.safeParse({ testEnumWithEmpty: '' });
|
||||
|
||||
expect(result.success).toBeTruthy();
|
||||
expect(result['data']).toEqual({ testEnumWithEmpty: '' });
|
||||
|
||||
// Test with non-empty option
|
||||
const result2 = schema.safeParse({ testEnumWithEmpty: 'option1' });
|
||||
expect(result2.success).toBeTruthy();
|
||||
expect(result2['data']).toEqual({ testEnumWithEmpty: 'option1' });
|
||||
});
|
||||
|
||||
it('should fail for incorrect enum value', () => {
|
||||
const settings: SettingsConfiguration = [
|
||||
{
|
||||
|
|
@ -481,6 +511,47 @@ describe('validateSettingDefinitions', () => {
|
|||
|
||||
expect(() => validateSettingDefinitions(settingsExceedingMaxTags)).toThrow(ZodError);
|
||||
});
|
||||
|
||||
// Test for incomplete enumMappings
|
||||
test('should throw error for incomplete enumMappings', () => {
|
||||
const settingsWithIncompleteEnumMappings: SettingsConfiguration = [
|
||||
{
|
||||
key: 'displayMode',
|
||||
type: 'enum',
|
||||
component: 'dropdown',
|
||||
options: ['light', 'dark', 'auto'],
|
||||
enumMappings: {
|
||||
light: 'Light Mode',
|
||||
dark: 'Dark Mode',
|
||||
// Missing mapping for 'auto'
|
||||
},
|
||||
optionType: OptionTypes.Custom,
|
||||
},
|
||||
];
|
||||
|
||||
expect(() => validateSettingDefinitions(settingsWithIncompleteEnumMappings)).toThrow(ZodError);
|
||||
});
|
||||
|
||||
// Test for complete enumMappings including empty string
|
||||
test('should not throw error for complete enumMappings including empty string', () => {
|
||||
const settingsWithCompleteEnumMappings: SettingsConfiguration = [
|
||||
{
|
||||
key: 'selectionMode',
|
||||
type: 'enum',
|
||||
component: 'slider',
|
||||
options: ['', 'single', 'multiple'],
|
||||
enumMappings: {
|
||||
'': 'None',
|
||||
single: 'Single Selection',
|
||||
multiple: 'Multiple Selection',
|
||||
},
|
||||
default: '',
|
||||
optionType: OptionTypes.Custom,
|
||||
},
|
||||
];
|
||||
|
||||
expect(() => validateSettingDefinitions(settingsWithCompleteEnumMappings)).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
const settingsConfiguration: SettingsConfiguration = [
|
||||
|
|
@ -515,7 +586,7 @@ const settingsConfiguration: SettingsConfiguration = [
|
|||
{
|
||||
key: 'presence_penalty',
|
||||
description:
|
||||
'Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model\'s likelihood to talk about new topics.',
|
||||
"Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.",
|
||||
type: 'number',
|
||||
default: 0,
|
||||
range: {
|
||||
|
|
@ -529,7 +600,7 @@ const settingsConfiguration: SettingsConfiguration = [
|
|||
{
|
||||
key: 'frequency_penalty',
|
||||
description:
|
||||
'Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model\'s likelihood to repeat the same line verbatim.',
|
||||
"Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.",
|
||||
type: 'number',
|
||||
default: 0,
|
||||
range: {
|
||||
|
|
|
|||
|
|
@ -1,277 +0,0 @@
|
|||
import { StdioOptionsSchema, StreamableHTTPOptionsSchema, processMCPEnv, MCPOptions } from '../src/mcp';
|
||||
|
||||
describe('Environment Variable Extraction (MCP)', () => {
|
||||
const originalEnv = process.env;
|
||||
|
||||
beforeEach(() => {
|
||||
process.env = {
|
||||
...originalEnv,
|
||||
TEST_API_KEY: 'test-api-key-value',
|
||||
ANOTHER_SECRET: 'another-secret-value',
|
||||
};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
describe('StdioOptionsSchema', () => {
|
||||
it('should transform environment variables in the env field', () => {
|
||||
const options = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
API_KEY: '${TEST_API_KEY}',
|
||||
ANOTHER_KEY: '${ANOTHER_SECRET}',
|
||||
PLAIN_VALUE: 'plain-value',
|
||||
NON_EXISTENT: '${NON_EXISTENT_VAR}',
|
||||
},
|
||||
};
|
||||
|
||||
const result = StdioOptionsSchema.parse(options);
|
||||
|
||||
expect(result.env).toEqual({
|
||||
API_KEY: 'test-api-key-value',
|
||||
ANOTHER_KEY: 'another-secret-value',
|
||||
PLAIN_VALUE: 'plain-value',
|
||||
NON_EXISTENT: '${NON_EXISTENT_VAR}',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle undefined env field', () => {
|
||||
const options = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
};
|
||||
|
||||
const result = StdioOptionsSchema.parse(options);
|
||||
|
||||
expect(result.env).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('StreamableHTTPOptionsSchema', () => {
|
||||
it('should validate a valid streamable-http configuration', () => {
|
||||
const options = {
|
||||
type: 'streamable-http',
|
||||
url: 'https://example.com/api',
|
||||
headers: {
|
||||
Authorization: 'Bearer token',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
|
||||
const result = StreamableHTTPOptionsSchema.parse(options);
|
||||
|
||||
expect(result).toEqual(options);
|
||||
});
|
||||
|
||||
it('should reject websocket URLs', () => {
|
||||
const options = {
|
||||
type: 'streamable-http',
|
||||
url: 'ws://example.com/socket',
|
||||
};
|
||||
|
||||
expect(() => StreamableHTTPOptionsSchema.parse(options)).toThrow();
|
||||
});
|
||||
|
||||
it('should reject secure websocket URLs', () => {
|
||||
const options = {
|
||||
type: 'streamable-http',
|
||||
url: 'wss://example.com/socket',
|
||||
};
|
||||
|
||||
expect(() => StreamableHTTPOptionsSchema.parse(options)).toThrow();
|
||||
});
|
||||
|
||||
it('should require type field to be set explicitly', () => {
|
||||
const options = {
|
||||
url: 'https://example.com/api',
|
||||
};
|
||||
|
||||
// Type is now required, so parsing should fail
|
||||
expect(() => StreamableHTTPOptionsSchema.parse(options)).toThrow();
|
||||
|
||||
// With type provided, it should pass
|
||||
const validOptions = {
|
||||
type: 'streamable-http' as const,
|
||||
url: 'https://example.com/api',
|
||||
};
|
||||
|
||||
const result = StreamableHTTPOptionsSchema.parse(validOptions);
|
||||
expect(result.type).toBe('streamable-http');
|
||||
});
|
||||
|
||||
it('should validate headers as record of strings', () => {
|
||||
const options = {
|
||||
type: 'streamable-http',
|
||||
url: 'https://example.com/api',
|
||||
headers: {
|
||||
'X-API-Key': '123456',
|
||||
'User-Agent': 'MCP Client',
|
||||
},
|
||||
};
|
||||
|
||||
const result = StreamableHTTPOptionsSchema.parse(options);
|
||||
|
||||
expect(result.headers).toEqual(options.headers);
|
||||
});
|
||||
});
|
||||
|
||||
describe('processMCPEnv', () => {
|
||||
it('should create a deep clone of the input object', () => {
|
||||
const originalObj: MCPOptions = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
API_KEY: '${TEST_API_KEY}',
|
||||
PLAIN_VALUE: 'plain-value',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(originalObj);
|
||||
|
||||
// Verify it's not the same object reference
|
||||
expect(result).not.toBe(originalObj);
|
||||
|
||||
// Modify the result and ensure original is unchanged
|
||||
if ('env' in result && result.env) {
|
||||
result.env.API_KEY = 'modified-value';
|
||||
}
|
||||
|
||||
expect(originalObj.env?.API_KEY).toBe('${TEST_API_KEY}');
|
||||
});
|
||||
|
||||
it('should process environment variables in env field', () => {
|
||||
const obj: MCPOptions = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
env: {
|
||||
API_KEY: '${TEST_API_KEY}',
|
||||
ANOTHER_KEY: '${ANOTHER_SECRET}',
|
||||
PLAIN_VALUE: 'plain-value',
|
||||
NON_EXISTENT: '${NON_EXISTENT_VAR}',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj);
|
||||
|
||||
expect('env' in result && result.env).toEqual({
|
||||
API_KEY: 'test-api-key-value',
|
||||
ANOTHER_KEY: 'another-secret-value',
|
||||
PLAIN_VALUE: 'plain-value',
|
||||
NON_EXISTENT: '${NON_EXISTENT_VAR}',
|
||||
});
|
||||
});
|
||||
|
||||
it('should process user ID in headers field', () => {
|
||||
const userId = 'test-user-123';
|
||||
const obj: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
Authorization: '${TEST_API_KEY}',
|
||||
'User-Id': '{{LIBRECHAT_USER_ID}}',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, userId);
|
||||
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
Authorization: 'test-api-key-value',
|
||||
'User-Id': 'test-user-123',
|
||||
'Content-Type': 'application/json',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle null or undefined input', () => {
|
||||
// @ts-ignore - Testing null/undefined handling
|
||||
expect(processMCPEnv(null)).toBeNull();
|
||||
// @ts-ignore - Testing null/undefined handling
|
||||
expect(processMCPEnv(undefined)).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not modify objects without env or headers', () => {
|
||||
const obj: MCPOptions = {
|
||||
command: 'node',
|
||||
args: ['server.js'],
|
||||
timeout: 5000,
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj);
|
||||
|
||||
expect(result).toEqual(obj);
|
||||
expect(result).not.toBe(obj); // Still a different object (deep clone)
|
||||
});
|
||||
|
||||
it('should ensure different users with same starting config get separate values', () => {
|
||||
// Create a single base configuration
|
||||
const baseConfig: MCPOptions = {
|
||||
type: 'sse',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
'User-Id': '{{LIBRECHAT_USER_ID}}',
|
||||
'API-Key': '${TEST_API_KEY}',
|
||||
},
|
||||
};
|
||||
|
||||
// Process for two different users
|
||||
const user1Id = 'user-123';
|
||||
const user2Id = 'user-456';
|
||||
|
||||
const resultUser1 = processMCPEnv(baseConfig, user1Id);
|
||||
const resultUser2 = processMCPEnv(baseConfig, user2Id);
|
||||
|
||||
// Verify each has the correct user ID
|
||||
expect('headers' in resultUser1 && resultUser1.headers?.['User-Id']).toBe(user1Id);
|
||||
expect('headers' in resultUser2 && resultUser2.headers?.['User-Id']).toBe(user2Id);
|
||||
|
||||
// Verify they're different objects
|
||||
expect(resultUser1).not.toBe(resultUser2);
|
||||
|
||||
// Modify one result and ensure it doesn't affect the other
|
||||
if ('headers' in resultUser1 && resultUser1.headers) {
|
||||
resultUser1.headers['User-Id'] = 'modified-user';
|
||||
}
|
||||
|
||||
// Original config should be unchanged
|
||||
expect(baseConfig.headers?.['User-Id']).toBe('{{LIBRECHAT_USER_ID}}');
|
||||
|
||||
// Second user's config should be unchanged
|
||||
expect('headers' in resultUser2 && resultUser2.headers?.['User-Id']).toBe(user2Id);
|
||||
});
|
||||
|
||||
it('should process headers in streamable-http options', () => {
|
||||
const userId = 'test-user-123';
|
||||
const obj: MCPOptions = {
|
||||
type: 'streamable-http',
|
||||
url: 'https://example.com',
|
||||
headers: {
|
||||
Authorization: '${TEST_API_KEY}',
|
||||
'User-Id': '{{LIBRECHAT_USER_ID}}',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj, userId);
|
||||
|
||||
expect('headers' in result && result.headers).toEqual({
|
||||
Authorization: 'test-api-key-value',
|
||||
'User-Id': 'test-user-123',
|
||||
'Content-Type': 'application/json',
|
||||
});
|
||||
});
|
||||
|
||||
it('should maintain streamable-http type in processed options', () => {
|
||||
const obj: MCPOptions = {
|
||||
type: 'streamable-http',
|
||||
url: 'https://example.com/api',
|
||||
};
|
||||
|
||||
const result = processMCPEnv(obj);
|
||||
|
||||
expect(result.type).toBe('streamable-http');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -3,15 +3,11 @@ import _axios from 'axios';
|
|||
import { URL } from 'url';
|
||||
import crypto from 'crypto';
|
||||
import { load } from 'js-yaml';
|
||||
import type {
|
||||
FunctionTool,
|
||||
Schema,
|
||||
Reference,
|
||||
ActionMetadata,
|
||||
ActionMetadataRuntime,
|
||||
} from './types/assistants';
|
||||
import type { ActionMetadata, ActionMetadataRuntime } from './types/agents';
|
||||
import type { FunctionTool, Schema, Reference } from './types/assistants';
|
||||
import { AuthTypeEnum, AuthorizationTypeEnum } from './types/agents';
|
||||
import type { OpenAPIV3 } from 'openapi-types';
|
||||
import { Tools, AuthTypeEnum, AuthorizationTypeEnum } from './types/assistants';
|
||||
import { Tools } from './types/assistants';
|
||||
|
||||
export type ParametersSchema = {
|
||||
type: string;
|
||||
|
|
@ -303,7 +299,8 @@ class RequestExecutor {
|
|||
if (this.config.parameterLocations && this.params) {
|
||||
for (const key of Object.keys(this.params)) {
|
||||
// Determine parameter placement; default to "query" for GET and "body" for others.
|
||||
const loc: 'query' | 'path' | 'header' | 'body' = this.config.parameterLocations[key] || (method === 'get' ? 'query' : 'body');
|
||||
const loc: 'query' | 'path' | 'header' | 'body' =
|
||||
this.config.parameterLocations[key] || (method === 'get' ? 'query' : 'body');
|
||||
|
||||
const val = this.params[key];
|
||||
if (loc === 'query') {
|
||||
|
|
@ -351,7 +348,15 @@ export class ActionRequest {
|
|||
contentType: string,
|
||||
parameterLocations?: Record<string, 'query' | 'path' | 'header' | 'body'>,
|
||||
) {
|
||||
this.config = new RequestConfig(domain, path, method, operation, isConsequential, contentType, parameterLocations);
|
||||
this.config = new RequestConfig(
|
||||
domain,
|
||||
path,
|
||||
method,
|
||||
operation,
|
||||
isConsequential,
|
||||
contentType,
|
||||
parameterLocations,
|
||||
);
|
||||
}
|
||||
|
||||
// Add getters to maintain backward compatibility
|
||||
|
|
@ -486,12 +491,12 @@ export function openapiToFunction(
|
|||
}
|
||||
// Record the parameter location from the OpenAPI "in" field.
|
||||
paramLocations[paramName] =
|
||||
(resolvedParam.in === 'query' ||
|
||||
resolvedParam.in === 'path' ||
|
||||
resolvedParam.in === 'header' ||
|
||||
resolvedParam.in === 'body')
|
||||
? resolvedParam.in
|
||||
: 'query';
|
||||
resolvedParam.in === 'query' ||
|
||||
resolvedParam.in === 'path' ||
|
||||
resolvedParam.in === 'header' ||
|
||||
resolvedParam.in === 'body'
|
||||
? resolvedParam.in
|
||||
: 'query';
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -70,8 +70,6 @@ export const revokeUserKey = (name: string) => `${keysEndpoint}/${name}`;
|
|||
|
||||
export const revokeAllUserKeys = () => `${keysEndpoint}?all=true`;
|
||||
|
||||
export const abortRequest = (endpoint: string) => `/api/ask/${endpoint}/abort`;
|
||||
|
||||
export const conversationsRoot = '/api/convos';
|
||||
|
||||
export const conversations = (params: q.ConversationListParams) => {
|
||||
|
|
@ -254,6 +252,7 @@ export const getAllPromptGroups = () => `${prompts()}/all`;
|
|||
export const roles = () => '/api/roles';
|
||||
export const getRole = (roleName: string) => `${roles()}/${roleName.toLowerCase()}`;
|
||||
export const updatePromptPermissions = (roleName: string) => `${getRole(roleName)}/prompts`;
|
||||
export const updateMemoryPermissions = (roleName: string) => `${getRole(roleName)}/memories`;
|
||||
export const updateAgentPermissions = (roleName: string) => `${getRole(roleName)}/agents`;
|
||||
|
||||
/* Conversation Tags */
|
||||
|
|
@ -272,6 +271,10 @@ export const userTerms = () => '/api/user/terms';
|
|||
export const acceptUserTerms = () => '/api/user/terms/accept';
|
||||
export const banner = () => '/api/banner';
|
||||
|
||||
// Message Feedback
|
||||
export const feedback = (conversationId: string, messageId: string) =>
|
||||
`/api/messages/${conversationId}/${messageId}/feedback`;
|
||||
|
||||
// Two-Factor Endpoints
|
||||
export const enableTwoFactor = () => '/api/auth/2fa/enable';
|
||||
export const verifyTwoFactor = () => '/api/auth/2fa/verify';
|
||||
|
|
@ -279,3 +282,8 @@ export const confirmTwoFactor = () => '/api/auth/2fa/confirm';
|
|||
export const disableTwoFactor = () => '/api/auth/2fa/disable';
|
||||
export const regenerateBackupCodes = () => '/api/auth/2fa/backup/regenerate';
|
||||
export const verifyTwoFactorTemp = () => '/api/auth/2fa/verify-temp';
|
||||
|
||||
/* Memories */
|
||||
export const memories = () => '/api/memories';
|
||||
export const memory = (key: string) => `${memories()}/${encodeURIComponent(key)}`;
|
||||
export const memoryPreferences = () => `${memories()}/preferences`;
|
||||
|
|
|
|||
|
|
@ -244,21 +244,26 @@ export const defaultAgentCapabilities = [
|
|||
AgentCapabilities.ocr,
|
||||
];
|
||||
|
||||
export const agentsEndpointSChema = baseEndpointSchema.merge(
|
||||
z.object({
|
||||
/* agents specific */
|
||||
recursionLimit: z.number().optional(),
|
||||
disableBuilder: z.boolean().optional(),
|
||||
maxRecursionLimit: z.number().optional(),
|
||||
allowedProviders: z.array(z.union([z.string(), eModelEndpointSchema])).optional(),
|
||||
capabilities: z
|
||||
.array(z.nativeEnum(AgentCapabilities))
|
||||
.optional()
|
||||
.default(defaultAgentCapabilities),
|
||||
}),
|
||||
);
|
||||
export const agentsEndpointSchema = baseEndpointSchema
|
||||
.merge(
|
||||
z.object({
|
||||
/* agents specific */
|
||||
recursionLimit: z.number().optional(),
|
||||
disableBuilder: z.boolean().optional().default(false),
|
||||
maxRecursionLimit: z.number().optional(),
|
||||
allowedProviders: z.array(z.union([z.string(), eModelEndpointSchema])).optional(),
|
||||
capabilities: z
|
||||
.array(z.nativeEnum(AgentCapabilities))
|
||||
.optional()
|
||||
.default(defaultAgentCapabilities),
|
||||
}),
|
||||
)
|
||||
.default({
|
||||
disableBuilder: false,
|
||||
capabilities: defaultAgentCapabilities,
|
||||
});
|
||||
|
||||
export type TAgentsEndpoint = z.infer<typeof agentsEndpointSChema>;
|
||||
export type TAgentsEndpoint = z.infer<typeof agentsEndpointSchema>;
|
||||
|
||||
export const endpointSchema = baseEndpointSchema.merge(
|
||||
z.object({
|
||||
|
|
@ -476,6 +481,12 @@ const termsOfServiceSchema = z.object({
|
|||
|
||||
export type TTermsOfService = z.infer<typeof termsOfServiceSchema>;
|
||||
|
||||
const mcpServersSchema = z.object({
|
||||
placeholder: z.string().optional(),
|
||||
});
|
||||
|
||||
export type TMcpServersConfig = z.infer<typeof mcpServersSchema>;
|
||||
|
||||
export const intefaceSchema = z
|
||||
.object({
|
||||
privacyPolicy: z
|
||||
|
|
@ -486,16 +497,19 @@ export const intefaceSchema = z
|
|||
.optional(),
|
||||
termsOfService: termsOfServiceSchema.optional(),
|
||||
customWelcome: z.string().optional(),
|
||||
mcpServers: mcpServersSchema.optional(),
|
||||
endpointsMenu: z.boolean().optional(),
|
||||
modelSelect: z.boolean().optional(),
|
||||
parameters: z.boolean().optional(),
|
||||
sidePanel: z.boolean().optional(),
|
||||
multiConvo: z.boolean().optional(),
|
||||
bookmarks: z.boolean().optional(),
|
||||
memories: z.boolean().optional(),
|
||||
presets: z.boolean().optional(),
|
||||
prompts: z.boolean().optional(),
|
||||
agents: z.boolean().optional(),
|
||||
temporaryChat: z.boolean().optional(),
|
||||
temporaryChatRetention: z.number().min(1).max(8760).optional(),
|
||||
runCode: z.boolean().optional(),
|
||||
webSearch: z.boolean().optional(),
|
||||
})
|
||||
|
|
@ -507,6 +521,7 @@ export const intefaceSchema = z
|
|||
presets: true,
|
||||
multiConvo: true,
|
||||
bookmarks: true,
|
||||
memories: true,
|
||||
prompts: true,
|
||||
agents: true,
|
||||
temporaryChat: true,
|
||||
|
|
@ -580,11 +595,26 @@ export type TStartupConfig = {
|
|||
scraperType?: ScraperTypes;
|
||||
rerankerType?: RerankerTypes;
|
||||
};
|
||||
mcpServers?: Record<
|
||||
string,
|
||||
{
|
||||
customUserVars: Record<
|
||||
string,
|
||||
{
|
||||
title: string;
|
||||
description: string;
|
||||
}
|
||||
>;
|
||||
}
|
||||
>;
|
||||
mcpPlaceholder?: string;
|
||||
};
|
||||
|
||||
export enum OCRStrategy {
|
||||
MISTRAL_OCR = 'mistral_ocr',
|
||||
CUSTOM_OCR = 'custom_ocr',
|
||||
AZURE_MISTRAL_OCR = 'azure_mistral_ocr',
|
||||
VERTEXAI_MISTRAL_OCR = 'vertexai_mistral_ocr',
|
||||
}
|
||||
|
||||
export enum SearchCategories {
|
||||
|
|
@ -648,11 +678,35 @@ export const balanceSchema = z.object({
|
|||
refillAmount: z.number().optional().default(10000),
|
||||
});
|
||||
|
||||
export const memorySchema = z.object({
|
||||
disabled: z.boolean().optional(),
|
||||
validKeys: z.array(z.string()).optional(),
|
||||
tokenLimit: z.number().optional(),
|
||||
personalize: z.boolean().default(true),
|
||||
messageWindowSize: z.number().optional().default(5),
|
||||
agent: z
|
||||
.union([
|
||||
z.object({
|
||||
id: z.string(),
|
||||
}),
|
||||
z.object({
|
||||
provider: z.string(),
|
||||
model: z.string(),
|
||||
instructions: z.string().optional(),
|
||||
model_parameters: z.record(z.any()).optional(),
|
||||
}),
|
||||
])
|
||||
.optional(),
|
||||
});
|
||||
|
||||
export type TMemoryConfig = z.infer<typeof memorySchema>;
|
||||
|
||||
export const configSchema = z.object({
|
||||
version: z.string(),
|
||||
cache: z.boolean().default(true),
|
||||
ocr: ocrSchema.optional(),
|
||||
webSearch: webSearchSchema.optional(),
|
||||
memory: memorySchema.optional(),
|
||||
secureImageLinks: z.boolean().optional(),
|
||||
imageOutputType: z.nativeEnum(EImageOutputType).default(EImageOutputType.PNG),
|
||||
includedTools: z.array(z.string()).optional(),
|
||||
|
|
@ -693,7 +747,7 @@ export const configSchema = z.object({
|
|||
[EModelEndpoint.azureOpenAI]: azureEndpointSchema.optional(),
|
||||
[EModelEndpoint.azureAssistants]: assistantEndpointSchema.optional(),
|
||||
[EModelEndpoint.assistants]: assistantEndpointSchema.optional(),
|
||||
[EModelEndpoint.agents]: agentsEndpointSChema.optional(),
|
||||
[EModelEndpoint.agents]: agentsEndpointSchema.optional(),
|
||||
[EModelEndpoint.custom]: z.array(endpointSchema.partial()).optional(),
|
||||
[EModelEndpoint.bedrock]: baseEndpointSchema.optional(),
|
||||
})
|
||||
|
|
@ -852,7 +906,6 @@ export const defaultModels = {
|
|||
[EModelEndpoint.assistants]: [...sharedOpenAIModels, 'chatgpt-4o-latest'],
|
||||
[EModelEndpoint.agents]: sharedOpenAIModels, // TODO: Add agent models (agentsModels)
|
||||
[EModelEndpoint.google]: [
|
||||
// Shared Google Models between Vertex AI & Gen AI
|
||||
// Gemini 2.0 Models
|
||||
'gemini-2.0-flash-001',
|
||||
'gemini-2.0-flash-exp',
|
||||
|
|
@ -896,19 +949,11 @@ export const initialModelsConfig: TModelsConfig = {
|
|||
[EModelEndpoint.bedrock]: defaultModels[EModelEndpoint.bedrock],
|
||||
};
|
||||
|
||||
export const EndpointURLs: { [key in EModelEndpoint]: string } = {
|
||||
[EModelEndpoint.openAI]: `/api/ask/${EModelEndpoint.openAI}`,
|
||||
[EModelEndpoint.google]: `/api/ask/${EModelEndpoint.google}`,
|
||||
[EModelEndpoint.custom]: `/api/ask/${EModelEndpoint.custom}`,
|
||||
[EModelEndpoint.anthropic]: `/api/ask/${EModelEndpoint.anthropic}`,
|
||||
[EModelEndpoint.gptPlugins]: `/api/ask/${EModelEndpoint.gptPlugins}`,
|
||||
[EModelEndpoint.azureOpenAI]: `/api/ask/${EModelEndpoint.azureOpenAI}`,
|
||||
[EModelEndpoint.chatGPTBrowser]: `/api/ask/${EModelEndpoint.chatGPTBrowser}`,
|
||||
[EModelEndpoint.azureAssistants]: '/api/assistants/v1/chat',
|
||||
export const EndpointURLs = {
|
||||
[EModelEndpoint.assistants]: '/api/assistants/v2/chat',
|
||||
[EModelEndpoint.azureAssistants]: '/api/assistants/v1/chat',
|
||||
[EModelEndpoint.agents]: `/api/${EModelEndpoint.agents}/chat`,
|
||||
[EModelEndpoint.bedrock]: `/api/${EModelEndpoint.bedrock}/chat`,
|
||||
};
|
||||
} as const;
|
||||
|
||||
export const modularEndpoints = new Set<EModelEndpoint | string>([
|
||||
EModelEndpoint.gptPlugins,
|
||||
|
|
@ -1103,6 +1148,10 @@ export enum CacheKeys {
|
|||
* Key for in-progress flow states.
|
||||
*/
|
||||
FLOWS = 'flows',
|
||||
/**
|
||||
* Key for individual MCP Tool Manifests.
|
||||
*/
|
||||
MCP_TOOLS = 'mcp_tools',
|
||||
/**
|
||||
* Key for pending chat requests (concurrency check)
|
||||
*/
|
||||
|
|
@ -1207,6 +1256,10 @@ export enum ErrorTypes {
|
|||
* Google provider returned an error
|
||||
*/
|
||||
GOOGLE_ERROR = 'google_error',
|
||||
/**
|
||||
* Google provider does not allow custom tools with built-in tools
|
||||
*/
|
||||
GOOGLE_TOOL_CONFLICT = 'google_tool_conflict',
|
||||
/**
|
||||
* Invalid Agent Provider (excluded by Admin)
|
||||
*/
|
||||
|
|
@ -1290,6 +1343,10 @@ export enum SettingsTabValues {
|
|||
* Chat input commands
|
||||
*/
|
||||
COMMANDS = 'commands',
|
||||
/**
|
||||
* Tab for Personalization Settings
|
||||
*/
|
||||
PERSONALIZATION = 'personalization',
|
||||
}
|
||||
|
||||
export enum STTProviders {
|
||||
|
|
@ -1325,9 +1382,9 @@ export enum TTSProviders {
|
|||
/** Enum for app-wide constants */
|
||||
export enum Constants {
|
||||
/** Key for the app's version. */
|
||||
VERSION = 'v0.7.8',
|
||||
VERSION = 'v0.7.9-rc1',
|
||||
/** Key for the Custom Config's version (librechat.yaml). */
|
||||
CONFIG_VERSION = '1.2.6',
|
||||
CONFIG_VERSION = '1.2.8',
|
||||
/** Standard value for the first message's `parentMessageId` value, to indicate no parent exists. */
|
||||
NO_PARENT = '00000000-0000-0000-0000-000000000000',
|
||||
/** Standard value for the initial conversationId before a request is sent */
|
||||
|
|
@ -1354,6 +1411,8 @@ export enum Constants {
|
|||
GLOBAL_PROJECT_NAME = 'instance',
|
||||
/** Delimiter for MCP tools */
|
||||
mcp_delimiter = '_mcp_',
|
||||
/** Prefix for MCP plugins */
|
||||
mcp_prefix = 'mcp_',
|
||||
/** Placeholder Agent ID for Ephemeral Agents */
|
||||
EPHEMERAL_AGENT_ID = 'ephemeral',
|
||||
}
|
||||
|
|
@ -1397,6 +1456,18 @@ export enum LocalStorageKeys {
|
|||
LAST_CODE_TOGGLE_ = 'LAST_CODE_TOGGLE_',
|
||||
/** Last checked toggle for Web Search per conversation ID */
|
||||
LAST_WEB_SEARCH_TOGGLE_ = 'LAST_WEB_SEARCH_TOGGLE_',
|
||||
/** Last checked toggle for File Search per conversation ID */
|
||||
LAST_FILE_SEARCH_TOGGLE_ = 'LAST_FILE_SEARCH_TOGGLE_',
|
||||
/** Key for the last selected agent provider */
|
||||
LAST_AGENT_PROVIDER = 'lastAgentProvider',
|
||||
/** Key for the last selected agent model */
|
||||
LAST_AGENT_MODEL = 'lastAgentModel',
|
||||
/** Pin state for MCP tools per conversation ID */
|
||||
PIN_MCP_ = 'PIN_MCP_',
|
||||
/** Pin state for Web Search per conversation ID */
|
||||
PIN_WEB_SEARCH_ = 'PIN_WEB_SEARCH_',
|
||||
/** Pin state for Code Interpreter per conversation ID */
|
||||
PIN_CODE_INTERPRETER_ = 'PIN_CODE_INTERPRETER_',
|
||||
}
|
||||
|
||||
export enum ForkOptions {
|
||||
|
|
|
|||
|
|
@ -11,31 +11,31 @@ export default function createPayload(submission: t.TSubmission) {
|
|||
isContinued,
|
||||
isTemporary,
|
||||
ephemeralAgent,
|
||||
editedContent,
|
||||
} = submission;
|
||||
const { conversationId } = s.tConvoUpdateSchema.parse(conversation);
|
||||
const { endpoint, endpointType } = endpointOption as {
|
||||
const { endpoint: _e, endpointType } = endpointOption as {
|
||||
endpoint: s.EModelEndpoint;
|
||||
endpointType?: s.EModelEndpoint;
|
||||
};
|
||||
|
||||
let server = EndpointURLs[endpointType ?? endpoint];
|
||||
const isEphemeral = s.isEphemeralAgent(endpoint, ephemeralAgent);
|
||||
|
||||
if (isEdited && s.isAssistantsEndpoint(endpoint)) {
|
||||
server += '/modify';
|
||||
} else if (isEdited) {
|
||||
server = server.replace('/ask/', '/edit/');
|
||||
} else if (isEphemeral) {
|
||||
server = `${EndpointURLs[s.EModelEndpoint.agents]}/${endpoint}`;
|
||||
const endpoint = _e as s.EModelEndpoint;
|
||||
let server = `${EndpointURLs[s.EModelEndpoint.agents]}/${endpoint}`;
|
||||
if (s.isAssistantsEndpoint(endpoint)) {
|
||||
server =
|
||||
EndpointURLs[(endpointType ?? endpoint) as 'assistants' | 'azureAssistants'] +
|
||||
(isEdited ? '/modify' : '');
|
||||
}
|
||||
|
||||
const payload: t.TPayload = {
|
||||
...userMessage,
|
||||
...endpointOption,
|
||||
ephemeralAgent: isEphemeral ? ephemeralAgent : undefined,
|
||||
endpoint,
|
||||
ephemeralAgent: s.isAssistantsEndpoint(endpoint) ? undefined : ephemeralAgent,
|
||||
isContinued: !!(isEdited && isContinued),
|
||||
conversationId,
|
||||
isTemporary,
|
||||
editedContent,
|
||||
};
|
||||
|
||||
return { server, payload };
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import type { AxiosResponse } from 'axios';
|
|||
import type * as t from './types';
|
||||
import * as endpoints from './api-endpoints';
|
||||
import * as a from './types/assistants';
|
||||
import * as ag from './types/agents';
|
||||
import * as m from './types/mutations';
|
||||
import * as q from './types/queries';
|
||||
import * as f from './types/files';
|
||||
|
|
@ -10,14 +11,6 @@ import request from './request';
|
|||
import * as s from './schemas';
|
||||
import * as r from './roles';
|
||||
|
||||
export function abortRequestWithMessage(
|
||||
endpoint: string,
|
||||
abortKey: string,
|
||||
message: string,
|
||||
): Promise<void> {
|
||||
return request.post(endpoints.abortRequest(endpoint), { arg: { abortKey, message } });
|
||||
}
|
||||
|
||||
export function revokeUserKey(name: string): Promise<unknown> {
|
||||
return request.delete(endpoints.revokeUserKey(name));
|
||||
}
|
||||
|
|
@ -150,7 +143,11 @@ export const updateUserPlugins = (payload: t.TUpdateUserPlugins) => {
|
|||
|
||||
/* Config */
|
||||
|
||||
export const getStartupConfig = (): Promise<config.TStartupConfig> => {
|
||||
export const getStartupConfig = (): Promise<
|
||||
config.TStartupConfig & {
|
||||
mcpCustomUserVars?: Record<string, { title: string; description: string }>;
|
||||
}
|
||||
> => {
|
||||
return request.get(endpoints.config());
|
||||
};
|
||||
|
||||
|
|
@ -351,7 +348,7 @@ export const updateAction = (data: m.UpdateActionVariables): Promise<m.UpdateAct
|
|||
);
|
||||
};
|
||||
|
||||
export function getActions(): Promise<a.Action[]> {
|
||||
export function getActions(): Promise<ag.Action[]> {
|
||||
return request.get(
|
||||
endpoints.agents({
|
||||
path: 'actions',
|
||||
|
|
@ -407,7 +404,7 @@ export const updateAgent = ({
|
|||
|
||||
export const duplicateAgent = ({
|
||||
agent_id,
|
||||
}: m.DuplicateAgentBody): Promise<{ agent: a.Agent; actions: a.Action[] }> => {
|
||||
}: m.DuplicateAgentBody): Promise<{ agent: a.Agent; actions: ag.Action[] }> => {
|
||||
return request.post(
|
||||
endpoints.agents({
|
||||
path: `${agent_id}/duplicate`,
|
||||
|
|
@ -718,6 +715,12 @@ export function updateAgentPermissions(
|
|||
return request.put(endpoints.updateAgentPermissions(variables.roleName), variables.updates);
|
||||
}
|
||||
|
||||
export function updateMemoryPermissions(
|
||||
variables: m.UpdateMemoryPermVars,
|
||||
): Promise<m.UpdatePermResponse> {
|
||||
return request.put(endpoints.updateMemoryPermissions(variables.roleName), variables.updates);
|
||||
}
|
||||
|
||||
/* Tags */
|
||||
export function getConversationTags(): Promise<t.TConversationTagsResponse> {
|
||||
return request.get(endpoints.conversationTags());
|
||||
|
|
@ -765,6 +768,15 @@ export function getBanner(): Promise<t.TBannerResponse> {
|
|||
return request.get(endpoints.banner());
|
||||
}
|
||||
|
||||
export function updateFeedback(
|
||||
conversationId: string,
|
||||
messageId: string,
|
||||
payload: t.TUpdateFeedbackRequest,
|
||||
): Promise<t.TUpdateFeedbackResponse> {
|
||||
return request.put(endpoints.feedback(conversationId, messageId), payload);
|
||||
}
|
||||
|
||||
// 2FA
|
||||
export function enableTwoFactor(): Promise<t.TEnable2FAResponse> {
|
||||
return request.get(endpoints.enableTwoFactor());
|
||||
}
|
||||
|
|
@ -790,3 +802,33 @@ export function verifyTwoFactorTemp(
|
|||
): Promise<t.TVerify2FATempResponse> {
|
||||
return request.post(endpoints.verifyTwoFactorTemp(), payload);
|
||||
}
|
||||
|
||||
/* Memories */
|
||||
export const getMemories = (): Promise<q.MemoriesResponse> => {
|
||||
return request.get(endpoints.memories());
|
||||
};
|
||||
|
||||
export const deleteMemory = (key: string): Promise<void> => {
|
||||
return request.delete(endpoints.memory(key));
|
||||
};
|
||||
|
||||
export const updateMemory = (
|
||||
key: string,
|
||||
value: string,
|
||||
originalKey?: string,
|
||||
): Promise<q.TUserMemory> => {
|
||||
return request.patch(endpoints.memory(originalKey || key), { key, value });
|
||||
};
|
||||
|
||||
export const updateMemoryPreferences = (preferences: {
|
||||
memories: boolean;
|
||||
}): Promise<{ updated: boolean; preferences: { memories: boolean } }> => {
|
||||
return request.patch(endpoints.memoryPreferences(), preferences);
|
||||
};
|
||||
|
||||
export const createMemory = (data: {
|
||||
key: string;
|
||||
value: string;
|
||||
}): Promise<{ created: boolean; memory: q.TUserMemory }> => {
|
||||
return request.post(endpoints.memories(), data);
|
||||
};
|
||||
|
|
|
|||
141
packages/data-provider/src/feedback.ts
Normal file
141
packages/data-provider/src/feedback.ts
Normal file
|
|
@ -0,0 +1,141 @@
|
|||
import { z } from 'zod';
|
||||
|
||||
export type TFeedbackRating = 'thumbsUp' | 'thumbsDown';
|
||||
export const FEEDBACK_RATINGS = ['thumbsUp', 'thumbsDown'] as const;
|
||||
|
||||
export const FEEDBACK_REASON_KEYS = [
|
||||
// Down
|
||||
'not_matched',
|
||||
'inaccurate',
|
||||
'bad_style',
|
||||
'missing_image',
|
||||
'unjustified_refusal',
|
||||
'not_helpful',
|
||||
'other',
|
||||
// Up
|
||||
'accurate_reliable',
|
||||
'creative_solution',
|
||||
'clear_well_written',
|
||||
'attention_to_detail',
|
||||
] as const;
|
||||
|
||||
export type TFeedbackTagKey = (typeof FEEDBACK_REASON_KEYS)[number];
|
||||
|
||||
export interface TFeedbackTag {
|
||||
key: TFeedbackTagKey;
|
||||
label: string;
|
||||
direction: TFeedbackRating;
|
||||
icon: string;
|
||||
}
|
||||
|
||||
// --- Tag Registry ---
|
||||
export const FEEDBACK_TAGS: TFeedbackTag[] = [
|
||||
// Thumbs Down
|
||||
{
|
||||
key: 'not_matched',
|
||||
label: 'com_ui_feedback_tag_not_matched',
|
||||
direction: 'thumbsDown',
|
||||
icon: 'AlertCircle',
|
||||
},
|
||||
{
|
||||
key: 'inaccurate',
|
||||
label: 'com_ui_feedback_tag_inaccurate',
|
||||
direction: 'thumbsDown',
|
||||
icon: 'AlertCircle',
|
||||
},
|
||||
{
|
||||
key: 'bad_style',
|
||||
label: 'com_ui_feedback_tag_bad_style',
|
||||
direction: 'thumbsDown',
|
||||
icon: 'PenTool',
|
||||
},
|
||||
{
|
||||
key: 'missing_image',
|
||||
label: 'com_ui_feedback_tag_missing_image',
|
||||
direction: 'thumbsDown',
|
||||
icon: 'ImageOff',
|
||||
},
|
||||
{
|
||||
key: 'unjustified_refusal',
|
||||
label: 'com_ui_feedback_tag_unjustified_refusal',
|
||||
direction: 'thumbsDown',
|
||||
icon: 'Ban',
|
||||
},
|
||||
{
|
||||
key: 'not_helpful',
|
||||
label: 'com_ui_feedback_tag_not_helpful',
|
||||
direction: 'thumbsDown',
|
||||
icon: 'ThumbsDown',
|
||||
},
|
||||
{
|
||||
key: 'other',
|
||||
label: 'com_ui_feedback_tag_other',
|
||||
direction: 'thumbsDown',
|
||||
icon: 'HelpCircle',
|
||||
},
|
||||
// Thumbs Up
|
||||
{
|
||||
key: 'accurate_reliable',
|
||||
label: 'com_ui_feedback_tag_accurate_reliable',
|
||||
direction: 'thumbsUp',
|
||||
icon: 'CheckCircle',
|
||||
},
|
||||
{
|
||||
key: 'creative_solution',
|
||||
label: 'com_ui_feedback_tag_creative_solution',
|
||||
direction: 'thumbsUp',
|
||||
icon: 'Lightbulb',
|
||||
},
|
||||
{
|
||||
key: 'clear_well_written',
|
||||
label: 'com_ui_feedback_tag_clear_well_written',
|
||||
direction: 'thumbsUp',
|
||||
icon: 'PenTool',
|
||||
},
|
||||
{
|
||||
key: 'attention_to_detail',
|
||||
label: 'com_ui_feedback_tag_attention_to_detail',
|
||||
direction: 'thumbsUp',
|
||||
icon: 'Search',
|
||||
},
|
||||
];
|
||||
|
||||
export function getTagsForRating(rating: TFeedbackRating): TFeedbackTag[] {
|
||||
return FEEDBACK_TAGS.filter((tag) => tag.direction === rating);
|
||||
}
|
||||
|
||||
export const feedbackTagKeySchema = z.enum(FEEDBACK_REASON_KEYS);
|
||||
export const feedbackRatingSchema = z.enum(FEEDBACK_RATINGS);
|
||||
|
||||
export const feedbackSchema = z.object({
|
||||
rating: feedbackRatingSchema,
|
||||
tag: feedbackTagKeySchema,
|
||||
text: z.string().max(1024).optional(),
|
||||
});
|
||||
|
||||
export type TMinimalFeedback = z.infer<typeof feedbackSchema>;
|
||||
|
||||
export type TFeedback = {
|
||||
rating: TFeedbackRating;
|
||||
tag: TFeedbackTag | undefined;
|
||||
text?: string;
|
||||
};
|
||||
|
||||
export function toMinimalFeedback(feedback: TFeedback | undefined): TMinimalFeedback | undefined {
|
||||
if (!feedback?.rating || !feedback?.tag || !feedback.tag.key) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return {
|
||||
rating: feedback.rating,
|
||||
tag: feedback.tag.key,
|
||||
text: feedback.text,
|
||||
};
|
||||
}
|
||||
|
||||
export function getTagByKey(key: TFeedbackTagKey | undefined): TFeedbackTag | undefined {
|
||||
if (!key) {
|
||||
return undefined;
|
||||
}
|
||||
return FEEDBACK_TAGS.find((tag) => tag.key === key);
|
||||
}
|
||||
|
|
@ -1,7 +1,6 @@
|
|||
/* eslint-disable max-len */
|
||||
import { z } from 'zod';
|
||||
import { EModelEndpoint } from './schemas';
|
||||
import type { FileConfig, EndpointFileConfig } from './types/files';
|
||||
import type { EndpointFileConfig, FileConfig } from './types/files';
|
||||
|
||||
export const supportsFiles = {
|
||||
[EModelEndpoint.openAI]: true,
|
||||
|
|
@ -50,6 +49,8 @@ export const fullMimeTypesList = [
|
|||
'text/javascript',
|
||||
'image/gif',
|
||||
'image/png',
|
||||
'image/heic',
|
||||
'image/heif',
|
||||
'application/x-tar',
|
||||
'application/typescript',
|
||||
'application/xml',
|
||||
|
|
@ -81,6 +82,8 @@ export const codeInterpreterMimeTypesList = [
|
|||
'text/javascript',
|
||||
'image/gif',
|
||||
'image/png',
|
||||
'image/heic',
|
||||
'image/heif',
|
||||
'application/x-tar',
|
||||
'application/typescript',
|
||||
'application/xml',
|
||||
|
|
@ -106,18 +109,18 @@ export const retrievalMimeTypesList = [
|
|||
'text/plain',
|
||||
];
|
||||
|
||||
export const imageExtRegex = /\.(jpg|jpeg|png|gif|webp)$/i;
|
||||
export const imageExtRegex = /\.(jpg|jpeg|png|gif|webp|heic|heif)$/i;
|
||||
|
||||
export const excelMimeTypes =
|
||||
/^application\/(vnd\.ms-excel|msexcel|x-msexcel|x-ms-excel|x-excel|x-dos_ms_excel|xls|x-xls|vnd\.openxmlformats-officedocument\.spreadsheetml\.sheet)$/;
|
||||
|
||||
export const textMimeTypes =
|
||||
/^(text\/(x-c|x-csharp|tab-separated-values|x-c\+\+|x-java|html|markdown|x-php|x-python|x-script\.python|x-ruby|x-tex|plain|css|vtt|javascript|csv))$/;
|
||||
/^(text\/(x-c|x-csharp|tab-separated-values|x-c\+\+|x-h|x-java|html|markdown|x-php|x-python|x-script\.python|x-ruby|x-tex|plain|css|vtt|javascript|csv))$/;
|
||||
|
||||
export const applicationMimeTypes =
|
||||
/^(application\/(epub\+zip|csv|json|pdf|x-tar|typescript|vnd\.openxmlformats-officedocument\.(wordprocessingml\.document|presentationml\.presentation|spreadsheetml\.sheet)|xml|zip))$/;
|
||||
|
||||
export const imageMimeTypes = /^image\/(jpeg|gif|png|webp)$/;
|
||||
export const imageMimeTypes = /^image\/(jpeg|gif|png|webp|heic|heif)$/;
|
||||
|
||||
export const supportedMimeTypes = [
|
||||
textMimeTypes,
|
||||
|
|
@ -139,6 +142,7 @@ export const codeTypeMapping: { [key: string]: string } = {
|
|||
c: 'text/x-c',
|
||||
cs: 'text/x-csharp',
|
||||
cpp: 'text/x-c++',
|
||||
h: 'text/x-h',
|
||||
md: 'text/markdown',
|
||||
php: 'text/x-php',
|
||||
py: 'text/x-python',
|
||||
|
|
@ -156,7 +160,7 @@ export const codeTypeMapping: { [key: string]: string } = {
|
|||
};
|
||||
|
||||
export const retrievalMimeTypes = [
|
||||
/^(text\/(x-c|x-c\+\+|html|x-java|markdown|x-php|x-python|x-script\.python|x-ruby|x-tex|plain|vtt|xml))$/,
|
||||
/^(text\/(x-c|x-c\+\+|x-h|html|x-java|markdown|x-php|x-python|x-script\.python|x-ruby|x-tex|plain|vtt|xml))$/,
|
||||
/^(application\/(json|pdf|vnd\.openxmlformats-officedocument\.(wordprocessingml\.document|presentationml\.presentation)))$/,
|
||||
];
|
||||
|
||||
|
|
@ -188,6 +192,12 @@ export const fileConfig = {
|
|||
},
|
||||
serverFileSizeLimit: defaultSizeLimit,
|
||||
avatarSizeLimit: mbToBytes(2),
|
||||
clientImageResize: {
|
||||
enabled: false,
|
||||
maxWidth: 1900,
|
||||
maxHeight: 1900,
|
||||
quality: 0.92,
|
||||
},
|
||||
checkType: function (fileType: string, supportedTypes: RegExp[] = supportedMimeTypes) {
|
||||
return supportedTypes.some((regex) => regex.test(fileType));
|
||||
},
|
||||
|
|
@ -228,6 +238,14 @@ export const fileConfigSchema = z.object({
|
|||
px: z.number().min(0).optional(),
|
||||
})
|
||||
.optional(),
|
||||
clientImageResize: z
|
||||
.object({
|
||||
enabled: z.boolean().optional(),
|
||||
maxWidth: z.number().min(0).optional(),
|
||||
maxHeight: z.number().min(0).optional(),
|
||||
quality: z.number().min(0).max(1).optional(),
|
||||
})
|
||||
.optional(),
|
||||
});
|
||||
|
||||
/** Helper function to safely convert string patterns to RegExp objects */
|
||||
|
|
@ -256,6 +274,14 @@ export function mergeFileConfig(dynamic: z.infer<typeof fileConfigSchema> | unde
|
|||
mergedConfig.avatarSizeLimit = mbToBytes(dynamic.avatarSizeLimit);
|
||||
}
|
||||
|
||||
// Merge clientImageResize configuration
|
||||
if (dynamic.clientImageResize !== undefined) {
|
||||
mergedConfig.clientImageResize = {
|
||||
...mergedConfig.clientImageResize,
|
||||
...dynamic.clientImageResize,
|
||||
};
|
||||
}
|
||||
|
||||
if (!dynamic.endpoints) {
|
||||
return mergedConfig;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -467,7 +467,11 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): voi
|
|||
}
|
||||
|
||||
/* Default value checks */
|
||||
if (setting.type === SettingTypes.Number && isNaN(setting.default as number) && setting.default != null) {
|
||||
if (
|
||||
setting.type === SettingTypes.Number &&
|
||||
isNaN(setting.default as number) &&
|
||||
setting.default != null
|
||||
) {
|
||||
errors.push({
|
||||
code: ZodIssueCode.custom,
|
||||
message: `Invalid default value for setting ${setting.key}. Must be a number.`,
|
||||
|
|
@ -475,7 +479,11 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): voi
|
|||
});
|
||||
}
|
||||
|
||||
if (setting.type === SettingTypes.Boolean && typeof setting.default !== 'boolean' && setting.default != null) {
|
||||
if (
|
||||
setting.type === SettingTypes.Boolean &&
|
||||
typeof setting.default !== 'boolean' &&
|
||||
setting.default != null
|
||||
) {
|
||||
errors.push({
|
||||
code: ZodIssueCode.custom,
|
||||
message: `Invalid default value for setting ${setting.key}. Must be a boolean.`,
|
||||
|
|
@ -485,7 +493,8 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): voi
|
|||
|
||||
if (
|
||||
(setting.type === SettingTypes.String || setting.type === SettingTypes.Enum) &&
|
||||
typeof setting.default !== 'string' && setting.default != null
|
||||
typeof setting.default !== 'string' &&
|
||||
setting.default != null
|
||||
) {
|
||||
errors.push({
|
||||
code: ZodIssueCode.custom,
|
||||
|
|
@ -520,6 +529,19 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): voi
|
|||
path: ['default'],
|
||||
});
|
||||
}
|
||||
|
||||
// Validate enumMappings
|
||||
if (setting.enumMappings && setting.type === SettingTypes.Enum && setting.options) {
|
||||
for (const option of setting.options) {
|
||||
if (!(option in setting.enumMappings)) {
|
||||
errors.push({
|
||||
code: ZodIssueCode.custom,
|
||||
message: `Missing enumMapping for option "${option}" in setting ${setting.key}.`,
|
||||
path: ['enumMappings'],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (errors.length > 0) {
|
||||
|
|
|
|||
|
|
@ -16,6 +16,8 @@ export * from './models';
|
|||
export * from './mcp';
|
||||
/* web search */
|
||||
export * from './web';
|
||||
/* memory */
|
||||
export * from './memory';
|
||||
/* RBAC */
|
||||
export * from './permissions';
|
||||
export * from './roles';
|
||||
|
|
@ -39,4 +41,6 @@ import * as dataService from './data-service';
|
|||
export * from './utils';
|
||||
export * from './actions';
|
||||
export { default as createPayload } from './createPayload';
|
||||
/* feedback */
|
||||
export * from './feedback';
|
||||
export * from './parameterSettings';
|
||||
|
|
|
|||
|
|
@ -46,6 +46,8 @@ export enum QueryKeys {
|
|||
health = 'health',
|
||||
userTerms = 'userTerms',
|
||||
banner = 'banner',
|
||||
/* Memories */
|
||||
memories = 'memories',
|
||||
}
|
||||
|
||||
export enum MutationKeys {
|
||||
|
|
@ -70,4 +72,5 @@ export enum MutationKeys {
|
|||
updateRole = 'updateRole',
|
||||
enableTwoFactor = 'enableTwoFactor',
|
||||
verifyTwoFactor = 'verifyTwoFactor',
|
||||
updateMemoryPreferences = 'updateMemoryPreferences',
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import { z } from 'zod';
|
||||
import { TokenExchangeMethodEnum } from './types/agents';
|
||||
import { extractEnvVariable } from './utils';
|
||||
|
||||
const BaseOptionsSchema = z.object({
|
||||
|
|
@ -7,6 +8,45 @@ const BaseOptionsSchema = z.object({
|
|||
initTimeout: z.number().optional(),
|
||||
/** Controls visibility in chat dropdown menu (MCPSelect) */
|
||||
chatMenu: z.boolean().optional(),
|
||||
/**
|
||||
* Controls server instruction behavior:
|
||||
* - undefined/not set: No instructions included (default)
|
||||
* - true: Use server-provided instructions
|
||||
* - string: Use custom instructions (overrides server-provided)
|
||||
*/
|
||||
serverInstructions: z.union([z.boolean(), z.string()]).optional(),
|
||||
/**
|
||||
* OAuth configuration for SSE and Streamable HTTP transports
|
||||
* - Optional: OAuth can be auto-discovered on 401 responses
|
||||
* - Pre-configured values will skip discovery steps
|
||||
*/
|
||||
oauth: z
|
||||
.object({
|
||||
/** OAuth authorization endpoint (optional - can be auto-discovered) */
|
||||
authorization_url: z.string().url().optional(),
|
||||
/** OAuth token endpoint (optional - can be auto-discovered) */
|
||||
token_url: z.string().url().optional(),
|
||||
/** OAuth client ID (optional - can use dynamic registration) */
|
||||
client_id: z.string().optional(),
|
||||
/** OAuth client secret (optional - can use dynamic registration) */
|
||||
client_secret: z.string().optional(),
|
||||
/** OAuth scopes to request */
|
||||
scope: z.string().optional(),
|
||||
/** OAuth redirect URI (defaults to /api/mcp/{serverName}/oauth/callback) */
|
||||
redirect_uri: z.string().url().optional(),
|
||||
/** Token exchange method */
|
||||
token_exchange_method: z.nativeEnum(TokenExchangeMethodEnum).optional(),
|
||||
})
|
||||
.optional(),
|
||||
customUserVars: z
|
||||
.record(
|
||||
z.string(),
|
||||
z.object({
|
||||
title: z.string(),
|
||||
description: z.string(),
|
||||
}),
|
||||
)
|
||||
.optional(),
|
||||
});
|
||||
|
||||
export const StdioOptionsSchema = BaseOptionsSchema.extend({
|
||||
|
|
@ -112,41 +152,3 @@ export const MCPOptionsSchema = z.union([
|
|||
export const MCPServersSchema = z.record(z.string(), MCPOptionsSchema);
|
||||
|
||||
export type MCPOptions = z.infer<typeof MCPOptionsSchema>;
|
||||
|
||||
/**
|
||||
* Recursively processes an object to replace environment variables in string values
|
||||
* @param {MCPOptions} obj - The object to process
|
||||
* @param {string} [userId] - The user ID
|
||||
* @returns {MCPOptions} - The processed object with environment variables replaced
|
||||
*/
|
||||
export function processMCPEnv(obj: Readonly<MCPOptions>, userId?: string): MCPOptions {
|
||||
if (obj === null || obj === undefined) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
const newObj: MCPOptions = structuredClone(obj);
|
||||
|
||||
if ('env' in newObj && newObj.env) {
|
||||
const processedEnv: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(newObj.env)) {
|
||||
processedEnv[key] = extractEnvVariable(value);
|
||||
}
|
||||
newObj.env = processedEnv;
|
||||
} else if ('headers' in newObj && newObj.headers) {
|
||||
const processedHeaders: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(newObj.headers)) {
|
||||
if (value === '{{LIBRECHAT_USER_ID}}' && userId != null && userId) {
|
||||
processedHeaders[key] = userId;
|
||||
continue;
|
||||
}
|
||||
processedHeaders[key] = extractEnvVariable(value);
|
||||
}
|
||||
newObj.headers = processedHeaders;
|
||||
}
|
||||
|
||||
if ('url' in newObj && newObj.url) {
|
||||
newObj.url = extractEnvVariable(newObj.url);
|
||||
}
|
||||
|
||||
return newObj;
|
||||
}
|
||||
|
|
|
|||
62
packages/data-provider/src/memory.ts
Normal file
62
packages/data-provider/src/memory.ts
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
import type { TCustomConfig, TMemoryConfig } from './config';
|
||||
|
||||
/**
|
||||
* Loads the memory configuration and validates it
|
||||
* @param config - The memory configuration from librechat.yaml
|
||||
* @returns The validated memory configuration
|
||||
*/
|
||||
export function loadMemoryConfig(config: TCustomConfig['memory']): TMemoryConfig | undefined {
|
||||
if (!config) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// If disabled is explicitly true, return the config as-is
|
||||
if (config.disabled === true) {
|
||||
return config;
|
||||
}
|
||||
|
||||
// Check if the agent configuration is valid
|
||||
const hasValidAgent =
|
||||
config.agent &&
|
||||
(('id' in config.agent && !!config.agent.id) ||
|
||||
('provider' in config.agent &&
|
||||
'model' in config.agent &&
|
||||
!!config.agent.provider &&
|
||||
!!config.agent.model));
|
||||
|
||||
// If agent config is invalid, treat as disabled
|
||||
if (!hasValidAgent) {
|
||||
return {
|
||||
...config,
|
||||
disabled: true,
|
||||
};
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if memory feature is enabled based on the configuration
|
||||
* @param config - The memory configuration
|
||||
* @returns True if memory is enabled, false otherwise
|
||||
*/
|
||||
export function isMemoryEnabled(config: TMemoryConfig | undefined): boolean {
|
||||
if (!config) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (config.disabled === true) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if agent configuration is valid
|
||||
const hasValidAgent =
|
||||
config.agent &&
|
||||
(('id' in config.agent && !!config.agent.id) ||
|
||||
('provider' in config.agent &&
|
||||
'model' in config.agent &&
|
||||
!!config.agent.provider &&
|
||||
!!config.agent.model));
|
||||
|
||||
return !!hasValidAgent;
|
||||
}
|
||||
|
|
@ -4,6 +4,7 @@ import {
|
|||
openAISettings,
|
||||
googleSettings,
|
||||
ReasoningEffort,
|
||||
ReasoningSummary,
|
||||
BedrockProviders,
|
||||
anthropicSettings,
|
||||
} from './types';
|
||||
|
|
@ -71,6 +72,11 @@ const baseDefinitions: Record<string, SettingDefinition> = {
|
|||
default: ImageDetail.auto,
|
||||
component: 'slider',
|
||||
options: [ImageDetail.low, ImageDetail.auto, ImageDetail.high],
|
||||
enumMappings: {
|
||||
[ImageDetail.low]: 'com_ui_low',
|
||||
[ImageDetail.auto]: 'com_ui_auto',
|
||||
[ImageDetail.high]: 'com_ui_high',
|
||||
},
|
||||
optionType: 'conversation',
|
||||
columnSpan: 2,
|
||||
},
|
||||
|
|
@ -83,7 +89,7 @@ const createDefinition = (
|
|||
return { ...base, ...overrides } as SettingDefinition;
|
||||
};
|
||||
|
||||
const librechat: Record<string, SettingDefinition> = {
|
||||
export const librechat = {
|
||||
modelLabel: {
|
||||
key: 'modelLabel',
|
||||
label: 'com_endpoint_custom_name',
|
||||
|
|
@ -94,7 +100,7 @@ const librechat: Record<string, SettingDefinition> = {
|
|||
placeholder: 'com_endpoint_openai_custom_name_placeholder',
|
||||
placeholderCode: true,
|
||||
optionType: 'conversation',
|
||||
},
|
||||
} as const,
|
||||
maxContextTokens: {
|
||||
key: 'maxContextTokens',
|
||||
label: 'com_endpoint_context_tokens',
|
||||
|
|
@ -107,7 +113,7 @@ const librechat: Record<string, SettingDefinition> = {
|
|||
descriptionCode: true,
|
||||
optionType: 'model',
|
||||
columnSpan: 2,
|
||||
},
|
||||
} as const,
|
||||
resendFiles: {
|
||||
key: 'resendFiles',
|
||||
label: 'com_endpoint_plug_resend_files',
|
||||
|
|
@ -120,7 +126,7 @@ const librechat: Record<string, SettingDefinition> = {
|
|||
optionType: 'conversation',
|
||||
showDefault: false,
|
||||
columnSpan: 2,
|
||||
},
|
||||
} as const,
|
||||
promptPrefix: {
|
||||
key: 'promptPrefix',
|
||||
label: 'com_endpoint_prompt_prefix',
|
||||
|
|
@ -131,7 +137,7 @@ const librechat: Record<string, SettingDefinition> = {
|
|||
placeholder: 'com_endpoint_openai_prompt_prefix_placeholder',
|
||||
placeholderCode: true,
|
||||
optionType: 'model',
|
||||
},
|
||||
} as const,
|
||||
};
|
||||
|
||||
const openAIParams: Record<string, SettingDefinition> = {
|
||||
|
|
@ -211,9 +217,70 @@ const openAIParams: Record<string, SettingDefinition> = {
|
|||
description: 'com_endpoint_openai_reasoning_effort',
|
||||
descriptionCode: true,
|
||||
type: 'enum',
|
||||
default: ReasoningEffort.medium,
|
||||
default: ReasoningEffort.none,
|
||||
component: 'slider',
|
||||
options: [ReasoningEffort.low, ReasoningEffort.medium, ReasoningEffort.high],
|
||||
options: [
|
||||
ReasoningEffort.none,
|
||||
ReasoningEffort.low,
|
||||
ReasoningEffort.medium,
|
||||
ReasoningEffort.high,
|
||||
],
|
||||
enumMappings: {
|
||||
[ReasoningEffort.none]: 'com_ui_none',
|
||||
[ReasoningEffort.low]: 'com_ui_low',
|
||||
[ReasoningEffort.medium]: 'com_ui_medium',
|
||||
[ReasoningEffort.high]: 'com_ui_high',
|
||||
},
|
||||
optionType: 'model',
|
||||
columnSpan: 4,
|
||||
},
|
||||
useResponsesApi: {
|
||||
key: 'useResponsesApi',
|
||||
label: 'com_endpoint_use_responses_api',
|
||||
labelCode: true,
|
||||
description: 'com_endpoint_openai_use_responses_api',
|
||||
descriptionCode: true,
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
component: 'switch',
|
||||
optionType: 'model',
|
||||
showDefault: false,
|
||||
columnSpan: 2,
|
||||
},
|
||||
web_search: {
|
||||
key: 'web_search',
|
||||
label: 'com_ui_web_search',
|
||||
labelCode: true,
|
||||
description: 'com_endpoint_openai_use_web_search',
|
||||
descriptionCode: true,
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
component: 'switch',
|
||||
optionType: 'model',
|
||||
showDefault: false,
|
||||
columnSpan: 2,
|
||||
},
|
||||
reasoning_summary: {
|
||||
key: 'reasoning_summary',
|
||||
label: 'com_endpoint_reasoning_summary',
|
||||
labelCode: true,
|
||||
description: 'com_endpoint_openai_reasoning_summary',
|
||||
descriptionCode: true,
|
||||
type: 'enum',
|
||||
default: ReasoningSummary.none,
|
||||
component: 'slider',
|
||||
options: [
|
||||
ReasoningSummary.none,
|
||||
ReasoningSummary.auto,
|
||||
ReasoningSummary.concise,
|
||||
ReasoningSummary.detailed,
|
||||
],
|
||||
enumMappings: {
|
||||
[ReasoningSummary.none]: 'com_ui_none',
|
||||
[ReasoningSummary.auto]: 'com_ui_auto',
|
||||
[ReasoningSummary.concise]: 'com_ui_concise',
|
||||
[ReasoningSummary.detailed]: 'com_ui_detailed',
|
||||
},
|
||||
optionType: 'model',
|
||||
columnSpan: 4,
|
||||
},
|
||||
|
|
@ -347,7 +414,9 @@ const bedrock: Record<string, SettingDefinition> = {
|
|||
labelCode: true,
|
||||
type: 'number',
|
||||
component: 'input',
|
||||
placeholder: 'com_endpoint_anthropic_maxoutputtokens',
|
||||
description: 'com_endpoint_anthropic_maxoutputtokens',
|
||||
descriptionCode: true,
|
||||
placeholder: 'com_nav_theme_system',
|
||||
placeholderCode: true,
|
||||
optionType: 'model',
|
||||
columnSpan: 2,
|
||||
|
|
@ -450,6 +519,50 @@ const google: Record<string, SettingDefinition> = {
|
|||
optionType: 'model',
|
||||
columnSpan: 2,
|
||||
},
|
||||
thinking: {
|
||||
key: 'thinking',
|
||||
label: 'com_endpoint_thinking',
|
||||
labelCode: true,
|
||||
description: 'com_endpoint_google_thinking',
|
||||
descriptionCode: true,
|
||||
type: 'boolean',
|
||||
default: googleSettings.thinking.default,
|
||||
component: 'switch',
|
||||
optionType: 'conversation',
|
||||
showDefault: false,
|
||||
columnSpan: 2,
|
||||
},
|
||||
thinkingBudget: {
|
||||
key: 'thinkingBudget',
|
||||
label: 'com_endpoint_thinking_budget',
|
||||
labelCode: true,
|
||||
description: 'com_endpoint_google_thinking_budget',
|
||||
descriptionCode: true,
|
||||
placeholder: 'com_ui_auto',
|
||||
placeholderCode: true,
|
||||
type: 'number',
|
||||
component: 'input',
|
||||
range: {
|
||||
min: googleSettings.thinkingBudget.min,
|
||||
max: googleSettings.thinkingBudget.max,
|
||||
step: googleSettings.thinkingBudget.step,
|
||||
},
|
||||
optionType: 'conversation',
|
||||
columnSpan: 2,
|
||||
},
|
||||
grounding: {
|
||||
key: 'grounding',
|
||||
label: 'com_endpoint_use_search_grounding',
|
||||
labelCode: true,
|
||||
description: 'com_endpoint_google_use_search_grounding',
|
||||
descriptionCode: true,
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
component: 'switch',
|
||||
optionType: 'model',
|
||||
showDefault: false,
|
||||
columnSpan: 2,
|
||||
},
|
||||
};
|
||||
|
||||
const googleConfig: SettingsConfiguration = [
|
||||
|
|
@ -461,6 +574,9 @@ const googleConfig: SettingsConfiguration = [
|
|||
google.topP,
|
||||
google.topK,
|
||||
librechat.resendFiles,
|
||||
google.thinking,
|
||||
google.thinkingBudget,
|
||||
google.grounding,
|
||||
];
|
||||
|
||||
const googleCol1: SettingsConfiguration = [
|
||||
|
|
@ -476,6 +592,9 @@ const googleCol2: SettingsConfiguration = [
|
|||
google.topP,
|
||||
google.topK,
|
||||
librechat.resendFiles,
|
||||
google.thinking,
|
||||
google.thinkingBudget,
|
||||
google.grounding,
|
||||
];
|
||||
|
||||
const openAI: SettingsConfiguration = [
|
||||
|
|
@ -490,7 +609,10 @@ const openAI: SettingsConfiguration = [
|
|||
baseDefinitions.stop,
|
||||
librechat.resendFiles,
|
||||
baseDefinitions.imageDetail,
|
||||
openAIParams.web_search,
|
||||
openAIParams.reasoning_effort,
|
||||
openAIParams.useResponsesApi,
|
||||
openAIParams.reasoning_summary,
|
||||
];
|
||||
|
||||
const openAICol1: SettingsConfiguration = [
|
||||
|
|
@ -507,9 +629,12 @@ const openAICol2: SettingsConfiguration = [
|
|||
openAIParams.frequency_penalty,
|
||||
openAIParams.presence_penalty,
|
||||
baseDefinitions.stop,
|
||||
openAIParams.reasoning_effort,
|
||||
librechat.resendFiles,
|
||||
baseDefinitions.imageDetail,
|
||||
openAIParams.reasoning_effort,
|
||||
openAIParams.reasoning_summary,
|
||||
openAIParams.useResponsesApi,
|
||||
openAIParams.web_search,
|
||||
];
|
||||
|
||||
const anthropicConfig: SettingsConfiguration = [
|
||||
|
|
|
|||
|
|
@ -122,19 +122,6 @@ export function errorsToString(errors: ZodIssue[]) {
|
|||
.join(' ');
|
||||
}
|
||||
|
||||
/** Resolves header values to env variables if detected */
|
||||
export function resolveHeaders(headers: Record<string, string> | undefined) {
|
||||
const resolvedHeaders = { ...(headers ?? {}) };
|
||||
|
||||
if (headers && typeof headers === 'object' && !Array.isArray(headers)) {
|
||||
Object.keys(headers).forEach((key) => {
|
||||
resolvedHeaders[key] = extractEnvVariable(headers[key]);
|
||||
});
|
||||
}
|
||||
|
||||
return resolvedHeaders;
|
||||
}
|
||||
|
||||
export function getFirstDefinedValue(possibleValues: string[]) {
|
||||
let returnValue;
|
||||
for (const value of possibleValues) {
|
||||
|
|
@ -225,13 +212,15 @@ const extractOmniVersion = (modelStr: string): string => {
|
|||
export const getResponseSender = (endpointOption: t.TEndpointOption): string => {
|
||||
const {
|
||||
model: _m,
|
||||
endpoint,
|
||||
endpoint: _e,
|
||||
endpointType,
|
||||
modelDisplayLabel: _mdl,
|
||||
chatGptLabel: _cgl,
|
||||
modelLabel: _ml,
|
||||
} = endpointOption;
|
||||
|
||||
const endpoint = _e as EModelEndpoint;
|
||||
|
||||
const model = _m ?? '';
|
||||
const modelDisplayLabel = _mdl ?? '';
|
||||
const chatGptLabel = _cgl ?? '';
|
||||
|
|
@ -273,15 +262,11 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
|
|||
if (endpoint === EModelEndpoint.google) {
|
||||
if (modelLabel) {
|
||||
return modelLabel;
|
||||
} else if (model && (model.includes('gemini') || model.includes('learnlm'))) {
|
||||
return 'Gemini';
|
||||
} else if (model?.toLowerCase().includes('gemma') === true) {
|
||||
return 'Gemma';
|
||||
} else if (model && model.includes('code')) {
|
||||
return 'Codey';
|
||||
}
|
||||
|
||||
return 'PaLM2';
|
||||
return 'Gemini';
|
||||
}
|
||||
|
||||
if (endpoint === EModelEndpoint.custom || endpointType === EModelEndpoint.custom) {
|
||||
|
|
|
|||
|
|
@ -16,6 +16,10 @@ export enum PermissionTypes {
|
|||
* Type for Agent Permissions
|
||||
*/
|
||||
AGENTS = 'AGENTS',
|
||||
/**
|
||||
* Type for Memory Permissions
|
||||
*/
|
||||
MEMORIES = 'MEMORIES',
|
||||
/**
|
||||
* Type for Multi-Conversation Permissions
|
||||
*/
|
||||
|
|
@ -45,6 +49,8 @@ export enum Permissions {
|
|||
READ = 'READ',
|
||||
READ_AUTHOR = 'READ_AUTHOR',
|
||||
SHARE = 'SHARE',
|
||||
/** Can disable if desired */
|
||||
OPT_OUT = 'OPT_OUT',
|
||||
}
|
||||
|
||||
export const promptPermissionsSchema = z.object({
|
||||
|
|
@ -60,6 +66,15 @@ export const bookmarkPermissionsSchema = z.object({
|
|||
});
|
||||
export type TBookmarkPermissions = z.infer<typeof bookmarkPermissionsSchema>;
|
||||
|
||||
export const memoryPermissionsSchema = z.object({
|
||||
[Permissions.USE]: z.boolean().default(true),
|
||||
[Permissions.CREATE]: z.boolean().default(true),
|
||||
[Permissions.UPDATE]: z.boolean().default(true),
|
||||
[Permissions.READ]: z.boolean().default(true),
|
||||
[Permissions.OPT_OUT]: z.boolean().default(true),
|
||||
});
|
||||
export type TMemoryPermissions = z.infer<typeof memoryPermissionsSchema>;
|
||||
|
||||
export const agentPermissionsSchema = z.object({
|
||||
[Permissions.SHARED_GLOBAL]: z.boolean().default(false),
|
||||
[Permissions.USE]: z.boolean().default(true),
|
||||
|
|
@ -92,6 +107,7 @@ export type TWebSearchPermissions = z.infer<typeof webSearchPermissionsSchema>;
|
|||
export const permissionsSchema = z.object({
|
||||
[PermissionTypes.PROMPTS]: promptPermissionsSchema,
|
||||
[PermissionTypes.BOOKMARKS]: bookmarkPermissionsSchema,
|
||||
[PermissionTypes.MEMORIES]: memoryPermissionsSchema,
|
||||
[PermissionTypes.AGENTS]: agentPermissionsSchema,
|
||||
[PermissionTypes.MULTI_CONVO]: multiConvoPermissionsSchema,
|
||||
[PermissionTypes.TEMPORARY_CHAT]: temporaryChatPermissionsSchema,
|
||||
|
|
|
|||
|
|
@ -12,23 +12,6 @@ import { QueryKeys } from '../keys';
|
|||
import * as s from '../schemas';
|
||||
import * as t from '../types';
|
||||
|
||||
export const useAbortRequestWithMessage = (): UseMutationResult<
|
||||
void,
|
||||
Error,
|
||||
{ endpoint: string; abortKey: string; message: string }
|
||||
> => {
|
||||
const queryClient = useQueryClient();
|
||||
return useMutation(
|
||||
({ endpoint, abortKey, message }) =>
|
||||
dataService.abortRequestWithMessage(endpoint, abortKey, message),
|
||||
{
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries([QueryKeys.balance]);
|
||||
},
|
||||
},
|
||||
);
|
||||
};
|
||||
|
||||
export const useGetSharedMessages = (
|
||||
shareId: string,
|
||||
config?: UseQueryOptions<t.TSharedMessagesResponse>,
|
||||
|
|
@ -347,3 +330,19 @@ export const useGetCustomConfigSpeechQuery = (
|
|||
},
|
||||
);
|
||||
};
|
||||
|
||||
export const useUpdateFeedbackMutation = (
|
||||
conversationId: string,
|
||||
messageId: string,
|
||||
): UseMutationResult<t.TUpdateFeedbackResponse, Error, t.TUpdateFeedbackRequest> => {
|
||||
const queryClient = useQueryClient();
|
||||
return useMutation(
|
||||
(payload: t.TUpdateFeedbackRequest) =>
|
||||
dataService.updateFeedback(conversationId, messageId, payload),
|
||||
{
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries([QueryKeys.messages, messageId]);
|
||||
},
|
||||
},
|
||||
);
|
||||
};
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import {
|
|||
permissionsSchema,
|
||||
agentPermissionsSchema,
|
||||
promptPermissionsSchema,
|
||||
memoryPermissionsSchema,
|
||||
runCodePermissionsSchema,
|
||||
webSearchPermissionsSchema,
|
||||
bookmarkPermissionsSchema,
|
||||
|
|
@ -48,6 +49,13 @@ const defaultRolesSchema = z.object({
|
|||
[PermissionTypes.BOOKMARKS]: bookmarkPermissionsSchema.extend({
|
||||
[Permissions.USE]: z.boolean().default(true),
|
||||
}),
|
||||
[PermissionTypes.MEMORIES]: memoryPermissionsSchema.extend({
|
||||
[Permissions.USE]: z.boolean().default(true),
|
||||
[Permissions.CREATE]: z.boolean().default(true),
|
||||
[Permissions.UPDATE]: z.boolean().default(true),
|
||||
[Permissions.READ]: z.boolean().default(true),
|
||||
[Permissions.OPT_OUT]: z.boolean().default(true),
|
||||
}),
|
||||
[PermissionTypes.AGENTS]: agentPermissionsSchema.extend({
|
||||
[Permissions.SHARED_GLOBAL]: z.boolean().default(true),
|
||||
[Permissions.USE]: z.boolean().default(true),
|
||||
|
|
@ -86,6 +94,13 @@ export const roleDefaults = defaultRolesSchema.parse({
|
|||
[PermissionTypes.BOOKMARKS]: {
|
||||
[Permissions.USE]: true,
|
||||
},
|
||||
[PermissionTypes.MEMORIES]: {
|
||||
[Permissions.USE]: true,
|
||||
[Permissions.CREATE]: true,
|
||||
[Permissions.UPDATE]: true,
|
||||
[Permissions.READ]: true,
|
||||
[Permissions.OPT_OUT]: true,
|
||||
},
|
||||
[PermissionTypes.AGENTS]: {
|
||||
[Permissions.SHARED_GLOBAL]: true,
|
||||
[Permissions.USE]: true,
|
||||
|
|
@ -110,6 +125,7 @@ export const roleDefaults = defaultRolesSchema.parse({
|
|||
permissions: {
|
||||
[PermissionTypes.PROMPTS]: {},
|
||||
[PermissionTypes.BOOKMARKS]: {},
|
||||
[PermissionTypes.MEMORIES]: {},
|
||||
[PermissionTypes.AGENTS]: {},
|
||||
[PermissionTypes.MULTI_CONVO]: {},
|
||||
[PermissionTypes.TEMPORARY_CHAT]: {},
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
import { z } from 'zod';
|
||||
import { Tools } from './types/assistants';
|
||||
import type { TMessageContentParts, FunctionTool, FunctionToolCall } from './types/assistants';
|
||||
import { TFeedback, feedbackSchema } from './feedback';
|
||||
import type { SearchResultData } from './types/web';
|
||||
import type { TEphemeralAgent } from './types';
|
||||
import type { TFile } from './types/files';
|
||||
|
||||
export const isUUID = z.string().uuid();
|
||||
|
|
@ -90,22 +90,6 @@ export const isAgentsEndpoint = (_endpoint?: EModelEndpoint.agents | null | stri
|
|||
return endpoint === EModelEndpoint.agents;
|
||||
};
|
||||
|
||||
export const isEphemeralAgent = (
|
||||
endpoint?: EModelEndpoint.agents | null | string,
|
||||
ephemeralAgent?: TEphemeralAgent | null,
|
||||
) => {
|
||||
if (!ephemeralAgent) {
|
||||
return false;
|
||||
}
|
||||
if (isAgentsEndpoint(endpoint)) {
|
||||
return false;
|
||||
}
|
||||
const hasMCPSelected = (ephemeralAgent?.mcp?.length ?? 0) > 0;
|
||||
const hasCodeSelected = (ephemeralAgent?.execute_code ?? false) === true;
|
||||
const hasSearchSelected = (ephemeralAgent?.web_search ?? false) === true;
|
||||
return hasMCPSelected || hasCodeSelected || hasSearchSelected;
|
||||
};
|
||||
|
||||
export const isParamEndpoint = (
|
||||
endpoint: EModelEndpoint | string,
|
||||
endpointType?: EModelEndpoint | string,
|
||||
|
|
@ -128,11 +112,19 @@ export enum ImageDetail {
|
|||
}
|
||||
|
||||
export enum ReasoningEffort {
|
||||
none = '',
|
||||
low = 'low',
|
||||
medium = 'medium',
|
||||
high = 'high',
|
||||
}
|
||||
|
||||
export enum ReasoningSummary {
|
||||
none = '',
|
||||
auto = 'auto',
|
||||
concise = 'concise',
|
||||
detailed = 'detailed',
|
||||
}
|
||||
|
||||
export const imageDetailNumeric = {
|
||||
[ImageDetail.low]: 0,
|
||||
[ImageDetail.auto]: 1,
|
||||
|
|
@ -147,6 +139,7 @@ export const imageDetailValue = {
|
|||
|
||||
export const eImageDetailSchema = z.nativeEnum(ImageDetail);
|
||||
export const eReasoningEffortSchema = z.nativeEnum(ReasoningEffort);
|
||||
export const eReasoningSummarySchema = z.nativeEnum(ReasoningSummary);
|
||||
|
||||
export const defaultAssistantFormValues = {
|
||||
assistant: '',
|
||||
|
|
@ -271,6 +264,18 @@ export const googleSettings = {
|
|||
step: 1 as const,
|
||||
default: 40 as const,
|
||||
},
|
||||
thinking: {
|
||||
default: true as const,
|
||||
},
|
||||
thinkingBudget: {
|
||||
min: -1 as const,
|
||||
max: 32768 as const,
|
||||
step: 1 as const,
|
||||
/** `-1` = Dynamic Thinking, meaning the model will adjust
|
||||
* the budget based on the complexity of the request.
|
||||
*/
|
||||
default: -1 as const,
|
||||
},
|
||||
};
|
||||
|
||||
const ANTHROPIC_MAX_OUTPUT = 128000 as const;
|
||||
|
|
@ -416,7 +421,7 @@ export type TPluginAuthConfig = z.infer<typeof tPluginAuthConfigSchema>;
|
|||
export const tPluginSchema = z.object({
|
||||
name: z.string(),
|
||||
pluginKey: z.string(),
|
||||
description: z.string(),
|
||||
description: z.string().optional(),
|
||||
icon: z.string().optional(),
|
||||
authConfig: z.array(tPluginAuthConfigSchema).optional(),
|
||||
authenticated: z.boolean().optional(),
|
||||
|
|
@ -498,6 +503,7 @@ export const tMessageSchema = z.object({
|
|||
title: z.string().nullable().or(z.literal('New Chat')).default('New Chat'),
|
||||
sender: z.string().optional(),
|
||||
text: z.string(),
|
||||
/** @deprecated */
|
||||
generation: z.string().nullable().optional(),
|
||||
isCreatedByUser: z.boolean(),
|
||||
error: z.boolean().optional(),
|
||||
|
|
@ -518,13 +524,22 @@ export const tMessageSchema = z.object({
|
|||
thread_id: z.string().optional(),
|
||||
/* frontend components */
|
||||
iconURL: z.string().nullable().optional(),
|
||||
feedback: feedbackSchema.optional(),
|
||||
});
|
||||
|
||||
export type MemoryArtifact = {
|
||||
key: string;
|
||||
value?: string;
|
||||
tokenCount?: number;
|
||||
type: 'update' | 'delete';
|
||||
};
|
||||
|
||||
export type TAttachmentMetadata = {
|
||||
type?: Tools;
|
||||
messageId: string;
|
||||
toolCallId: string;
|
||||
[Tools.web_search]?: SearchResultData;
|
||||
[Tools.memory]?: MemoryArtifact;
|
||||
};
|
||||
|
||||
export type TAttachment =
|
||||
|
|
@ -543,6 +558,7 @@ export type TMessage = z.input<typeof tMessageSchema> & {
|
|||
siblingIndex?: number;
|
||||
attachments?: TAttachment[];
|
||||
clientTimestamp?: string;
|
||||
feedback?: TFeedback;
|
||||
};
|
||||
|
||||
export const coerceNumber = z.union([z.number(), z.string()]).transform((val) => {
|
||||
|
|
@ -613,8 +629,15 @@ export const tConversationSchema = z.object({
|
|||
file_ids: z.array(z.string()).optional(),
|
||||
/* vision */
|
||||
imageDetail: eImageDetailSchema.optional(),
|
||||
/* OpenAI: o1 only */
|
||||
reasoning_effort: eReasoningEffortSchema.optional(),
|
||||
/* OpenAI: Reasoning models only */
|
||||
reasoning_effort: eReasoningEffortSchema.optional().nullable(),
|
||||
reasoning_summary: eReasoningSummarySchema.optional().nullable(),
|
||||
/* OpenAI: use Responses API */
|
||||
useResponsesApi: z.boolean().optional(),
|
||||
/* OpenAI: use Responses API with Web Search */
|
||||
web_search: z.boolean().optional(),
|
||||
/* Google: use Search Grounding */
|
||||
grounding: z.boolean().optional(),
|
||||
/* assistant */
|
||||
assistant_id: z.string().optional(),
|
||||
/* agents */
|
||||
|
|
@ -711,6 +734,14 @@ export const tQueryParamsSchema = tConversationSchema
|
|||
top_p: true,
|
||||
/** @endpoints openAI, custom, azureOpenAI */
|
||||
max_tokens: true,
|
||||
/** @endpoints openAI, custom, azureOpenAI */
|
||||
reasoning_effort: true,
|
||||
/** @endpoints openAI, custom, azureOpenAI */
|
||||
reasoning_summary: true,
|
||||
/** @endpoints openAI, custom, azureOpenAI */
|
||||
useResponsesApi: true,
|
||||
/** @endpoints google */
|
||||
grounding: true,
|
||||
/** @endpoints google, anthropic, bedrock */
|
||||
topP: true,
|
||||
/** @endpoints google, anthropic */
|
||||
|
|
@ -791,6 +822,9 @@ export const googleBaseSchema = tConversationSchema.pick({
|
|||
artifacts: true,
|
||||
topP: true,
|
||||
topK: true,
|
||||
thinking: true,
|
||||
thinkingBudget: true,
|
||||
grounding: true,
|
||||
iconURL: true,
|
||||
greeting: true,
|
||||
spec: true,
|
||||
|
|
@ -816,6 +850,13 @@ export const googleGenConfigSchema = z
|
|||
presencePenalty: coerceNumber.optional(),
|
||||
frequencyPenalty: coerceNumber.optional(),
|
||||
stopSequences: z.array(z.string()).optional(),
|
||||
thinkingConfig: z
|
||||
.object({
|
||||
includeThoughts: z.boolean().optional(),
|
||||
thinkingBudget: coerceNumber.optional(),
|
||||
})
|
||||
.optional(),
|
||||
grounding: z.boolean().optional(),
|
||||
})
|
||||
.strip()
|
||||
.optional();
|
||||
|
|
@ -1030,10 +1071,13 @@ export const openAIBaseSchema = tConversationSchema.pick({
|
|||
maxContextTokens: true,
|
||||
max_tokens: true,
|
||||
reasoning_effort: true,
|
||||
reasoning_summary: true,
|
||||
useResponsesApi: true,
|
||||
web_search: true,
|
||||
});
|
||||
|
||||
export const openAISchema = openAIBaseSchema
|
||||
.transform((obj: Partial<TConversation>) => removeNullishValues(obj))
|
||||
.transform((obj: Partial<TConversation>) => removeNullishValues(obj, true))
|
||||
.catch(() => ({}));
|
||||
|
||||
export const compactGoogleSchema = googleBaseSchema
|
||||
|
|
|
|||
|
|
@ -1,16 +1,19 @@
|
|||
import type OpenAI from 'openai';
|
||||
import type { InfiniteData } from '@tanstack/react-query';
|
||||
import type {
|
||||
TBanner,
|
||||
TMessage,
|
||||
TResPlugin,
|
||||
ImageDetail,
|
||||
TSharedLink,
|
||||
TConversation,
|
||||
EModelEndpoint,
|
||||
TConversationTag,
|
||||
TBanner,
|
||||
TAttachment,
|
||||
} from './schemas';
|
||||
import { SettingDefinition } from './generate';
|
||||
import type { SettingDefinition } from './generate';
|
||||
import type { TMinimalFeedback } from './feedback';
|
||||
import type { Agent } from './types/assistants';
|
||||
|
||||
export type TOpenAIMessage = OpenAI.Chat.ChatCompletionMessageParam;
|
||||
|
||||
export * from './schemas';
|
||||
|
|
@ -18,33 +21,84 @@ export * from './schemas';
|
|||
export type TMessages = TMessage[];
|
||||
|
||||
/* TODO: Cleanup EndpointOption types */
|
||||
export type TEndpointOption = {
|
||||
spec?: string | null;
|
||||
iconURL?: string | null;
|
||||
endpoint: EModelEndpoint;
|
||||
endpointType?: EModelEndpoint;
|
||||
export type TEndpointOption = Pick<
|
||||
TConversation,
|
||||
// Core conversation fields
|
||||
| 'endpoint'
|
||||
| 'endpointType'
|
||||
| 'model'
|
||||
| 'modelLabel'
|
||||
| 'chatGptLabel'
|
||||
| 'promptPrefix'
|
||||
| 'temperature'
|
||||
| 'topP'
|
||||
| 'topK'
|
||||
| 'top_p'
|
||||
| 'frequency_penalty'
|
||||
| 'presence_penalty'
|
||||
| 'maxOutputTokens'
|
||||
| 'maxContextTokens'
|
||||
| 'max_tokens'
|
||||
| 'maxTokens'
|
||||
| 'resendFiles'
|
||||
| 'imageDetail'
|
||||
| 'reasoning_effort'
|
||||
| 'instructions'
|
||||
| 'additional_instructions'
|
||||
| 'append_current_datetime'
|
||||
| 'tools'
|
||||
| 'stop'
|
||||
| 'region'
|
||||
| 'additionalModelRequestFields'
|
||||
// Anthropic-specific
|
||||
| 'promptCache'
|
||||
| 'thinking'
|
||||
| 'thinkingBudget'
|
||||
// Assistant/Agent fields
|
||||
| 'assistant_id'
|
||||
| 'agent_id'
|
||||
// UI/Display fields
|
||||
| 'iconURL'
|
||||
| 'greeting'
|
||||
| 'spec'
|
||||
// Artifacts
|
||||
| 'artifacts'
|
||||
// Files
|
||||
| 'file_ids'
|
||||
// System field
|
||||
| 'system'
|
||||
// Google examples
|
||||
| 'examples'
|
||||
// Context
|
||||
| 'context'
|
||||
> & {
|
||||
// Fields specific to endpoint options that don't exist on TConversation
|
||||
modelDisplayLabel?: string;
|
||||
resendFiles?: boolean;
|
||||
promptCache?: boolean;
|
||||
maxContextTokens?: number;
|
||||
imageDetail?: ImageDetail;
|
||||
model?: string | null;
|
||||
promptPrefix?: string;
|
||||
temperature?: number;
|
||||
chatGptLabel?: string | null;
|
||||
modelLabel?: string | null;
|
||||
jailbreak?: boolean;
|
||||
key?: string | null;
|
||||
/* assistant */
|
||||
/** @deprecated Assistants API */
|
||||
thread_id?: string;
|
||||
/* multi-response stream */
|
||||
// Conversation identifiers for multi-response streams
|
||||
overrideConvoId?: string;
|
||||
overrideUserMessageId?: string;
|
||||
// Model parameters (used by different endpoints)
|
||||
modelOptions?: Record<string, unknown>;
|
||||
model_parameters?: Record<string, unknown>;
|
||||
// Configuration data (added by middleware)
|
||||
modelsConfig?: TModelsConfig;
|
||||
// File attachments (processed by middleware)
|
||||
attachments?: TAttachment[];
|
||||
// Generated prompts
|
||||
artifactsPrompt?: string;
|
||||
// Agent-specific fields
|
||||
agent?: Promise<Agent>;
|
||||
// Client-specific options
|
||||
clientOptions?: Record<string, unknown>;
|
||||
};
|
||||
|
||||
export type TEphemeralAgent = {
|
||||
mcp?: string[];
|
||||
web_search?: boolean;
|
||||
file_search?: boolean;
|
||||
execute_code?: boolean;
|
||||
};
|
||||
|
||||
|
|
@ -55,6 +109,11 @@ export type TPayload = Partial<TMessage> &
|
|||
messages?: TMessages;
|
||||
isTemporary: boolean;
|
||||
ephemeralAgent?: TEphemeralAgent | null;
|
||||
editedContent?: {
|
||||
index: number;
|
||||
text: string;
|
||||
type: 'text' | 'think';
|
||||
} | null;
|
||||
};
|
||||
|
||||
export type TSubmission = {
|
||||
|
|
@ -73,6 +132,11 @@ export type TSubmission = {
|
|||
endpointOption: TEndpointOption;
|
||||
clientTimestamp?: string;
|
||||
ephemeralAgent?: TEphemeralAgent | null;
|
||||
editedContent?: {
|
||||
index: number;
|
||||
text: string;
|
||||
type: 'text' | 'think';
|
||||
} | null;
|
||||
};
|
||||
|
||||
export type EventSubmission = Omit<TSubmission, 'initialResponse'> & { initialResponse: TMessage };
|
||||
|
|
@ -80,7 +144,7 @@ export type EventSubmission = Omit<TSubmission, 'initialResponse'> & { initialRe
|
|||
export type TPluginAction = {
|
||||
pluginKey: string;
|
||||
action: 'install' | 'uninstall';
|
||||
auth?: Partial<Record<string, string>>;
|
||||
auth?: Partial<Record<string, string>> | null;
|
||||
isEntityTool?: boolean;
|
||||
};
|
||||
|
||||
|
|
@ -90,7 +154,7 @@ export type TUpdateUserPlugins = {
|
|||
isEntityTool?: boolean;
|
||||
pluginKey: string;
|
||||
action: string;
|
||||
auth?: Partial<Record<string, string | null>>;
|
||||
auth?: Partial<Record<string, string | null>> | null;
|
||||
};
|
||||
|
||||
// TODO `label` needs to be changed to the proper `TranslationKeys`
|
||||
|
|
@ -128,6 +192,9 @@ export type TUser = {
|
|||
plugins?: string[];
|
||||
twoFactorEnabled?: boolean;
|
||||
backupCodes?: TBackupCode[];
|
||||
personalization?: {
|
||||
memories?: boolean;
|
||||
};
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
};
|
||||
|
|
@ -547,6 +614,16 @@ export type TAcceptTermsResponse = {
|
|||
|
||||
export type TBannerResponse = TBanner | null;
|
||||
|
||||
export type TUpdateFeedbackRequest = {
|
||||
feedback?: TMinimalFeedback;
|
||||
};
|
||||
|
||||
export type TUpdateFeedbackResponse = {
|
||||
messageId: string;
|
||||
conversationId: string;
|
||||
feedback?: TMinimalFeedback;
|
||||
};
|
||||
|
||||
export type TBalanceResponse = {
|
||||
tokenCredits: number;
|
||||
// Automatic refill settings
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue