🧵 refactor: Migrate Endpoint Initialization to TypeScript (#10794)

* refactor: move endpoint initialization methods to TypeScript

* refactor: move agent init to packages/api

- Introduced `initialize.ts` for agent initialization, including file processing and tool loading.
- Updated `resources.ts` to allow optional appConfig parameter.
- Enhanced endpoint configuration handling in various initialization files to support model parameters.
- Added new artifacts and prompts for React component generation.
- Refactored existing code to improve type safety and maintainability.
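
To illustrate the optional `appConfig` handling described above, a minimal sketch (the type shape and parameter names here are assumptions, not the exact `packages/api` signatures):

```ts
/** Illustrative shape only; the real AppConfig and resource loader live in packages/api. */
interface AppConfig {
  fileConfig?: Record<string, unknown>;
}

interface PrimeResourcesParams {
  appConfig?: AppConfig;
  tool_resources?: Record<string, unknown>;
}

/** Sketch: resource priming degrades gracefully when no appConfig is supplied. */
export async function primeResources({ appConfig, tool_resources = {} }: PrimeResourcesParams) {
  const fileConfig = appConfig?.fileConfig ?? {};
  // file processing and tool loading would consume fileConfig here
  return { fileConfig, tool_resources };
}
```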

* refactor: streamline endpoint initialization and enhance type safety

- Updated initialization functions across various endpoints to use a consistent request structure, replacing `unknown` types with `ServerResponse`.
- Simplified request handling by directly extracting keys from the request body.
- Improved type safety by ensuring user IDs are safely accessed with optional chaining.
- Removed unnecessary parameters and streamlined model options handling for better clarity and maintainability.
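
A minimal sketch of the consistent initializer shape described above; the parameter and type names are illustrative rather than the exact ones in the codebase:

```ts
import type { Request, Response } from 'express';

interface EndpointOption {
  model_parameters?: Record<string, unknown>;
}

interface InitializeParams {
  req: Request & { user?: { id?: string } };
  res: Response;
  endpointOption?: EndpointOption;
}

/** Sketch: keys are read directly from the request body; user IDs use optional chaining. */
export async function initializeClient({ req, res, endpointOption }: InitializeParams) {
  const { key: expiresAt } = (req.body ?? {}) as { key?: string };
  const userId = req.user?.id;
  const modelOptions = endpointOption?.model_parameters ?? {};
  return { expiresAt, userId, modelOptions, res };
}
```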

* refactor: move ModelService and extractBaseURL to packages/api

- Added comprehensive tests for the model-fetching functionality, covering scenarios for OpenAI, Anthropic, Google, and Ollama models.
- Updated existing endpoint index to include the new models module.
- Enhanced utility functions for URL extraction and model data processing.
- Improved type safety and error handling across the model-fetching logic.
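
As a rough illustration of the consolidated fetching logic, a simplified sketch against an OpenAI-compatible `/models` endpoint (the real module in `packages/api` covers provider-specific shapes and richer error handling):

```ts
/** Simplified sketch; not the exact fetchModels implementation. */
export async function fetchModelIds(baseURL: string, apiKey: string): Promise<string[]> {
  const response = await fetch(`${baseURL}/models`, {
    headers: { Authorization: `Bearer ${apiKey}` },
  });
  if (!response.ok) {
    throw new Error(`Failed to fetch models: ${response.status}`);
  }
  const json = (await response.json()) as { data?: Array<{ id: string }> };
  return (json.data ?? []).map((model) => model.id);
}
```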

* refactor: consolidate utility functions and remove unused files

- Merged `deriveBaseURL` and `extractBaseURL` into the `@librechat/api` module for better organization.
- Removed redundant utility files and their associated tests to streamline the codebase.
- Updated imports across various client files to utilize the new consolidated functions.
- Enhanced overall maintainability by reducing the number of utility modules.
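
Conceptually, the consolidated helpers normalize a full proxy URL down to its usable base, e.g. (simplified sketch, not the exact implementation):

```ts
/** Simplified sketch of base-URL extraction; the @librechat/api version handles more proxy layouts. */
export function extractBaseURL(url: string): string | null {
  try {
    const parsed = new URL(url);
    const v1Index = parsed.pathname.indexOf('/v1');
    const basePath = v1Index >= 0 ? parsed.pathname.slice(0, v1Index + 3) : '';
    return `${parsed.origin}${basePath}`;
  } catch {
    return null;
  }
}

// extractBaseURL('https://proxy.example.com/v1/chat/completions')
// -> 'https://proxy.example.com/v1'
```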

* refactor: replace ModelService references with direct imports from @librechat/api and remove ModelService file

* refactor: move encrypt/decrypt methods and key db methods to data-schemas, use `getProviderConfig` from `@librechat/api`
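
After the move, consumers import the crypto helpers from the data-schemas package; a small round-trip usage sketch (assuming the helpers are re-exported from the package root):

```ts
// Assumes encryptV2/decryptV2 are re-exported from @librechat/data-schemas after the move.
import { encryptV2, decryptV2 } from '@librechat/data-schemas';

export async function roundTrip(plaintext: string): Promise<boolean> {
  const ciphertext = await encryptV2(plaintext); // "iv:ciphertext" hex format
  const decrypted = await decryptV2(ciphertext);
  return decrypted === plaintext;
}
```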

* chore: remove unused 'res' from options in AgentClient

* refactor: file model imports and methods

- Updated imports in various controllers and services to use the unified file model from '~/models' instead of '~/models/File'.
- Consolidated file-related methods into a new file methods module in the data-schemas package.
- Added comprehensive tests for file methods including creation, retrieval, updating, and deletion.
- Enhanced the initializeAgent function to accept dependency injection for file-related methods.
- Improved error handling and logging in file methods.
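
Roughly, the wiring looks like the sketch below: the factory builds file methods from a mongoose instance, and only the methods the agent initializer needs are injected at the call site (the initializer shape here is illustrative).

```ts
import mongoose from 'mongoose';
// Assumes createFileMethods is exported from the data-schemas package.
import { createFileMethods } from '@librechat/data-schemas';

const fileMethods = createFileMethods(mongoose);

/** Illustrative initializer; the real initializeAgent in @librechat/api receives more context. */
async function initializeAgentSketch({
  getFiles,
  getToolFilesByIds,
}: Pick<typeof fileMethods, 'getFiles' | 'getToolFilesByIds'>) {
  const contextFiles = await getToolFilesByIds([]);
  return { contextFiles, getFiles };
}

// Usage: inject only the database methods the initializer depends on.
void initializeAgentSketch({
  getFiles: fileMethods.getFiles,
  getToolFilesByIds: fileMethods.getToolFilesByIds,
});
```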

* refactor: streamline database method references in agent initialization

* refactor: enhance file method tests and update type references to IMongoFile

* refactor: consolidate database method imports in agent client and initialization

* chore: remove redundant import of initializeAgent from @librechat/api

* refactor: move checkUserKeyExpiry utility to @librechat/api and update references across endpoints
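
The moved utility is a pure check along these lines (simplified sketch; the real error payload may carry additional fields):

```ts
import { ErrorTypes } from 'librechat-data-provider';

/** Simplified sketch: throws when a stored user-provided key has expired. */
export function checkUserKeyExpiry(expiresAt: string | Date, endpoint: string): void {
  if (!expiresAt) {
    return;
  }
  if (new Date(expiresAt).getTime() < Date.now()) {
    throw new Error(
      JSON.stringify({
        type: ErrorTypes.EXPIRED_USER_KEY,
        expiredAt: new Date(expiresAt).toISOString(),
        endpoint,
      }),
    );
  }
}
```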

* refactor: move updateUserPlugins logic to user.ts and simplify UserController

* refactor: update imports for user key management and remove UserService

* refactor: remove unused Anthropic and Bedrock endpoint files and clean up imports

* refactor: consolidate and update encryption imports across various files to use @librechat/data-schemas

* chore: update file model mock to use unified import from '~/models'

* chore: import order

* refactor: remove the agent.js file (now migrated to TS) and its associated logic from the endpoints

* chore: add reusable function to extract imports from source code in unused-packages workflow

* chore: enhance unused-packages workflow to include @librechat/api dependencies and improve dependency extraction

* chore: improve dependency extraction in unused-packages workflow with enhanced error handling and debugging output

* chore: add detailed debugging output to unused-packages workflow for better visibility into unused dependencies and exclusion lists

* chore: refine subpath handling in unused-packages workflow to correctly process scoped and non-scoped package imports

* chore: clean up unused debug output in unused-packages workflow and reorganize type imports in initialize.ts
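
The reusable extraction step added to the workflow works roughly as sketched below (illustrative regexes and subpath handling; the actual script may differ):

```ts
/** Sketch: collect external package names imported by a source file. */
export function extractImports(source: string): string[] {
  const packages = new Set<string>();
  const patterns = [
    /import\s+[\s\S]*?from\s+['"]([^'"]+)['"]/g,
    /require\(\s*['"]([^'"]+)['"]\s*\)/g,
  ];
  for (const pattern of patterns) {
    for (const match of source.matchAll(pattern)) {
      const specifier = match[1];
      // Skip relative paths, path aliases, and node built-ins.
      if (specifier.startsWith('.') || specifier.startsWith('~') || specifier.startsWith('node:')) {
        continue;
      }
      // Reduce subpath imports to the package name, keeping the scope segment for scoped packages.
      const parts = specifier.split('/');
      packages.add(specifier.startsWith('@') ? parts.slice(0, 2).join('/') : parts[0]);
    }
  }
  return [...packages];
}
```
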
Danny Avila 2025-12-03 17:21:41 -05:00
parent f2ba1696bc
commit b478560c81
103 changed files with 4135 additions and 2647 deletions

View file

@@ -1,7 +1,15 @@
import 'dotenv/config';
import jwt from 'jsonwebtoken';
import { webcrypto } from 'node:crypto';
import crypto from 'node:crypto';
import { SignPayloadParams } from '~/types';
const { webcrypto } = crypto;
/** Use hex decoding for both key and IV for legacy methods */
const key = Buffer.from(process.env.CREDS_KEY ?? '', 'hex');
const iv = Buffer.from(process.env.CREDS_IV ?? '', 'hex');
const algorithm = 'AES-CBC';
export async function signPayload({
payload,
secret,
@@ -15,3 +23,153 @@ export async function hashToken(str: string): Promise<string> {
const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
return Buffer.from(hashBuffer).toString('hex');
}
/** --- Legacy v1/v2 Setup: AES-CBC with fixed key and IV --- */
/**
* Encrypts a value using AES-CBC
* @param value - The plaintext to encrypt
* @returns The encrypted string in hex format
*/
export async function encrypt(value: string): Promise<string> {
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
'encrypt',
]);
const encoder = new TextEncoder();
const data = encoder.encode(value);
const encryptedBuffer = await webcrypto.subtle.encrypt(
{ name: algorithm, iv: iv },
cryptoKey,
data,
);
return Buffer.from(encryptedBuffer).toString('hex');
}
/**
* Decrypts an encrypted value using AES-CBC
* @param encryptedValue - The encrypted string in hex format
* @returns The decrypted plaintext
*/
export async function decrypt(encryptedValue: string): Promise<string> {
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
'decrypt',
]);
const encryptedBuffer = Buffer.from(encryptedValue, 'hex');
const decryptedBuffer = await webcrypto.subtle.decrypt(
{ name: algorithm, iv: iv },
cryptoKey,
encryptedBuffer,
);
const decoder = new TextDecoder();
return decoder.decode(decryptedBuffer);
}
/** --- v2: AES-CBC with a random IV per encryption --- */
/**
* Encrypts a value using AES-CBC with a random IV per encryption
* @param value - The plaintext to encrypt
* @returns The encrypted string with IV prepended (iv:ciphertext format)
*/
export async function encryptV2(value: string): Promise<string> {
const gen_iv = webcrypto.getRandomValues(new Uint8Array(16));
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
'encrypt',
]);
const encoder = new TextEncoder();
const data = encoder.encode(value);
const encryptedBuffer = await webcrypto.subtle.encrypt(
{ name: algorithm, iv: gen_iv },
cryptoKey,
data,
);
return Buffer.from(gen_iv).toString('hex') + ':' + Buffer.from(encryptedBuffer).toString('hex');
}
/**
* Decrypts an encrypted value using AES-CBC with random IV
* @param encryptedValue - The encrypted string in iv:ciphertext format
* @returns The decrypted plaintext
*/
export async function decryptV2(encryptedValue: string): Promise<string> {
const parts = encryptedValue.split(':');
if (parts.length === 1) {
return parts[0];
}
const gen_iv = Buffer.from(parts.shift() ?? '', 'hex');
const encrypted = parts.join(':');
const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [
'decrypt',
]);
const encryptedBuffer = Buffer.from(encrypted, 'hex');
const decryptedBuffer = await webcrypto.subtle.decrypt(
{ name: algorithm, iv: gen_iv },
cryptoKey,
encryptedBuffer,
);
const decoder = new TextDecoder();
return decoder.decode(decryptedBuffer);
}
/** --- v3: AES-256-CTR using Node's crypto functions --- */
const algorithm_v3 = 'aes-256-ctr';
/**
* Encrypts a value using AES-256-CTR.
* Note: AES-256 requires a 32-byte key. Ensure that process.env.CREDS_KEY is a 64-character hex string.
* @param value - The plaintext to encrypt.
* @returns The encrypted string with a "v3:" prefix.
*/
export function encryptV3(value: string): string {
if (key.length !== 32) {
throw new Error(`Invalid key length: expected 32 bytes, got ${key.length} bytes`);
}
const iv_v3 = crypto.randomBytes(16);
const cipher = crypto.createCipheriv(algorithm_v3, key, iv_v3);
const encrypted = Buffer.concat([cipher.update(value, 'utf8'), cipher.final()]);
return `v3:${iv_v3.toString('hex')}:${encrypted.toString('hex')}`;
}
/**
* Decrypts an encrypted value using AES-256-CTR.
* @param encryptedValue - The encrypted string with "v3:" prefix.
* @returns The decrypted plaintext.
*/
export function decryptV3(encryptedValue: string): string {
const parts = encryptedValue.split(':');
if (parts[0] !== 'v3') {
throw new Error('Not a v3 encrypted value');
}
const iv_v3 = Buffer.from(parts[1], 'hex');
const encryptedText = Buffer.from(parts.slice(2).join(':'), 'hex');
const decipher = crypto.createDecipheriv(algorithm_v3, key, iv_v3);
const decrypted = Buffer.concat([decipher.update(encryptedText), decipher.final()]);
return decrypted.toString('utf8');
}
/**
* Generates random values as a hex string
* @param length - The number of random bytes to generate
* @returns The random values as a hex string
*/
export async function getRandomValues(length: number): Promise<string> {
if (!Number.isInteger(length) || length <= 0) {
throw new Error('Length must be a positive integer');
}
const randomValues = new Uint8Array(length);
webcrypto.getRandomValues(randomValues);
return Buffer.from(randomValues).toString('hex');
}
/**
* Computes SHA-256 hash for the given input.
* @param input - The input to hash.
* @returns The SHA-256 hash of the input.
*/
export async function hashBackupCode(input: string): Promise<string> {
const encoder = new TextEncoder();
const data = encoder.encode(input);
const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
const hashArray = Array.from(new Uint8Array(hashBuffer));
return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
}

View file

@@ -0,0 +1,530 @@
import mongoose from 'mongoose';
import { v4 as uuidv4 } from 'uuid';
import { MongoMemoryServer } from 'mongodb-memory-server';
import { EToolResources, FileContext } from 'librechat-data-provider';
import { createFileMethods } from './file';
import { createModels } from '~/models';
let File: mongoose.Model<unknown>;
let fileMethods: ReturnType<typeof createFileMethods>;
let mongoServer: MongoMemoryServer;
let modelsToCleanup: string[] = [];
describe('File Methods', () => {
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
const mongoUri = mongoServer.getUri();
await mongoose.connect(mongoUri);
const models = createModels(mongoose);
modelsToCleanup = Object.keys(models);
Object.assign(mongoose.models, models);
File = mongoose.models.File as mongoose.Model<unknown>;
fileMethods = createFileMethods(mongoose);
});
afterAll(async () => {
const collections = mongoose.connection.collections;
for (const key in collections) {
await collections[key].deleteMany({});
}
for (const modelName of modelsToCleanup) {
if (mongoose.models[modelName]) {
delete mongoose.models[modelName];
}
}
await mongoose.disconnect();
await mongoServer.stop();
});
beforeEach(async () => {
await File.deleteMany({});
});
describe('createFile', () => {
it('should create a new file with TTL', async () => {
const fileId = uuidv4();
const userId = new mongoose.Types.ObjectId();
const file = await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: 'test.txt',
filepath: '/uploads/test.txt',
type: 'text/plain',
bytes: 100,
});
expect(file).not.toBeNull();
expect(file?.file_id).toBe(fileId);
expect(file?.filename).toBe('test.txt');
expect(file?.expiresAt).toBeDefined();
});
it('should create a file without TTL when disableTTL is true', async () => {
const fileId = uuidv4();
const userId = new mongoose.Types.ObjectId();
const file = await fileMethods.createFile(
{
file_id: fileId,
user: userId,
filename: 'permanent.txt',
filepath: '/uploads/permanent.txt',
type: 'text/plain',
bytes: 200,
},
true,
);
expect(file).not.toBeNull();
expect(file?.file_id).toBe(fileId);
expect(file?.expiresAt).toBeUndefined();
});
});
describe('findFileById', () => {
it('should find a file by file_id', async () => {
const fileId = uuidv4();
const userId = new mongoose.Types.ObjectId();
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: 'find-me.txt',
filepath: '/uploads/find-me.txt',
type: 'text/plain',
bytes: 150,
});
const found = await fileMethods.findFileById(fileId);
expect(found).not.toBeNull();
expect(found?.file_id).toBe(fileId);
expect(found?.filename).toBe('find-me.txt');
});
it('should return null for non-existent file', async () => {
const found = await fileMethods.findFileById('non-existent');
expect(found).toBeNull();
});
});
describe('getFiles', () => {
it('should retrieve multiple files matching filter', async () => {
const userId = new mongoose.Types.ObjectId();
const fileIds = [uuidv4(), uuidv4(), uuidv4()];
for (const fileId of fileIds) {
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: `file-${fileId}.txt`,
filepath: `/uploads/${fileId}.txt`,
type: 'text/plain',
bytes: 100,
});
}
const files = await fileMethods.getFiles({ user: userId });
expect(files).toHaveLength(3);
expect(files.map((f) => f.file_id)).toEqual(expect.arrayContaining(fileIds));
});
it('should exclude text field by default', async () => {
const fileId = uuidv4();
const userId = new mongoose.Types.ObjectId();
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: 'with-text.txt',
filepath: '/uploads/with-text.txt',
type: 'text/plain',
bytes: 100,
text: 'Some content here',
});
const files = await fileMethods.getFiles({ file_id: fileId });
expect(files).toHaveLength(1);
expect(files[0].text).toBeUndefined();
});
});
describe('getToolFilesByIds', () => {
it('should retrieve files for file_search tool (embedded files)', async () => {
const userId = new mongoose.Types.ObjectId();
const embeddedFileId = uuidv4();
const regularFileId = uuidv4();
await fileMethods.createFile({
file_id: embeddedFileId,
user: userId,
filename: 'embedded.txt',
filepath: '/uploads/embedded.txt',
type: 'text/plain',
bytes: 100,
embedded: true,
});
await fileMethods.createFile({
file_id: regularFileId,
user: userId,
filename: 'regular.txt',
filepath: '/uploads/regular.txt',
type: 'text/plain',
bytes: 100,
});
const toolSet = new Set([EToolResources.file_search]);
const files = await fileMethods.getToolFilesByIds([embeddedFileId, regularFileId], toolSet);
expect(files).toHaveLength(1);
expect(files[0].file_id).toBe(embeddedFileId);
});
it('should retrieve files for context tool', async () => {
const userId = new mongoose.Types.ObjectId();
const contextFileId = uuidv4();
await fileMethods.createFile({
file_id: contextFileId,
user: userId,
filename: 'context.txt',
filepath: '/uploads/context.txt',
type: 'text/plain',
bytes: 100,
text: 'Context content',
context: FileContext.agents,
});
const toolSet = new Set([EToolResources.context]);
const files = await fileMethods.getToolFilesByIds([contextFileId], toolSet);
expect(files).toHaveLength(1);
expect(files[0].file_id).toBe(contextFileId);
});
it('should retrieve files for execute_code tool', async () => {
const userId = new mongoose.Types.ObjectId();
const codeFileId = uuidv4();
await fileMethods.createFile({
file_id: codeFileId,
user: userId,
filename: 'code.py',
filepath: '/uploads/code.py',
type: 'text/x-python',
bytes: 100,
metadata: { fileIdentifier: 'some-identifier' },
});
const toolSet = new Set([EToolResources.execute_code]);
const files = await fileMethods.getToolFilesByIds([codeFileId], toolSet);
expect(files).toHaveLength(1);
expect(files[0].file_id).toBe(codeFileId);
});
});
describe('updateFile', () => {
it('should update file data and remove TTL', async () => {
const fileId = uuidv4();
const userId = new mongoose.Types.ObjectId();
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: 'original.txt',
filepath: '/uploads/original.txt',
type: 'text/plain',
bytes: 100,
});
const updated = await fileMethods.updateFile({
file_id: fileId,
filename: 'updated.txt',
bytes: 200,
});
expect(updated).not.toBeNull();
expect(updated?.filename).toBe('updated.txt');
expect(updated?.bytes).toBe(200);
expect(updated?.expiresAt).toBeUndefined();
});
});
describe('updateFileUsage', () => {
it('should increment usage count', async () => {
const fileId = uuidv4();
const userId = new mongoose.Types.ObjectId();
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: 'usage-test.txt',
filepath: '/uploads/usage-test.txt',
type: 'text/plain',
bytes: 100,
usage: 0,
});
const updated = await fileMethods.updateFileUsage({ file_id: fileId });
expect(updated?.usage).toBe(1);
const updated2 = await fileMethods.updateFileUsage({ file_id: fileId, inc: 5 });
expect(updated2?.usage).toBe(6);
});
});
describe('updateFilesUsage', () => {
it('should update usage for multiple files', async () => {
const userId = new mongoose.Types.ObjectId();
const fileIds = [uuidv4(), uuidv4()];
for (const fileId of fileIds) {
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: `file-${fileId}.txt`,
filepath: `/uploads/${fileId}.txt`,
type: 'text/plain',
bytes: 100,
usage: 0,
});
}
const files = fileIds.map((file_id) => ({ file_id }));
const updated = await fileMethods.updateFilesUsage(files);
expect(updated).toHaveLength(2);
for (const file of updated) {
expect((file as { usage: number }).usage).toBe(1);
}
});
it('should deduplicate files', async () => {
const userId = new mongoose.Types.ObjectId();
const fileId = uuidv4();
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: 'duplicate-test.txt',
filepath: '/uploads/duplicate-test.txt',
type: 'text/plain',
bytes: 100,
usage: 0,
});
const files = [{ file_id: fileId }, { file_id: fileId }, { file_id: fileId }];
const updated = await fileMethods.updateFilesUsage(files);
expect(updated).toHaveLength(1);
expect((updated[0] as { usage: number }).usage).toBe(1);
});
it('should filter out null results when files do not exist', async () => {
const userId = new mongoose.Types.ObjectId();
const existingFileId = uuidv4();
await fileMethods.createFile({
file_id: existingFileId,
user: userId,
filename: 'existing.txt',
filepath: '/uploads/existing.txt',
type: 'text/plain',
bytes: 100,
usage: 0,
});
const files = [{ file_id: existingFileId }, { file_id: 'non-existent-file' }];
const updated = await fileMethods.updateFilesUsage(files);
expect(updated.length).toBeGreaterThan(0);
expect(updated).not.toContain(null);
expect(updated).not.toContain(undefined);
const existingFile = updated.find(
(f) => (f as { file_id: string }).file_id === existingFileId,
);
expect(existingFile).toBeDefined();
expect((existingFile as { usage: number }).usage).toBe(1);
});
it('should handle empty files array', async () => {
const result = await fileMethods.updateFilesUsage([]);
expect(result).toEqual([]);
});
it('should handle fileIds parameter', async () => {
const userId = new mongoose.Types.ObjectId();
const fileIds = [uuidv4(), uuidv4()];
for (const fileId of fileIds) {
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: `file-${fileId}.txt`,
filepath: `/uploads/${fileId}.txt`,
type: 'text/plain',
bytes: 100,
usage: 0,
});
}
const files = [{ file_id: fileIds[0] }];
const updated = await fileMethods.updateFilesUsage(files, [fileIds[1]]);
expect(updated).toHaveLength(2);
const file1 = updated.find((f) => (f as { file_id: string }).file_id === fileIds[0]);
const file2 = updated.find((f) => (f as { file_id: string }).file_id === fileIds[1]);
expect(file1).toBeDefined();
expect(file2).toBeDefined();
expect((file1 as { usage: number }).usage).toBe(1);
expect((file2 as { usage: number }).usage).toBe(1);
});
it('should deduplicate between files and fileIds parameters', async () => {
const userId = new mongoose.Types.ObjectId();
const fileId = uuidv4();
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: 'test.txt',
filepath: '/uploads/test.txt',
type: 'text/plain',
bytes: 100,
usage: 0,
});
const files = [{ file_id: fileId }];
const updated = await fileMethods.updateFilesUsage(files, [fileId]);
expect(updated).toHaveLength(1);
expect((updated[0] as { usage: number }).usage).toBe(1);
});
});
describe('deleteFile', () => {
it('should delete a file by file_id', async () => {
const fileId = uuidv4();
const userId = new mongoose.Types.ObjectId();
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: 'delete-me.txt',
filepath: '/uploads/delete-me.txt',
type: 'text/plain',
bytes: 100,
});
const deleted = await fileMethods.deleteFile(fileId);
expect(deleted).not.toBeNull();
expect(deleted?.file_id).toBe(fileId);
const found = await fileMethods.findFileById(fileId);
expect(found).toBeNull();
});
});
describe('deleteFiles', () => {
it('should delete multiple files by file_ids', async () => {
const userId = new mongoose.Types.ObjectId();
const fileIds = [uuidv4(), uuidv4(), uuidv4()];
for (const fileId of fileIds) {
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: `file-${fileId}.txt`,
filepath: `/uploads/${fileId}.txt`,
type: 'text/plain',
bytes: 100,
});
}
const result = await fileMethods.deleteFiles(fileIds);
expect(result.deletedCount).toBe(3);
const remaining = await fileMethods.getFiles({ file_id: { $in: fileIds } });
expect(remaining).toHaveLength(0);
});
it('should delete all files for a user', async () => {
const userId = new mongoose.Types.ObjectId();
const otherUserId = new mongoose.Types.ObjectId();
await fileMethods.createFile({
file_id: uuidv4(),
user: userId,
filename: 'user-file-1.txt',
filepath: '/uploads/user-file-1.txt',
type: 'text/plain',
bytes: 100,
});
await fileMethods.createFile({
file_id: uuidv4(),
user: userId,
filename: 'user-file-2.txt',
filepath: '/uploads/user-file-2.txt',
type: 'text/plain',
bytes: 100,
});
await fileMethods.createFile({
file_id: uuidv4(),
user: otherUserId,
filename: 'other-user-file.txt',
filepath: '/uploads/other-user-file.txt',
type: 'text/plain',
bytes: 100,
});
const result = await fileMethods.deleteFiles([], userId.toString());
expect(result.deletedCount).toBe(2);
const remaining = await fileMethods.getFiles({});
expect(remaining).toHaveLength(1);
expect(remaining[0].user?.toString()).toBe(otherUserId.toString());
});
});
describe('batchUpdateFiles', () => {
it('should update multiple file paths', async () => {
const userId = new mongoose.Types.ObjectId();
const fileIds = [uuidv4(), uuidv4()];
for (const fileId of fileIds) {
await fileMethods.createFile({
file_id: fileId,
user: userId,
filename: `file-${fileId}.txt`,
filepath: `/old-path/${fileId}.txt`,
type: 'text/plain',
bytes: 100,
});
}
const updates = fileIds.map((file_id) => ({
file_id,
filepath: `/new-path/${file_id}.txt`,
}));
await fileMethods.batchUpdateFiles(updates);
for (const fileId of fileIds) {
const file = await fileMethods.findFileById(fileId);
expect(file?.filepath).toBe(`/new-path/${fileId}.txt`);
}
});
it('should handle empty updates array gracefully', async () => {
await expect(fileMethods.batchUpdateFiles([])).resolves.toBeUndefined();
});
});
});

View file

@@ -0,0 +1,272 @@
import logger from '../config/winston';
import { EToolResources, FileContext } from 'librechat-data-provider';
import type { FilterQuery, SortOrder, Model } from 'mongoose';
import type { IMongoFile } from '~/types/file';
/** Factory function that takes mongoose instance and returns the file methods */
export function createFileMethods(mongoose: typeof import('mongoose')) {
/**
* Finds a file by its file_id with additional query options.
* @param file_id - The unique identifier of the file
* @param options - Query options for filtering, projection, etc.
* @returns A promise that resolves to the file document or null
*/
async function findFileById(
file_id: string,
options: Record<string, unknown> = {},
): Promise<IMongoFile | null> {
const File = mongoose.models.File as Model<IMongoFile>;
return File.findOne({ file_id, ...options }).lean();
}
/** Select fields for query projection - 0 to exclude, 1 to include */
type SelectProjection = Record<string, 0 | 1>;
/**
* Retrieves files matching a given filter, sorted by the most recently updated.
* @param filter - The filter criteria to apply
* @param _sortOptions - Optional sort parameters
* @param selectFields - Fields to include/exclude in the query results. Default excludes the 'text' field
* @param options - Additional query options (userId, agentId for ACL)
* @returns A promise that resolves to an array of file documents
*/
async function getFiles(
filter: FilterQuery<IMongoFile>,
_sortOptions?: Record<string, SortOrder> | null,
selectFields?: SelectProjection | string | null,
): Promise<IMongoFile[] | null> {
const File = mongoose.models.File as Model<IMongoFile>;
const sortOptions = { updatedAt: -1 as SortOrder, ..._sortOptions };
const query = File.find(filter);
if (selectFields != null) {
query.select(selectFields);
} else {
query.select({ text: 0 });
}
return await query.sort(sortOptions).lean();
}
/**
* Retrieves tool files (files that are embedded or have a fileIdentifier) from an array of file IDs
* @param fileIds - Array of file_id strings to search for
* @param toolResourceSet - Optional filter for tool resources
* @returns Files that match the criteria
*/
async function getToolFilesByIds(
fileIds: string[],
toolResourceSet?: Set<EToolResources>,
): Promise<IMongoFile[]> {
if (!fileIds || !fileIds.length || !toolResourceSet?.size) {
return [];
}
try {
const filter: FilterQuery<IMongoFile> = {
file_id: { $in: fileIds },
$or: [],
};
if (toolResourceSet.has(EToolResources.context)) {
filter.$or?.push({ text: { $exists: true, $ne: null }, context: FileContext.agents });
}
if (toolResourceSet.has(EToolResources.file_search)) {
filter.$or?.push({ embedded: true });
}
if (toolResourceSet.has(EToolResources.execute_code)) {
filter.$or?.push({ 'metadata.fileIdentifier': { $exists: true } });
}
const selectFields: SelectProjection = { text: 0 };
const sortOptions = { updatedAt: -1 as SortOrder };
const results = await getFiles(filter, sortOptions, selectFields);
return results ?? [];
} catch (error) {
logger.error('[getToolFilesByIds] Error retrieving tool files:', error);
throw new Error('Error retrieving tool files');
}
}
/**
* Creates a new file with a TTL of 1 hour.
* @param data - The file data to be created, must contain file_id
* @param disableTTL - Whether to disable the TTL
* @returns A promise that resolves to the created file document
*/
async function createFile(
data: Partial<IMongoFile>,
disableTTL?: boolean,
): Promise<IMongoFile | null> {
const File = mongoose.models.File as Model<IMongoFile>;
const fileData: Partial<IMongoFile> = {
...data,
expiresAt: new Date(Date.now() + 3600 * 1000),
};
if (disableTTL) {
delete fileData.expiresAt;
}
return File.findOneAndUpdate({ file_id: data.file_id }, fileData, {
new: true,
upsert: true,
}).lean();
}
/**
* Updates a file identified by file_id with new data and removes the TTL.
* @param data - The data to update, must contain file_id
* @returns A promise that resolves to the updated file document
*/
async function updateFile(
data: Partial<IMongoFile> & { file_id: string },
): Promise<IMongoFile | null> {
const File = mongoose.models.File as Model<IMongoFile>;
const { file_id, ...update } = data;
const updateOperation = {
$set: update,
$unset: { expiresAt: '' },
};
return File.findOneAndUpdate({ file_id }, updateOperation, {
new: true,
}).lean();
}
/**
* Increments the usage of a file identified by file_id.
* @param data - The data to update, must contain file_id and the increment value for usage
* @returns A promise that resolves to the updated file document
*/
async function updateFileUsage(data: {
file_id: string;
inc?: number;
}): Promise<IMongoFile | null> {
const File = mongoose.models.File as Model<IMongoFile>;
const { file_id, inc = 1 } = data;
const updateOperation = {
$inc: { usage: inc },
$unset: { expiresAt: '', temp_file_id: '' },
};
return File.findOneAndUpdate({ file_id }, updateOperation, {
new: true,
}).lean();
}
/**
* Deletes a file identified by file_id.
* @param file_id - The unique identifier of the file to delete
* @returns A promise that resolves to the deleted file document or null
*/
async function deleteFile(file_id: string): Promise<IMongoFile | null> {
const File = mongoose.models.File as Model<IMongoFile>;
return File.findOneAndDelete({ file_id }).lean();
}
/**
* Deletes a file identified by a filter.
* @param filter - The filter criteria to apply
* @returns A promise that resolves to the deleted file document or null
*/
async function deleteFileByFilter(filter: FilterQuery<IMongoFile>): Promise<IMongoFile | null> {
const File = mongoose.models.File as Model<IMongoFile>;
return File.findOneAndDelete(filter).lean();
}
/**
* Deletes multiple files identified by an array of file_ids.
* @param file_ids - The unique identifiers of the files to delete
* @param user - Optional user ID to filter by
* @returns A promise that resolves to the result of the deletion operation
*/
async function deleteFiles(
file_ids: string[],
user?: string,
): Promise<{ deletedCount?: number }> {
const File = mongoose.models.File as Model<IMongoFile>;
let deleteQuery: FilterQuery<IMongoFile> = { file_id: { $in: file_ids } };
if (user) {
deleteQuery = { user: user };
}
return File.deleteMany(deleteQuery);
}
/**
* Batch updates files with new signed URLs in MongoDB
* @param updates - Array of updates in the format { file_id, filepath }
*/
async function batchUpdateFiles(
updates: Array<{ file_id: string; filepath: string }>,
): Promise<void> {
if (!updates || updates.length === 0) {
return;
}
const File = mongoose.models.File as Model<IMongoFile>;
const bulkOperations = updates.map((update) => ({
updateOne: {
filter: { file_id: update.file_id },
update: { $set: { filepath: update.filepath } },
},
}));
const result = await File.bulkWrite(bulkOperations);
logger.info(`Updated ${result.modifiedCount} files with new S3 URLs`);
}
/**
* Updates usage tracking for multiple files.
* Processes files and optional fileIds, updating their usage count in the database.
*
* @param files - Array of file objects to process
* @param fileIds - Optional array of file IDs to process
* @returns Array of updated file documents (with null results filtered out)
*/
async function updateFilesUsage(
files: Array<{ file_id: string }>,
fileIds?: string[],
): Promise<IMongoFile[]> {
const promises: Promise<IMongoFile | null>[] = [];
const seen = new Set<string>();
for (const file of files) {
const { file_id } = file;
if (seen.has(file_id)) {
continue;
}
seen.add(file_id);
promises.push(updateFileUsage({ file_id }));
}
if (!fileIds) {
const results = await Promise.all(promises);
return results.filter((result): result is IMongoFile => result != null);
}
for (const file_id of fileIds) {
if (seen.has(file_id)) {
continue;
}
seen.add(file_id);
promises.push(updateFileUsage({ file_id }));
}
const results = await Promise.all(promises);
return results.filter((result): result is IMongoFile => result != null);
}
return {
findFileById,
getFiles,
getToolFilesByIds,
createFile,
updateFile,
updateFileUsage,
deleteFile,
deleteFiles,
deleteFileByFilter,
batchUpdateFiles,
updateFilesUsage,
};
}
export type FileMethods = ReturnType<typeof createFileMethods>;

View file

@@ -2,6 +2,8 @@ import { createSessionMethods, type SessionMethods } from './session';
import { createTokenMethods, type TokenMethods } from './token';
import { createRoleMethods, type RoleMethods } from './role';
import { createUserMethods, type UserMethods } from './user';
import { createKeyMethods, type KeyMethods } from './key';
import { createFileMethods, type FileMethods } from './file';
/* Memories */
import { createMemoryMethods, type MemoryMethods } from './memory';
/* Agent Categories */
@@ -18,6 +20,8 @@ export type AllMethods = UserMethods &
SessionMethods &
TokenMethods &
RoleMethods &
KeyMethods &
FileMethods &
MemoryMethods &
AgentCategoryMethods &
UserGroupMethods &
@@ -28,6 +32,7 @@ export type AllMethods = UserMethods &
/**
* Creates all database methods for all collections
* @param mongoose - Mongoose instance
*/
export function createMethods(mongoose: typeof import('mongoose')): AllMethods {
return {
@@ -35,6 +40,8 @@ export function createMethods(mongoose: typeof import('mongoose')): AllMethods {
...createSessionMethods(mongoose),
...createTokenMethods(mongoose),
...createRoleMethods(mongoose),
...createKeyMethods(mongoose),
...createFileMethods(mongoose),
...createMemoryMethods(mongoose),
...createAgentCategoryMethods(mongoose),
...createAccessRoleMethods(mongoose),
@@ -50,6 +57,8 @@ export type {
SessionMethods,
TokenMethods,
RoleMethods,
KeyMethods,
FileMethods,
MemoryMethods,
AgentCategoryMethods,
UserGroupMethods,

View file

@@ -0,0 +1,160 @@
import { ErrorTypes } from 'librechat-data-provider';
// Note: checkUserKeyExpiry moved to @librechat/api (utils/key.ts) as it's a pure validation utility
import { encrypt, decrypt } from '~/crypto';
import logger from '~/config/winston';
/** Factory function that takes mongoose instance and returns the key methods */
export function createKeyMethods(mongoose: typeof import('mongoose')) {
/**
* Retrieves and decrypts the key value for a given user identified by userId and identifier name.
* @param params - The parameters object
* @param params.userId - The unique identifier for the user
* @param params.name - The name associated with the key
* @returns The decrypted key value
* @throws Error if the key is not found or if there is a problem during key retrieval
* @description This function searches for a user's key in the database using their userId and name.
* If found, it decrypts the value of the key and returns it. If no key is found, it throws
* an error indicating that there is no user key available.
*/
async function getUserKey(params: { userId: string; name: string }): Promise<string> {
const { userId, name } = params;
const Key = mongoose.models.Key;
const keyValue = (await Key.findOne({ userId, name }).lean()) as {
value: string;
} | null;
if (!keyValue) {
throw new Error(
JSON.stringify({
type: ErrorTypes.NO_USER_KEY,
}),
);
}
return await decrypt(keyValue.value);
}
/**
* Retrieves, decrypts, and parses the key values for a given user identified by userId and name.
* @param params - The parameters object
* @param params.userId - The unique identifier for the user
* @param params.name - The name associated with the key
* @returns The decrypted and parsed key values
* @throws Error if the key is invalid or if there is a problem during key value parsing
* @description This function retrieves a user's encrypted key using their userId and name, decrypts it,
* and then attempts to parse the decrypted string into a JSON object. If the parsing fails,
* it throws an error indicating that the user key is invalid.
*/
async function getUserKeyValues(params: {
userId: string;
name: string;
}): Promise<Record<string, string>> {
const { userId, name } = params;
const userValues = await getUserKey({ userId, name });
try {
return JSON.parse(userValues) as Record<string, string>;
} catch (e) {
logger.error('[getUserKeyValues]', e);
throw new Error(
JSON.stringify({
type: ErrorTypes.INVALID_USER_KEY,
}),
);
}
}
/**
* Retrieves the expiry information of a user's key identified by userId and name.
* @param params - The parameters object
* @param params.userId - The unique identifier for the user
* @param params.name - The name associated with the key
* @returns The expiry date of the key or null if the key doesn't exist
* @description This function fetches a user's key from the database using their userId and name and
* returns its expiry date. If the key is not found, it returns null for the expiry date.
*/
async function getUserKeyExpiry(params: {
userId: string;
name: string;
}): Promise<{ expiresAt: Date | 'never' | null }> {
const { userId, name } = params;
const Key = mongoose.models.Key;
const keyValue = (await Key.findOne({ userId, name }).lean()) as {
expiresAt?: Date;
} | null;
if (!keyValue) {
return { expiresAt: null };
}
return { expiresAt: keyValue.expiresAt || 'never' };
}
/**
* Updates or inserts a new key for a given user identified by userId and name, with a specified value and expiry date.
* @param params - The parameters object
* @param params.userId - The unique identifier for the user
* @param params.name - The name associated with the key
* @param params.value - The value to be encrypted and stored as the key's value
* @param params.expiresAt - The expiry date for the key [optional]
* @returns The updated or newly inserted key document
* @description This function either updates an existing user key or inserts a new one into the database,
* after encrypting the provided value. It sets the provided expiry date for the key (or unsets for no expiry).
*/
async function updateUserKey(params: {
userId: string;
name: string;
value: string;
expiresAt?: Date | null;
}): Promise<unknown> {
const { userId, name, value, expiresAt = null } = params;
const Key = mongoose.models.Key;
const encryptedValue = await encrypt(value);
const updateObject: { userId: string; name: string; value: string; expiresAt?: Date } = {
userId,
name,
value: encryptedValue,
};
const updateQuery: { $set: typeof updateObject; $unset?: { expiresAt: string } } = {
$set: updateObject,
};
if (expiresAt) {
updateObject.expiresAt = new Date(expiresAt);
} else {
updateQuery.$unset = { expiresAt: '' };
}
return await Key.findOneAndUpdate({ userId, name }, updateQuery, {
upsert: true,
new: true,
}).lean();
}
/**
* Deletes a key or all keys for a given user identified by userId, optionally based on a specified name.
* @param params - The parameters object
* @param params.userId - The unique identifier for the user
* @param params.name - The name associated with the key to delete. If not provided and all is true, deletes all keys
* @param params.all - Whether to delete all keys for the user
* @returns The result of the deletion operation
* @description This function deletes a specific key or all keys for a user from the database.
* If a name is provided and all is false, it deletes only the key with that name.
* If all is true, it ignores the name and deletes all keys for the user.
*/
async function deleteUserKey(params: {
userId: string;
name?: string;
all?: boolean;
}): Promise<unknown> {
const { userId, name, all = false } = params;
const Key = mongoose.models.Key;
if (all) {
return await Key.deleteMany({ userId });
}
return await Key.findOneAndDelete({ userId, name }).lean();
}
return {
getUserKey,
updateUserKey,
deleteUserKey,
getUserKeyValues,
getUserKeyExpiry,
};
}
export type KeyMethods = ReturnType<typeof createKeyMethods>;

View file

@@ -301,6 +301,32 @@ export function createUserMethods(mongoose: typeof import('mongoose')) {
});
};
/**
* Updates the plugins for a user based on the action specified (install/uninstall).
* @param userId - The user ID whose plugins are to be updated
* @param plugins - The current plugins array
* @param pluginKey - The key of the plugin to install or uninstall
* @param action - The action to perform, 'install' or 'uninstall'
* @returns The result of the update operation or null if action is invalid
*/
async function updateUserPlugins(
userId: string,
plugins: string[] | undefined,
pluginKey: string,
action: 'install' | 'uninstall',
): Promise<IUser | null> {
const userPlugins = plugins ?? [];
if (action === 'install') {
return updateUser(userId, { plugins: [...userPlugins, pluginKey] });
}
if (action === 'uninstall') {
return updateUser(userId, {
plugins: userPlugins.filter((plugin) => plugin !== pluginKey),
});
}
return null;
}
return {
findUser,
countUsers,
@@ -310,6 +336,7 @@ export function createUserMethods(mongoose: typeof import('mongoose')) {
getUserById,
generateToken,
deleteUserById,
updateUserPlugins,
toggleUserMemories,
};
}