📏 refactor: Add File Size Limits to Conversation Imports (#12221)

* fix: add file size limits to conversation import multer instance

* fix: address review findings for conversation import file size limits

* fix: use local jest.mock for data-schemas instead of global moduleNameMapper

The global @librechat/data-schemas mock in jest.config.js only provided
logger, breaking all tests that depend on createModels from the same
package. Replace with a virtual jest.mock scoped to the import spec file.

* fix: move import to top of file, pre-compute upload middleware, assert logger.warn in tests

* refactor: move resolveImportMaxFileSize to packages/api

New backend logic belongs in packages/api as TypeScript. Delete the
api/server/utils/import/limits.js wrapper and import directly from
@librechat/api in convos.js and importConversations.js. Resolver unit
tests move to packages/api; the api/ spec retains only multer behavior
tests.

* chore: rename importLimits to import

* fix: stale type reference and mock isolation in import tests

Update typeof import path from '../importLimits' to '../import' after
the rename. Clear mockLogger.warn in beforeEach to prevent cross-test
accumulation.

* fix: add resolveImportMaxFileSize to @librechat/api mock in convos.spec.js

* fix: resolve jest.mock hoisting issue in import tests

jest.mock factories are hoisted above const declarations, so the
mockLogger reference was undefined at factory evaluation time. Use a
direct import of the mocked logger module instead.

* fix: remove virtual flag from data-schemas mock for CI compatibility

virtual: true prevents the mock from intercepting the real module in
CI where @librechat/data-schemas is built, causing import.ts to use
the real logger while the test asserts against the mock.
This commit is contained in:
Danny Avila 2026-03-14 03:06:29 -04:00 committed by GitHub
parent c6982dc180
commit 35a35dc2e9
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 223 additions and 7 deletions

View file

@@ -9,7 +9,7 @@ module.exports = {
moduleNameMapper: {
'~/(.*)': '<rootDir>/$1',
'~/data/auth.json': '<rootDir>/__mocks__/auth.mock.json',
'^openid-client/passport$': '<rootDir>/test/__mocks__/openid-client-passport.js', // Mock for the passport strategy part
'^openid-client/passport$': '<rootDir>/test/__mocks__/openid-client-passport.js',
'^openid-client$': '<rootDir>/test/__mocks__/openid-client.js',
},
transformIgnorePatterns: ['/node_modules/(?!(openid-client|oauth4webapi|jose)/).*/'],

View file

@@ -3,6 +3,7 @@ module.exports = {
api: (overrides = {}) => ({
isEnabled: jest.fn(),
resolveImportMaxFileSize: jest.fn(() => 262144000),
createAxiosInstance: jest.fn(() => ({
get: jest.fn(),
post: jest.fn(),

View file

@@ -0,0 +1,98 @@
const express = require('express');
const request = require('supertest');
const multer = require('multer');
/** Multer fileFilter: accept only JSON uploads, reject every other MIME type. */
const importFileFilter = (req, file, cb) => {
  const isJson = file.mimetype === 'application/json';
  if (!isJson) {
    cb(new Error('Only JSON files are allowed'), false);
    return;
  }
  cb(null, true);
};
/** Proxy app that mirrors the production multer + error-handling pattern */
function createImportApp(fileSize) {
  const app = express();

  // Single-file upload bound to the size limit under test.
  const uploadSingle = multer({
    storage: multer.memoryStorage(),
    fileFilter: importFileFilter,
    limits: { fileSize },
  }).single('file');

  // Translate multer's LIMIT_FILE_SIZE into a 413; forward anything else.
  const handleUpload = (req, res, next) => {
    uploadSingle(req, res, (err) => {
      if (err == null) {
        return next();
      }
      if (err.code === 'LIMIT_FILE_SIZE') {
        return res.status(413).json({ message: 'File exceeds the maximum allowed size' });
      }
      next(err);
    });
  };

  app.post('/import', handleUpload, (req, res) => {
    res.status(201).json({ message: 'success', size: req.file.size });
  });

  // Catch-all error handler (e.g. fileFilter rejections) — mirrors production.
  app.use((err, _req, res, _next) => {
    res.status(400).json({ error: err.message });
  });

  return app;
}
describe('Conversation Import - Multer File Size Limits', () => {
  /** POSTs `payload` as a JSON upload to a fresh app limited to `fileSize` bytes. */
  const postImport = (fileSize, payload) =>
    request(createImportApp(fileSize))
      .post('/import')
      .attach('file', payload, { filename: 'import.json', contentType: 'application/json' });

  describe('multer rejects files exceeding the configured limit', () => {
    it('returns 413 for files larger than the limit', async () => {
      const limit = 1024;
      const res = await postImport(limit, Buffer.alloc(limit + 512, 'x'));
      expect(res.status).toBe(413);
      expect(res.body.message).toBe('File exceeds the maximum allowed size');
    });

    it('accepts files within the limit', async () => {
      const res = await postImport(4096, Buffer.from(JSON.stringify({ title: 'test' })));
      expect(res.status).toBe(201);
      expect(res.body.message).toBe('success');
    });

    it('rejects at the exact boundary (limit + 1 byte)', async () => {
      const limit = 512;
      const res = await postImport(limit, Buffer.alloc(limit + 1, 'a'));
      expect(res.status).toBe(413);
    });

    it('accepts a file just under the limit', async () => {
      const limit = 512;
      const res = await postImport(limit, Buffer.alloc(limit - 1, 'b'));
      expect(res.status).toBe(201);
    });
  });
});

View file

@@ -1,7 +1,7 @@
const multer = require('multer');
const express = require('express');
const { sleep } = require('@librechat/agents');
const { isEnabled } = require('@librechat/api');
const { isEnabled, resolveImportMaxFileSize } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys, EModelEndpoint } = require('librechat-data-provider');
const {
@@ -226,7 +226,25 @@ router.post('/update', validateConvoAccess, async (req, res) => {
const { importIpLimiter, importUserLimiter } = createImportLimiters();
/** Fork and duplicate share one rate-limit budget (same "clone" operation class) */
const { forkIpLimiter, forkUserLimiter } = createForkLimiters();
const upload = multer({ storage: storage, fileFilter: importFileFilter });
const importMaxFileSize = resolveImportMaxFileSize();
const upload = multer({
storage,
fileFilter: importFileFilter,
limits: { fileSize: importMaxFileSize },
});
const uploadSingle = upload.single('file');
/** Wraps multer's single-file upload so size-limit violations surface as HTTP 413. */
function handleUpload(req, res, next) {
  uploadSingle(req, res, (err) => {
    if (!err) {
      return next();
    }
    // Multer signals an oversized file with this code; map it to 413 Payload Too Large.
    if (err.code === 'LIMIT_FILE_SIZE') {
      return res.status(413).json({ message: 'File exceeds the maximum allowed size' });
    }
    // Any other upload error falls through to the router's error handling.
    next(err);
  });
}
/**
* Imports a conversation from a JSON file and saves it to the database.
@@ -239,7 +257,7 @@ router.post(
importIpLimiter,
importUserLimiter,
configMiddleware,
upload.single('file'),
handleUpload,
async (req, res) => {
try {
/* TODO: optimize to return imported conversations and add manually */

View file

@@ -1,7 +1,10 @@
const fs = require('fs').promises;
const { resolveImportMaxFileSize } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { getImporter } = require('./importers');
const maxFileSize = resolveImportMaxFileSize();
/**
* Job definition for importing a conversation.
* @param {{ filepath, requestUserId }} job - The job object.
@@ -11,11 +14,10 @@ const importConversations = async (job) => {
try {
logger.debug(`user: ${requestUserId} | Importing conversation(s) from file...`);
/* error if file is too large */
const fileInfo = await fs.stat(filepath);
if (fileInfo.size > process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES) {
if (fileInfo.size > maxFileSize) {
throw new Error(
`File size is ${fileInfo.size} bytes. It exceeds the maximum limit of ${process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES} bytes.`,
`File size is ${fileInfo.size} bytes. It exceeds the maximum limit of ${maxFileSize} bytes.`,
);
}

View file

@@ -0,0 +1,76 @@
// Mock the logger before the module under test is loaded. jest.mock calls are
// hoisted above import statements, so the factory must be self-contained and
// must not reference outer variables.
jest.mock('@librechat/data-schemas', () => ({
  logger: { info: jest.fn(), warn: jest.fn(), error: jest.fn(), debug: jest.fn() },
}));

import { DEFAULT_IMPORT_MAX_FILE_SIZE, resolveImportMaxFileSize } from '../import';
// Resolves to the mocked module above, so tests can assert on logger.warn.
import { logger } from '@librechat/data-schemas';

describe('resolveImportMaxFileSize', () => {
  // Saved value of the env var so each test may mutate it freely.
  let originalEnv: string | undefined;

  beforeEach(() => {
    originalEnv = process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES;
    // Reset mock call history so logger.warn assertions don't accumulate across tests.
    jest.clearAllMocks();
  });

  afterEach(() => {
    // Restore (or remove) the env var to avoid leaking state into other suites.
    if (originalEnv !== undefined) {
      process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES = originalEnv;
    } else {
      delete process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES;
    }
  });

  it('returns 262144000 (250 MiB) when env var is not set', () => {
    delete process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES;
    expect(resolveImportMaxFileSize()).toBe(262144000);
    expect(DEFAULT_IMPORT_MAX_FILE_SIZE).toBe(262144000);
  });

  it('returns default when env var is empty string', () => {
    process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES = '';
    expect(resolveImportMaxFileSize()).toBe(DEFAULT_IMPORT_MAX_FILE_SIZE);
  });

  it('respects a custom numeric value', () => {
    process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES = '5242880';
    expect(resolveImportMaxFileSize()).toBe(5242880);
  });

  it('parses string env var to number', () => {
    process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES = '1048576';
    expect(resolveImportMaxFileSize()).toBe(1048576);
  });

  // The invalid-input cases below must both fall back to the default AND emit a warning.
  it('falls back to default and warns for non-numeric string', () => {
    process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES = 'abc';
    expect(resolveImportMaxFileSize()).toBe(DEFAULT_IMPORT_MAX_FILE_SIZE);
    expect(logger.warn).toHaveBeenCalledWith(
      expect.stringContaining('Invalid CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES'),
    );
  });

  it('falls back to default and warns for negative values', () => {
    process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES = '-100';
    expect(resolveImportMaxFileSize()).toBe(DEFAULT_IMPORT_MAX_FILE_SIZE);
    expect(logger.warn).toHaveBeenCalledWith(
      expect.stringContaining('Invalid CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES'),
    );
  });

  it('falls back to default and warns for zero', () => {
    process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES = '0';
    expect(resolveImportMaxFileSize()).toBe(DEFAULT_IMPORT_MAX_FILE_SIZE);
    expect(logger.warn).toHaveBeenCalledWith(
      expect.stringContaining('Invalid CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES'),
    );
  });

  it('falls back to default and warns for Infinity', () => {
    process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES = 'Infinity';
    expect(resolveImportMaxFileSize()).toBe(DEFAULT_IMPORT_MAX_FILE_SIZE);
    expect(logger.warn).toHaveBeenCalledWith(
      expect.stringContaining('Invalid CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES'),
    );
  });
});

View file

@@ -0,0 +1,20 @@
import { logger } from '@librechat/data-schemas';
/** 250 MiB — default max file size for conversation imports */
export const DEFAULT_IMPORT_MAX_FILE_SIZE = 262144000;
/** Resolves the import file-size limit from the env var, falling back to the 250 MiB default */
export function resolveImportMaxFileSize(): number {
  const configured = process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES;
  // Unset or empty string: silently use the default (no warning).
  if (!configured) {
    return DEFAULT_IMPORT_MAX_FILE_SIZE;
  }
  const limit = Number(configured);
  // Only a finite, strictly positive byte count is acceptable.
  if (Number.isFinite(limit) && limit > 0) {
    return limit;
  }
  logger.warn(
    `[imports] Invalid CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES="${configured}"; using default ${DEFAULT_IMPORT_MAX_FILE_SIZE}`,
  );
  return DEFAULT_IMPORT_MAX_FILE_SIZE;
}

View file

@@ -6,6 +6,7 @@ export * from './email';
export * from './env';
export * from './events';
export * from './files';
export * from './import';
export * from './generators';
export * from './graph';
export * from './path';