mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-03-15 04:06:33 +01:00
* fix: add file size limits to conversation import multer instance * fix: address review findings for conversation import file size limits * fix: use local jest.mock for data-schemas instead of global moduleNameMapper The global @librechat/data-schemas mock in jest.config.js only provided logger, breaking all tests that depend on createModels from the same package. Replace with a virtual jest.mock scoped to the import spec file. * fix: move import to top of file, pre-compute upload middleware, assert logger.warn in tests * refactor: move resolveImportMaxFileSize to packages/api New backend logic belongs in packages/api as TypeScript. Delete the api/server/utils/import/limits.js wrapper and import directly from @librechat/api in convos.js and importConversations.js. Resolver unit tests move to packages/api; the api/ spec retains only multer behavior tests. * chore: rename importLimits to import * fix: stale type reference and mock isolation in import tests Update typeof import path from '../importLimits' to '../import' after the rename. Clear mockLogger.warn in beforeEach to prevent cross-test accumulation. * fix: add resolveImportMaxFileSize to @librechat/api mock in convos.spec.js * fix: resolve jest.mock hoisting issue in import tests jest.mock factories are hoisted above const declarations, so the mockLogger reference was undefined at factory evaluation time. Use a direct import of the mocked logger module instead. * fix: remove virtual flag from data-schemas mock for CI compatibility virtual: true prevents the mock from intercepting the real module in CI where @librechat/data-schemas is built, causing import.ts to use the real logger while the test asserts against the mock.
41 lines
1.4 KiB
JavaScript
41 lines
1.4 KiB
JavaScript
const fs = require('fs').promises;
|
|
const { resolveImportMaxFileSize } = require('@librechat/api');
|
|
const { logger } = require('@librechat/data-schemas');
|
|
const { getImporter } = require('./importers');
|
|
|
|
// Maximum allowed import file size in bytes, resolved once at module load.
// NOTE(review): presumably reads app config/env — confirm in @librechat/api.
const maxFileSize = resolveImportMaxFileSize();
|
|
|
|
/**
 * Job definition for importing a conversation.
 * @param {{ filepath: string, requestUserId: string }} job - The job object.
 */
|
|
/**
 * Imports conversation(s) from an uploaded JSON file on behalf of a user.
 * Rejects files over the configured size limit, dispatches the parsed data
 * to the matching importer, and always removes the temp file afterwards.
 * Rethrows any failure so the caller does not report success.
 *
 * @param {{ filepath: string, requestUserId: string }} job - The job object.
 */
const importConversations = async (job) => {
  const { filepath, requestUserId } = job;
  try {
    logger.debug(`user: ${requestUserId} | Importing conversation(s) from file...`);

    // Enforce the size limit before reading the file into memory.
    const { size } = await fs.stat(filepath);
    if (size > maxFileSize) {
      throw new Error(
        `File size is ${size} bytes. It exceeds the maximum limit of ${maxFileSize} bytes.`,
      );
    }

    // Parse the upload and hand it to the importer matching its format.
    const jsonData = JSON.parse(await fs.readFile(filepath, 'utf8'));
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId);

    logger.debug(`user: ${requestUserId} | Finished importing conversations`);
  } catch (error) {
    logger.error(`user: ${requestUserId} | Failed to import conversation: `, error);
    throw error; // throw error all the way up so request does not return success
  } finally {
    // Best-effort cleanup of the uploaded temp file; failure is only logged.
    try {
      await fs.unlink(filepath);
    } catch (error) {
      logger.error(`user: ${requestUserId} | Failed to delete file: ${filepath}`, error);
    }
  }
};
|
|
|
|
module.exports = importConversations;
|