⚖️ fix: Add Configurable File Size Cap for Conversation Imports (#10012)

* Check the size of an imported conversation file against a configured maximum so a large upload cannot bring down the application

chore: remove non-English localization, as it needs to be added via Locize

* feat: Implement file size validation for conversation imports to prevent oversized uploads

---------

Co-authored-by: Marc Amick <MarcAmick@jhu.edu>
Co-authored-by: Danny Avila <danny@librechat.ai>
MarcAmick 2025-10-07 14:47:21 -04:00 committed by GitHub
parent bb7a0274fa
commit e9b678dd6a
6 changed files with 38 additions and 1 deletion
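The change, condensed: conversation imports are now gated on an optional byte limit read from `CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES`, checked client-side (for a friendly toast) and server-side (before the file is parsed). A minimal, self-contained sketch of the server-side gate, assuming Node's fs/promises API; the helper name below is illustrative and not part of the diff:

```ts
import { stat } from 'fs/promises';

/** Illustrative helper: reject an import file that exceeds the configured cap. */
async function assertImportWithinLimit(filepath: string): Promise<void> {
  const limit = Number.parseInt(process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES ?? '', 10);
  if (!Number.isFinite(limit) || limit <= 0) {
    return; // cap not configured: imports are unrestricted
  }
  const { size } = await stat(filepath);
  if (size > limit) {
    throw new Error(`File size is ${size} bytes. It exceeds the maximum limit of ${limit} bytes.`);
  }
}
```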


@@ -650,6 +650,12 @@ HELP_AND_FAQ_URL=https://librechat.ai
# Google tag manager id
#ANALYTICS_GTM_ID=user provided google tag manager id
# Limit conversation file imports to a maximum size in bytes so that a large upload
# cannot exhaust the container's memory. Uncomment the line below and supply a size
# in bytes, e.g. 262144000 (250 MiB).
# CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES=262144000
#===============#
# REDIS Options #
#===============#
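For reference, the example value above is 250 MiB expressed in bytes:

```ts
// 250 MiB in bytes; matches CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES=262144000 above
const cap = 250 * 1024 * 1024; // 262,144,000
```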


@@ -115,6 +115,9 @@ router.get('/', async function (req, res) {
sharePointPickerGraphScope: process.env.SHAREPOINT_PICKER_GRAPH_SCOPE,
sharePointPickerSharePointScope: process.env.SHAREPOINT_PICKER_SHAREPOINT_SCOPE,
openidReuseTokens,
conversationImportMaxFileSize: process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES
? parseInt(process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES, 10)
: 0,
};
const minPasswordLength = parseInt(process.env.MIN_PASSWORD_LENGTH, 10);
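The endpoint above falls back to 0 (no limit) when the variable is unset; a non-numeric value would surface as NaN via parseInt. A hypothetical, slightly stricter variant that treats anything non-numeric or non-positive as "no limit" (a sketch, not what the commit does):

```ts
// Hypothetical hardening: unset, non-numeric, or non-positive values all mean "no limit" (0)
const parsed = Number.parseInt(process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES ?? '', 10);
const conversationImportMaxFileSize = Number.isFinite(parsed) && parsed > 0 ? parsed : 0;
```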


@@ -10,6 +10,15 @@ const importConversations = async (job) => {
const { filepath, requestUserId } = job;
try {
logger.debug(`user: ${requestUserId} | Importing conversation(s) from file...`);
/* Reject the file if it exceeds the configured size cap */
const maxFileSize = parseInt(process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES ?? '', 10);
const fileInfo = await fs.stat(filepath);
if (maxFileSize > 0 && fileInfo.size > maxFileSize) {
throw new Error(
`File size is ${fileInfo.size} bytes. It exceeds the maximum limit of ${maxFileSize} bytes.`,
);
}
const fileData = await fs.readFile(filepath, 'utf8');
const jsonData = JSON.parse(fileData);
const importer = getImporter(jsonData);
@@ -17,6 +26,7 @@ const importConversations = async (job) => {
logger.debug(`user: ${requestUserId} | Finished importing conversations`);
} catch (error) {
logger.error(`user: ${requestUserId} | Failed to import conversation: `, error);
throw error; // rethrow so the enclosing request does not report success
} finally {
try {
await fs.unlink(filepath);
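Two details worth noting in this hunk: the error is rethrown so the enclosing request reports the failure instead of returning success, and the `finally` block still deletes the uploaded temp file whether or not the size check or the import itself failed.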


@@ -1,5 +1,7 @@
import { useState, useRef, useCallback } from 'react';
import { Import } from 'lucide-react';
import { useQueryClient } from '@tanstack/react-query';
import { QueryKeys, TStartupConfig } from 'librechat-data-provider';
import { Spinner, useToastContext, Label, Button } from '@librechat/client';
import { useUploadConversationsMutation } from '~/data-provider';
import { NotificationSeverity } from '~/common';
@@ -7,6 +9,8 @@ import { useLocalize } from '~/hooks';
import { cn, logger } from '~/utils';
function ImportConversations() {
const queryClient = useQueryClient();
const startupConfig = queryClient.getQueryData<TStartupConfig>([QueryKeys.startupConfig]);
const localize = useLocalize();
const fileInputRef = useRef<HTMLInputElement>(null);
const { showToast } = useToastContext();
@@ -49,6 +53,17 @@ function ImportConversations() {
const handleFileUpload = useCallback(
async (file: File) => {
try {
const maxFileSize = startupConfig?.conversationImportMaxFileSize;
if (maxFileSize && file.size > maxFileSize) {
const size = (maxFileSize / (1024 * 1024)).toFixed(2); // bytes to MiB for the toast message
showToast({
message: localize('com_error_files_upload_too_large', { 0: size }),
status: NotificationSeverity.ERROR,
});
setIsUploading(false);
return;
}
const formData = new FormData();
formData.append('file', file, encodeURIComponent(file.name || 'File'));
uploadFile.mutate(formData);
@@ -61,13 +76,14 @@
});
}
},
[uploadFile, showToast, localize],
[uploadFile, showToast, localize, startupConfig],
);
const handleFileChange = useCallback(
(event: React.ChangeEvent<HTMLInputElement>) => {
const file = event.target.files?.[0];
if (file) {
setIsUploading(true);
handleFileUpload(file);
}
event.target.value = '';
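For reference, the client-side conversion used for the toast above divides by 1024², so the displayed figure is strictly MiB even though the message string says MB:

```ts
// With the example cap of 262144000 bytes, the toast reports "250.00"
const size = (262144000 / (1024 * 1024)).toFixed(2); // "250.00"
```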


@@ -365,6 +365,7 @@
"com_error_files_process": "An error occurred while processing the file.",
"com_error_files_upload": "An error occurred while uploading the file.",
"com_error_files_upload_canceled": "The file upload request was canceled. Note: the file upload may still be processing and will need to be manually deleted.",
"com_error_files_upload_too_large": "The file is too large. Please upload a file smaller than {{0}} MB",
"com_error_files_validation": "An error occurred while validating the file.",
"com_error_google_tool_conflict": "Usage of built-in Google tools are not supported with external tools. Please disable either the built-in tools or the external tools.",
"com_error_heic_conversion": "Failed to convert HEIC image to JPEG. Please try converting the image manually or use a different format.",


@@ -669,6 +669,7 @@ export type TStartupConfig = {
}
>;
mcpPlaceholder?: string;
conversationImportMaxFileSize?: number;
};
export enum OCRStrategy {