🗂️ refactor: Migrate S3 Storage to TypeScript in packages/api (#11947)

* Migrate S3 storage module with unit and integration tests

  - Migrate S3 CRUD and image operations to packages/api/src/storage/s3/
  - Add S3ImageService class with dependency injection
  - Add unit tests using aws-sdk-client-mock
  - Add integration tests against a real S3 bucket (conditional on the presence of AWS_TEST_BUCKET_NAME)

* AI Review Findings Fixes

* chore: tests and refactor S3 storage types

- Added mock implementations for the 'sharp' library in various test files to improve image processing testing.
- Updated type references in S3 storage files from MongoFile to TFile for consistency and type safety.
- Refactored S3 CRUD operations to ensure proper handling of file types and improve code clarity.
- Enhanced integration tests to validate S3 file operations and error handling more effectively.

* chore: rename test file

* Remove duplicate import of refreshS3Url

* chore: imports order

* fix: remove duplicate imports for S3 URL handling in UserController

* fix: remove duplicate import of refreshS3FileUrls in files.js

* test: Add mock implementations for 'sharp' and '@librechat/api' in UserController tests

- Introduced mock functions for the 'sharp' library to facilitate image processing tests, including metadata retrieval and buffer conversion.
- Enhanced mocking for '@librechat/api' to ensure consistent behavior in tests, particularly for the needsRefresh and getNewS3URL functions.

---------

Co-authored-by: Danny Avila <danny@librechat.ai>
This commit is contained in:
Atef Bellaaj 2026-03-09 20:42:01 +01:00 committed by Danny Avila
parent 428ef2eb15
commit ca6ce8fceb
No known key found for this signature in database
GPG key ID: BF31EEB2C5CA0956
27 changed files with 2455 additions and 1697 deletions

View file

@ -1,6 +1,8 @@
const { logger, webSearchKeys } = require('@librechat/data-schemas');
const { Tools, CacheKeys, Constants, FileSources } = require('librechat-data-provider');
const {
getNewS3URL,
needsRefresh,
MCPOAuthHandler,
MCPTokenStorage,
normalizeHttpError,
@ -10,7 +12,6 @@ const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/service
const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService');
const { getMCPManager, getFlowStateManager, getMCPServersRegistry } = require('~/config');
const { invalidateCachedTools } = require('~/server/services/Config/getCachedTools');
const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud');
const { processDeleteRequest } = require('~/server/services/Files/process');
const { getAppConfig } = require('~/server/services/Config');
const { getLogStores } = require('~/cache');

View file

@ -59,7 +59,16 @@ jest.mock('~/server/services/AuthService', () => ({
resendVerificationEmail: jest.fn(),
}));
jest.mock('~/server/services/Files/S3/crud', () => ({
jest.mock('sharp', () =>
jest.fn(() => ({
metadata: jest.fn().mockResolvedValue({}),
toFormat: jest.fn().mockReturnThis(),
toBuffer: jest.fn().mockResolvedValue(Buffer.alloc(0)),
})),
);
jest.mock('@librechat/api', () => ({
...jest.requireActual('@librechat/api'),
needsRefresh: jest.fn(),
getNewS3URL: jest.fn(),
}));

View file

@ -3,6 +3,7 @@ const fs = require('fs').promises;
const { nanoid } = require('nanoid');
const { logger } = require('@librechat/data-schemas');
const {
refreshS3Url,
agentCreateSchema,
agentUpdateSchema,
refreshListAvatars,
@ -33,7 +34,6 @@ const {
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { resizeAvatar } = require('~/server/services/Files/images/avatar');
const { getFileStrategy } = require('~/server/utils/getFileStrategy');
const { refreshS3Url } = require('~/server/services/Files/S3/crud');
const { filterFile } = require('~/server/services/Files/process');
const { getCachedTools } = require('~/server/services/Config');
const { getLogStores } = require('~/cache');

View file

@ -22,7 +22,16 @@ jest.mock('~/server/services/Files/images/avatar', () => ({
resizeAvatar: jest.fn(),
}));
jest.mock('~/server/services/Files/S3/crud', () => ({
jest.mock('sharp', () =>
jest.fn(() => ({
metadata: jest.fn().mockResolvedValue({}),
toFormat: jest.fn().mockReturnThis(),
toBuffer: jest.fn().mockResolvedValue(Buffer.alloc(0)),
})),
);
jest.mock('@librechat/api', () => ({
...jest.requireActual('@librechat/api'),
refreshS3Url: jest.fn(),
}));
@ -72,7 +81,7 @@ const {
findPubliclyAccessibleResources,
} = require('~/server/services/PermissionService');
const { refreshS3Url } = require('~/server/services/Files/S3/crud');
const { refreshS3Url } = require('@librechat/api');
/**
* @type {import('mongoose').Model<import('@librechat/data-schemas').IAgent>}

View file

@ -39,7 +39,16 @@ jest.mock('~/server/services/Tools/credentials', () => ({
loadAuthValues: jest.fn(),
}));
jest.mock('~/server/services/Files/S3/crud', () => ({
jest.mock('sharp', () =>
jest.fn(() => ({
metadata: jest.fn().mockResolvedValue({}),
toFormat: jest.fn().mockReturnThis(),
toBuffer: jest.fn().mockResolvedValue(Buffer.alloc(0)),
})),
);
jest.mock('@librechat/api', () => ({
...jest.requireActual('@librechat/api'),
refreshS3FileUrls: jest.fn(),
}));

View file

@ -2,6 +2,7 @@ const fs = require('fs').promises;
const express = require('express');
const { EnvVar } = require('@librechat/agents');
const { logger } = require('@librechat/data-schemas');
const { refreshS3FileUrls } = require('@librechat/api');
const {
Time,
isUUID,
@ -25,7 +26,6 @@ const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { checkPermission } = require('~/server/services/PermissionService');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { refreshS3FileUrls } = require('~/server/services/Files/S3/crud');
const { hasAccessToFilesViaAgent } = require('~/server/services/Files');
const { cleanFileName } = require('~/server/utils/files');
const { hasCapability } = require('~/server/middleware');

View file

@ -32,7 +32,16 @@ jest.mock('~/server/services/Tools/credentials', () => ({
loadAuthValues: jest.fn(),
}));
jest.mock('~/server/services/Files/S3/crud', () => ({
jest.mock('sharp', () =>
jest.fn(() => ({
metadata: jest.fn().mockResolvedValue({}),
toFormat: jest.fn().mockReturnThis(),
toBuffer: jest.fn().mockResolvedValue(Buffer.alloc(0)),
})),
);
jest.mock('@librechat/api', () => ({
...jest.requireActual('@librechat/api'),
refreshS3FileUrls: jest.fn(),
}));

View file

@ -1,556 +0,0 @@
const fs = require('fs');
const fetch = require('node-fetch');
const { logger } = require('@librechat/data-schemas');
const { FileSources } = require('librechat-data-provider');
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');
const { initializeS3, deleteRagFile, isEnabled } = require('@librechat/api');
const {
PutObjectCommand,
GetObjectCommand,
HeadObjectCommand,
DeleteObjectCommand,
} = require('@aws-sdk/client-s3');
/** S3 configuration derived from environment variables. */
const bucketName = process.env.AWS_BUCKET_NAME;
const defaultBasePath = 'images';
const endpoint = process.env.AWS_ENDPOINT_URL;
const forcePathStyle = isEnabled(process.env.AWS_FORCE_PATH_STYLE);

/** Signed-URL lifetime in seconds; defaults to 2 minutes, capped at 7 days. */
let s3UrlExpirySeconds = 2 * 60; // 2 minutes
/** Optional fixed URL age (ms) after which a signed URL is considered expired. */
let s3RefreshExpiryMs = null;

if (process.env.S3_URL_EXPIRY_SECONDS !== undefined) {
  const parsed = parseInt(process.env.S3_URL_EXPIRY_SECONDS, 10);
  if (!isNaN(parsed) && parsed > 0) {
    // SigV4 presigned URLs cannot exceed 7 days; clamp rather than fail.
    s3UrlExpirySeconds = Math.min(parsed, 7 * 24 * 60 * 60);
  } else {
    logger.warn(
      `[S3] Invalid S3_URL_EXPIRY_SECONDS value: "${process.env.S3_URL_EXPIRY_SECONDS}". Using 2-minute expiry.`,
    );
  }
}

// Env vars are either strings or undefined — never null — so the previous
// `!== null` guard was always true; a plain truthiness check is sufficient.
if (process.env.S3_REFRESH_EXPIRY_MS) {
  const parsed = parseInt(process.env.S3_REFRESH_EXPIRY_MS, 10);
  if (!isNaN(parsed) && parsed > 0) {
    s3RefreshExpiryMs = parsed;
    logger.info(`[S3] Using custom refresh expiry time: ${s3RefreshExpiryMs}ms`);
  } else {
    logger.warn(
      `[S3] Invalid S3_REFRESH_EXPIRY_MS value: "${process.env.S3_REFRESH_EXPIRY_MS}". Using default refresh logic.`,
    );
  }
}
/**
 * Builds the canonical S3 object key: `<basePath>/<userId>/<fileName>`.
 */
const getS3Key = (basePath, userId, fileName) => [basePath, userId, fileName].join('/');
/**
 * Uploads an in-memory buffer to S3 and returns a signed URL for the stored object.
 *
 * @param {Object} params
 * @param {string} params.userId - The user's unique identifier.
 * @param {Buffer} params.buffer - Raw file contents to store.
 * @param {string} params.fileName - Object file name within the user's prefix.
 * @param {string} [params.basePath='images'] - Base path in the bucket.
 * @returns {Promise<string>} Signed URL of the uploaded object.
 * @throws Re-throws any S3 error after logging it.
 */
async function saveBufferToS3({ userId, buffer, fileName, basePath = defaultBasePath }) {
  try {
    const client = initializeS3();
    await client.send(
      new PutObjectCommand({
        Bucket: bucketName,
        Key: getS3Key(basePath, userId, fileName),
        Body: buffer,
      }),
    );
    return await getS3URL({ userId, fileName, basePath });
  } catch (error) {
    logger.error('[saveBufferToS3] Error uploading buffer to S3:', error.message);
    throw error;
  }
}
/**
 * Produces a time-limited signed URL for an object stored in S3.
 *
 * @param {Object} params
 * @param {string} params.userId - The user's unique identifier.
 * @param {string} params.fileName - Object file name within the user's prefix.
 * @param {string} [params.basePath='images'] - Base path in the bucket.
 * @param {string} [params.customFilename] - If set, forces a download filename via Content-Disposition.
 * @param {string} [params.contentType] - If set, overrides the response Content-Type.
 * @returns {Promise<string>} Signed URL valid for `s3UrlExpirySeconds`.
 * @throws Re-throws any signing error after logging it.
 */
async function getS3URL({
  userId,
  fileName,
  basePath = defaultBasePath,
  customFilename = null,
  contentType = null,
}) {
  const commandInput = {
    Bucket: bucketName,
    Key: getS3Key(basePath, userId, fileName),
  };
  // Optional response-header overrides baked into the signed URL.
  if (customFilename) {
    commandInput.ResponseContentDisposition = `attachment; filename="${customFilename}"`;
  }
  if (contentType) {
    commandInput.ResponseContentType = contentType;
  }
  try {
    const client = initializeS3();
    return await getSignedUrl(client, new GetObjectCommand(commandInput), {
      expiresIn: s3UrlExpirySeconds,
    });
  } catch (error) {
    logger.error('[getS3URL] Error getting signed URL from S3:', error.message);
    throw error;
  }
}
/**
 * Downloads a file from a URL and stores it in S3.
 *
 * @param {Object} params
 * @param {string} params.userId - The user's unique identifier.
 * @param {string} params.URL - Source URL to fetch.
 * @param {string} params.fileName - Object file name within the user's prefix.
 * @param {string} [params.basePath='images'] - Base path in the bucket.
 * @returns {Promise<string>} Signed URL of the uploaded object.
 * @throws Re-throws fetch/upload errors after logging them.
 */
async function saveURLToS3({ userId, URL, fileName, basePath = defaultBasePath }) {
  try {
    const response = await fetch(URL);
    const downloaded = await response.buffer();
    return await saveBufferToS3({ userId, buffer: downloaded, fileName, basePath });
  } catch (error) {
    logger.error('[saveURLToS3] Error uploading file from URL to S3:', error.message);
    throw error;
  }
}
/**
 * Deletes a file from S3 (and its RAG-service counterpart first).
 *
 * Note: despite the original `@param {Object} params` doc, this function takes
 * two positional arguments, not a destructured options object.
 *
 * @param {ServerRequest} req - Request; `req.user.id` must appear in the object's key.
 * @param {MongoFile} file - The file to delete; `file.filepath` holds the S3 URL/key.
 * @returns {Promise<void>} Resolves silently when the object is already absent.
 */
async function deleteFileFromS3(req, file) {
  await deleteRagFile({ userId: req.user.id, file });
  const key = extractKeyFromS3Url(file.filepath);
  const params = { Bucket: bucketName, Key: key };
  // Authorization guard: refuse to delete keys that don't embed the requester's user ID.
  if (!key.includes(req.user.id)) {
    const message = `[deleteFileFromS3] User ID mismatch: ${req.user.id} vs ${key}`;
    logger.error(message);
    throw new Error(message);
  }
  try {
    const s3 = initializeS3();
    // Existence pre-check: a missing object is a warn + early return, not an error.
    try {
      const headCommand = new HeadObjectCommand(params);
      await s3.send(headCommand);
      logger.debug('[deleteFileFromS3] File exists, proceeding with deletion');
    } catch (headErr) {
      if (headErr.name === 'NotFound') {
        logger.warn(`[deleteFileFromS3] File does not exist: ${key}`);
        return;
      }
    }
    const deleteResult = await s3.send(new DeleteObjectCommand(params));
    logger.debug('[deleteFileFromS3] Delete command response:', JSON.stringify(deleteResult));
    // Post-delete verification: HEAD should now fail with NotFound.
    try {
      await s3.send(new HeadObjectCommand(params));
      logger.error('[deleteFileFromS3] File still exists after deletion!');
    } catch (verifyErr) {
      if (verifyErr.name === 'NotFound') {
        logger.debug(`[deleteFileFromS3] Verified file is deleted: ${key}`);
      } else {
        logger.error('[deleteFileFromS3] Error verifying deletion:', verifyErr);
      }
    }
    logger.debug('[deleteFileFromS3] S3 File deletion completed');
  } catch (error) {
    logger.error(`[deleteFileFromS3] Error deleting file from S3: ${error.message}`);
    logger.error(error.stack);
    // If the file is not found, we can safely return.
    // NOTE(review): AWS SDK v3 errors generally expose `name` rather than `code`;
    // confirm 'NoSuchKey' is actually reachable via `error.code` here.
    if (error.code === 'NoSuchKey') {
      return;
    }
    throw error;
  }
}
/**
 * Streams a local (Multer) file into S3 without buffering it in memory.
 *
 * @param {Object} params
 * @param {import('express').Request} params.req - Request carrying the authenticated user.
 * @param {Express.Multer.File} params.file - Multer file descriptor (has `path`, `originalname`).
 * @param {string} params.file_id - Unique file identifier, prefixed onto the object name.
 * @param {string} [params.basePath='images'] - Base path in the bucket.
 * @returns {Promise<{ filepath: string, bytes: number }>} Signed URL and file size.
 * @throws Re-throws the upload error after logging and best-effort temp-file cleanup.
 */
async function uploadFileToS3({ req, file, file_id, basePath = defaultBasePath }) {
  try {
    const userId = req.user.id;
    const fileName = `${file_id}__${file.originalname}`;
    const { size: bytes } = await fs.promises.stat(file.path);
    const client = initializeS3();
    await client.send(
      new PutObjectCommand({
        Bucket: bucketName,
        Key: getS3Key(basePath, userId, fileName),
        Body: fs.createReadStream(file.path),
      }),
    );
    const filepath = await getS3URL({ userId, fileName, basePath });
    return { filepath, bytes };
  } catch (error) {
    logger.error('[uploadFileToS3] Error streaming file to S3:', error);
    // Best-effort cleanup of the temp file; it may already be gone.
    try {
      if (file && file.path) {
        await fs.promises.unlink(file.path);
      }
    } catch (unlinkError) {
      logger.error(
        '[uploadFileToS3] Error deleting temporary file, likely already deleted:',
        unlinkError.message,
      );
    }
    throw error;
  }
}
/**
 * Extracts the S3 object key from a URL, or returns the input when it already
 * looks like a bare key.
 *
 * Handles three layouts:
 *  1. custom endpoint + forced path style (endpoint may carry its own base path),
 *  2. path-style URLs (`https://s3.amazonaws.com/bucket/key` and compatible stores),
 *  3. virtual-hosted-style URLs (`https://bucket.s3.amazonaws.com/key`),
 * with a non-URL fallback for raw keys.
 *
 * @param {string} fileUrlOrKey - The file URL or key
 * @returns {string} The S3 key (may be '' when extraction fails)
 * @throws {Error} When the input is empty.
 */
function extractKeyFromS3Url(fileUrlOrKey) {
  if (!fileUrlOrKey) {
    throw new Error('Invalid input: URL or key is empty');
  }
  try {
    const url = new URL(fileUrlOrKey);
    const hostname = url.hostname;
    const pathname = url.pathname.substring(1); // Remove leading slash
    // Explicit path-style with custom endpoint: use endpoint pathname for precise key extraction.
    // Handles endpoints with a base path (e.g. https://example.com/storage/).
    if (endpoint && forcePathStyle) {
      const endpointUrl = new URL(endpoint);
      // Skip past "<endpoint base path>/<bucket>/" to the start of the key.
      const startPos =
        endpointUrl.pathname.length +
        (endpointUrl.pathname.endsWith('/') ? 0 : 1) +
        bucketName.length +
        1;
      const key = url.pathname.substring(startPos);
      if (!key) {
        logger.warn(
          `[extractKeyFromS3Url] Extracted key is empty for endpoint path-style URL: ${fileUrlOrKey}`,
        );
      } else {
        logger.debug(`[extractKeyFromS3Url] fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${key}`);
      }
      return key;
    }
    if (
      hostname === 's3.amazonaws.com' ||
      hostname.match(/^s3[-.][a-z0-9-]+\.amazonaws\.com$/) ||
      (bucketName && pathname.startsWith(`${bucketName}/`))
    ) {
      // Path-style: https://s3.amazonaws.com/bucket-name/key or custom endpoint (MinIO, R2, etc.)
      // Strip the bucket name (first path segment)
      const firstSlashIndex = pathname.indexOf('/');
      if (firstSlashIndex > 0) {
        const key = pathname.substring(firstSlashIndex + 1);
        if (key === '') {
          logger.warn(
            `[extractKeyFromS3Url] Extracted key is empty after removing bucket name from URL: ${fileUrlOrKey}`,
          );
        } else {
          logger.debug(
            `[extractKeyFromS3Url] fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${key}`,
          );
        }
        return key;
      } else {
        logger.warn(
          `[extractKeyFromS3Url] Unable to extract key from path-style URL: ${fileUrlOrKey}`,
        );
        return '';
      }
    }
    // Virtual-hosted-style or other: https://bucket-name.s3.amazonaws.com/key
    // Just return the pathname without leading slash
    logger.debug(`[extractKeyFromS3Url] fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${pathname}`);
    return pathname;
  } catch (error) {
    // `new URL()` threw: either a malformed http(s) URL (log as error) or a bare key (debug).
    if (fileUrlOrKey.startsWith('http://') || fileUrlOrKey.startsWith('https://')) {
      logger.error(
        `[extractKeyFromS3Url] Error parsing URL: ${fileUrlOrKey}, Error: ${error.message}`,
      );
    } else {
      logger.debug(`[extractKeyFromS3Url] Non-URL input, using fallback: ${fileUrlOrKey}`);
    }
    // A slash-separated non-URL with >= 3 segments is assumed to already be a full key
    // (basePath/userId/fileName); otherwise strip at most one leading slash.
    const parts = fileUrlOrKey.split('/');
    if (parts.length >= 3 && !fileUrlOrKey.startsWith('http') && !fileUrlOrKey.startsWith('/')) {
      return fileUrlOrKey;
    }
    const key = fileUrlOrKey.startsWith('/') ? fileUrlOrKey.substring(1) : fileUrlOrKey;
    logger.debug(
      `[extractKeyFromS3Url] FALLBACK. fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${key}`,
    );
    return key;
  }
}
/**
 * Opens a readable stream for an object stored in S3.
 *
 * @param {ServerRequest} _req - Server request (unused; kept for strategy-interface parity).
 * @param {string} filePath - S3 URL or key of the file.
 * @returns {Promise<NodeJS.ReadableStream>} The object's body stream.
 * @throws Re-throws any retrieval error after logging it.
 */
async function getS3FileStream(_req, filePath) {
  try {
    const client = initializeS3();
    const response = await client.send(
      new GetObjectCommand({ Bucket: bucketName, Key: extractKeyFromS3Url(filePath) }),
    );
    return response.Body; // Node.js ReadableStream
  } catch (error) {
    logger.error('[getS3FileStream] Error retrieving S3 file stream:', error);
    throw error;
  }
}
/**
 * Determines whether a signed S3 URL is expired or close to expiring.
 *
 * Non-signed URLs (no X-Amz-Signature param) never need a refresh. When
 * expiration metadata is missing or unparsable, returns true as a safe default.
 *
 * @param {string} signedUrl - The signed S3 URL
 * @param {number} bufferSeconds - Safety margin before actual expiry
 * @returns {boolean} True if the URL needs refreshing
 */
function needsRefresh(signedUrl, bufferSeconds) {
  try {
    // Parse the URL
    const url = new URL(signedUrl);
    // X-Amz-Signature is the most reliable indicator for AWS signed URLs.
    if (!url.searchParams.has('X-Amz-Signature')) {
      // Not a signed URL, so no expiration to check (or it's already a proxy URL)
      return false;
    }
    // Extract the expiration time from the URL
    const expiresParam = url.searchParams.get('X-Amz-Expires');
    const dateParam = url.searchParams.get('X-Amz-Date');
    if (!expiresParam || !dateParam) {
      // Missing expiration information, assume it needs refresh to be safe
      return true;
    }
    // Parse the AWS date format (YYYYMMDDTHHMMSSZ): fixed-width fields,
    // with the literal 'T' at index 8 skipped between day and hour.
    const year = dateParam.substring(0, 4);
    const month = dateParam.substring(4, 6);
    const day = dateParam.substring(6, 8);
    const hour = dateParam.substring(9, 11);
    const minute = dateParam.substring(11, 13);
    const second = dateParam.substring(13, 15);
    const dateObj = new Date(`${year}-${month}-${day}T${hour}:${minute}:${second}Z`);
    const expiresAtDate = new Date(dateObj.getTime() + parseInt(expiresParam) * 1000);
    const now = new Date();
    // If S3_REFRESH_EXPIRY_MS is set, age-based policy wins: refresh once the URL
    // is older than the configured number of milliseconds.
    if (s3RefreshExpiryMs !== null) {
      const urlCreationTime = dateObj.getTime();
      const urlAge = now.getTime() - urlCreationTime;
      return urlAge >= s3RefreshExpiryMs;
    }
    // Otherwise use the default buffer-based logic: refresh when expiry falls
    // within the next `bufferSeconds`.
    const bufferTime = new Date(now.getTime() + bufferSeconds * 1000);
    return expiresAtDate <= bufferTime;
  } catch (error) {
    logger.error('Error checking URL expiration:', error);
    // If we can't determine, assume it needs refresh to be safe
    return true;
  }
}
/**
 * Generates a fresh signed URL for a (possibly expired) S3 file URL.
 * Resolves to undefined when the key cannot be extracted, is malformed
 * (fewer than 3 path segments), or signing fails.
 *
 * @param {string} currentURL - The current file URL
 * @returns {Promise<string | undefined>}
 */
async function getNewS3URL(currentURL) {
  try {
    const s3Key = extractKeyFromS3Url(currentURL);
    if (!s3Key) {
      return;
    }
    const segments = s3Key.split('/');
    if (segments.length < 3) {
      return;
    }
    // Key layout is basePath/userId/<rest of file name, possibly with slashes>.
    const [basePath, userId, ...rest] = segments;
    return await getS3URL({ userId, fileName: rest.join('/'), basePath });
  } catch (error) {
    logger.error('Error getting new S3 URL:', error);
  }
}
/**
 * Refreshes S3 URLs in-place for any files that are expired or close to expiring,
 * then persists the changed URLs in one batch update.
 *
 * @param {MongoFile[]} files - Array of file documents (mutated in place).
 * @param {(files: MongoFile[]) => Promise<void>} batchUpdateFiles - Persists refreshed URLs.
 * @param {number} [bufferSeconds=3600] - Safety margin before actual expiry.
 * @returns {Promise<MongoFile[]>} The same array, with refreshed filepaths where needed.
 */
async function refreshS3FileUrls(files, batchUpdateFiles, bufferSeconds = 3600) {
  if (!Array.isArray(files) || files.length === 0) {
    return files;
  }
  const updates = [];
  for (const [index, file] of files.entries()) {
    // Only S3-sourced files with an id, a path, and an expiring URL qualify.
    const eligible =
      file?.file_id != null &&
      file.source === FileSources.s3 &&
      Boolean(file.filepath) &&
      needsRefresh(file.filepath, bufferSeconds);
    if (!eligible) {
      continue;
    }
    try {
      const refreshedUrl = await getNewS3URL(file.filepath);
      if (!refreshedUrl) {
        continue;
      }
      updates.push({ file_id: file.file_id, filepath: refreshedUrl });
      files[index].filepath = refreshedUrl;
    } catch (error) {
      // One failed refresh never blocks the rest of the batch.
      logger.error(`Error refreshing S3 URL for file ${file.file_id}:`, error);
    }
  }
  if (updates.length > 0) {
    await batchUpdateFiles(updates);
  }
  return files;
}
/**
 * Refreshes a single S3 URL when it is expired or close to expiring.
 * Non-S3 files and failures fall back to the original filepath.
 *
 * @param {{ filepath: string, source: string }} fileObj - File-like object with filepath and source.
 * @param {number} [bufferSeconds=3600] - Safety margin before actual expiry.
 * @returns {Promise<string>} The refreshed URL, or the original URL when no refresh applies.
 */
async function refreshS3Url(fileObj, bufferSeconds = 3600) {
  const currentPath = fileObj?.filepath;
  if (!fileObj || fileObj.source !== FileSources.s3 || !currentPath) {
    return currentPath || '';
  }
  if (!needsRefresh(currentPath, bufferSeconds)) {
    return currentPath;
  }
  try {
    const s3Key = extractKeyFromS3Url(currentPath);
    if (!s3Key) {
      logger.warn(`Unable to extract S3 key from URL: ${currentPath}`);
      return currentPath;
    }
    const segments = s3Key.split('/');
    if (segments.length < 3) {
      logger.warn(`Invalid S3 key format: ${s3Key}`);
      return currentPath;
    }
    // Key layout is basePath/userId/<rest of file name, possibly with slashes>.
    const [basePath, userId, ...rest] = segments;
    const refreshedUrl = await getS3URL({ userId, fileName: rest.join('/'), basePath });
    logger.debug(`Refreshed S3 URL for key: ${s3Key}`);
    return refreshedUrl;
  } catch (error) {
    logger.error(`Error refreshing S3 URL: ${error.message}`);
    return currentPath;
  }
}
// Public S3 CRUD surface consumed by the Files strategy layer and URL-refresh callers.
module.exports = {
  saveBufferToS3,
  saveURLToS3,
  getS3URL,
  deleteFileFromS3,
  uploadFileToS3,
  getS3FileStream,
  refreshS3FileUrls,
  refreshS3Url,
  needsRefresh,
  getNewS3URL,
  extractKeyFromS3Url,
};

View file

@ -1,129 +0,0 @@
const fs = require('fs');
const path = require('path');
const sharp = require('sharp');
const { logger } = require('@librechat/data-schemas');
const { resizeImageBuffer } = require('../images/resize');
const { updateUser, updateFile } = require('~/models');
const { saveBufferToS3 } = require('./crud');
// Default base path used for image objects in the bucket.
const defaultBasePath = 'images';
/**
 * Resizes, converts, and uploads an image file to S3, then removes the temp file.
 *
 * @param {Object} params
 * @param {import('express').Request} params.req - Express request; reads `req.user.id`
 *   and `req.config.imageOutputType` (the target image format).
 * @param {Express.Multer.File} params.file - File object from Multer.
 * @param {string} params.file_id - Unique file identifier.
 * @param {any} params.endpoint - Endpoint identifier used in image processing.
 * @param {string} [params.resolution='high'] - Desired image resolution.
 * @param {string} [params.basePath='images'] - Base path in the bucket.
 * @returns {Promise<{ filepath: string, bytes: number, width: number, height: number }>}
 */
async function uploadImageToS3({
  req,
  file,
  file_id,
  endpoint,
  resolution = 'high',
  basePath = defaultBasePath,
}) {
  try {
    const appConfig = req.config;
    const inputFilePath = file.path;
    const inputBuffer = await fs.promises.readFile(inputFilePath);
    const {
      buffer: resizedBuffer,
      width,
      height,
    } = await resizeImageBuffer(inputBuffer, resolution, endpoint);
    const extension = path.extname(inputFilePath);
    const userId = req.user.id;
    let processedBuffer;
    let fileName = `${file_id}__${path.basename(inputFilePath)}`;
    const targetExtension = `.${appConfig.imageOutputType}`;
    if (extension.toLowerCase() === targetExtension) {
      // Already in the configured output format: skip the re-encode.
      processedBuffer = resizedBuffer;
    } else {
      // Re-encode to the configured format and swap the file extension to match.
      processedBuffer = await sharp(resizedBuffer).toFormat(appConfig.imageOutputType).toBuffer();
      // NOTE(review): path.extname(fileName) is used unescaped as a RegExp, so the
      // '.' in the extension matches any character — confirm this is acceptable here.
      fileName = fileName.replace(new RegExp(path.extname(fileName) + '$'), targetExtension);
      if (!path.extname(fileName)) {
        fileName += targetExtension;
      }
    }
    const downloadURL = await saveBufferToS3({
      userId,
      buffer: processedBuffer,
      fileName,
      basePath,
    });
    // The temp file is only removed after a successful upload.
    await fs.promises.unlink(inputFilePath);
    const bytes = Buffer.byteLength(processedBuffer);
    return { filepath: downloadURL, bytes, width, height };
  } catch (error) {
    logger.error('[uploadImageToS3] Error uploading image to S3:', error.message);
    throw error;
  }
}
/**
 * Updates a file record and resolves to `[updateResult, filepath]`.
 *
 * @param {import('express').Request} req - Express request (unused by the body).
 * @param {Object} file - File metadata; must include `file_id` and `filepath`.
 * @returns {Promise<[any, string]>} The update result paired with the file's existing filepath.
 * @throws Logs and re-throws any failure from the database update.
 */
async function prepareImageURLS3(req, file) {
  try {
    const updatePromise = updateFile({ file_id: file.file_id });
    // Await here so a rejected update is actually caught and logged below;
    // previously the un-awaited promise bypassed this try/catch entirely.
    return await Promise.all([updatePromise, file.filepath]);
  } catch (error) {
    logger.error('[prepareImageURLS3] Error preparing image URL:', error.message);
    throw error;
  }
}
/**
 * Uploads a user or agent avatar image to S3 and, for manual user-avatar updates,
 * persists the new URL on the user record.
 *
 * @param {Object} params
 * @param {Buffer} params.buffer - Avatar image buffer.
 * @param {string} params.userId - User's unique identifier.
 * @param {string} params.manual - 'true' or 'false' flag for manual update.
 * @param {string} [params.agentId] - Optional agent ID if this is an agent avatar.
 * @param {string} [params.basePath='images'] - Base path in the bucket.
 * @returns {Promise<string>} Signed URL of the uploaded avatar.
 * @throws Re-throws processing/upload errors after logging them.
 */
async function processS3Avatar({ buffer, userId, manual, agentId, basePath = defaultBasePath }) {
  try {
    // GIFs keep their format (to preserve animation); everything else becomes PNG.
    const { format } = await sharp(buffer).metadata();
    const extension = format === 'gif' ? 'gif' : 'png';
    const timestamp = Date.now();
    // Timestamped filename, prefixed with the agent ID when applicable.
    const fileName = agentId
      ? `agent-${agentId}-avatar-${timestamp}.${extension}`
      : `avatar-${timestamp}.${extension}`;
    const downloadURL = await saveBufferToS3({ userId, buffer, fileName, basePath });
    // Only a manual user-avatar update touches the user record; agent avatars never do.
    if (manual === 'true' && !agentId) {
      await updateUser(userId, { avatar: downloadURL });
    }
    return downloadURL;
  } catch (error) {
    logger.error('[processS3Avatar] Error processing S3 avatar:', error.message);
    throw error;
  }
}
// Public S3 image-handling surface consumed by the Files strategy layer.
module.exports = {
  uploadImageToS3,
  prepareImageURLS3,
  processS3Avatar,
};

View file

@ -1,7 +0,0 @@
const crud = require('./crud');
const images = require('./images');
module.exports = {
...crud,
...images,
};

View file

@ -1,6 +1,13 @@
const { FileSources } = require('librechat-data-provider');
const {
getS3URL,
saveURLToS3,
parseDocument,
uploadFileToS3,
S3ImageService,
saveBufferToS3,
getS3FileStream,
deleteFileFromS3,
uploadMistralOCR,
uploadAzureMistralOCR,
uploadGoogleVertexMistralOCR,
@ -27,17 +34,18 @@ const {
processLocalAvatar,
getLocalFileStream,
} = require('./Local');
const {
getS3URL,
saveURLToS3,
saveBufferToS3,
getS3FileStream,
uploadImageToS3,
prepareImageURLS3,
deleteFileFromS3,
processS3Avatar,
uploadFileToS3,
} = require('./S3');
const { resizeImageBuffer } = require('./images/resize');
const { updateUser, updateFile } = require('~/models');
const s3ImageService = new S3ImageService({
resizeImageBuffer,
updateUser,
updateFile,
});
const uploadImageToS3 = (params) => s3ImageService.uploadImageToS3(params);
const prepareImageURLS3 = (_req, file) => s3ImageService.prepareImageURL(file);
const processS3Avatar = (params) => s3ImageService.processAvatar(params);
const {
saveBufferToAzure,
saveURLToAzure,

View file

@ -1,72 +0,0 @@
// Unit tests for the customFilename/contentType additions to getS3URL.
// AWS SDK clients and the presigner are mocked; only GetObjectCommand inputs are inspected.
const { getS3URL } = require('../../../../../server/services/Files/S3/crud');
// Mock AWS SDK
jest.mock('@aws-sdk/client-s3', () => ({
  S3Client: jest.fn(() => ({
    send: jest.fn(),
  })),
  GetObjectCommand: jest.fn(),
}));
jest.mock('@aws-sdk/s3-request-presigner', () => ({
  getSignedUrl: jest.fn(),
}));
jest.mock('../../../../../config', () => ({
  logger: {
    error: jest.fn(),
  },
}));
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');
const { GetObjectCommand } = require('@aws-sdk/client-s3');
describe('S3 crud.js - test only new parameter changes', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    // crud.js reads AWS_BUCKET_NAME at call time for command parameters.
    process.env.AWS_BUCKET_NAME = 'test-bucket';
  });
  // Test only the new customFilename parameter
  it('should include customFilename in response headers when provided', async () => {
    getSignedUrl.mockResolvedValue('https://test-presigned-url.com');
    await getS3URL({
      userId: 'user123',
      fileName: 'test.pdf',
      customFilename: 'cleaned_filename.pdf',
    });
    // Verify the new ResponseContentDisposition parameter is added to GetObjectCommand
    const commandArgs = GetObjectCommand.mock.calls[0][0];
    expect(commandArgs.ResponseContentDisposition).toBe(
      'attachment; filename="cleaned_filename.pdf"',
    );
  });
  // Test only the new contentType parameter
  it('should include contentType in response headers when provided', async () => {
    getSignedUrl.mockResolvedValue('https://test-presigned-url.com');
    await getS3URL({
      userId: 'user123',
      fileName: 'test.pdf',
      contentType: 'application/pdf',
    });
    // Verify the new ResponseContentType parameter is added to GetObjectCommand
    const commandArgs = GetObjectCommand.mock.calls[0][0];
    expect(commandArgs.ResponseContentType).toBe('application/pdf');
  });
  it('should work without new parameters (backward compatibility)', async () => {
    getSignedUrl.mockResolvedValue('https://test-presigned-url.com');
    const result = await getS3URL({
      userId: 'user123',
      fileName: 'test.pdf',
    });
    expect(result).toBe('https://test-presigned-url.com');
  });
});

View file

@ -1,876 +0,0 @@
// Test harness setup for S3 CRUD operations: mocks fs, fetch, the AWS SDK,
// and the @librechat packages so crud.js can be exercised in isolation.
const fs = require('fs');
const fetch = require('node-fetch');
const { Readable } = require('stream');
const { FileSources } = require('librechat-data-provider');
const {
  PutObjectCommand,
  GetObjectCommand,
  HeadObjectCommand,
  DeleteObjectCommand,
} = require('@aws-sdk/client-s3');
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');
// Mock dependencies
jest.mock('fs');
jest.mock('node-fetch');
jest.mock('@aws-sdk/s3-request-presigner');
jest.mock('@aws-sdk/client-s3');
jest.mock('@librechat/api', () => ({
  initializeS3: jest.fn(),
  deleteRagFile: jest.fn().mockResolvedValue(undefined),
  isEnabled: jest.fn((val) => val === 'true'),
}));
jest.mock('@librechat/data-schemas', () => ({
  logger: {
    debug: jest.fn(),
    info: jest.fn(),
    warn: jest.fn(),
    error: jest.fn(),
  },
}));
const { initializeS3, deleteRagFile } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
// Set env vars before requiring crud so module-level constants pick them up
process.env.AWS_BUCKET_NAME = 'test-bucket';
process.env.S3_URL_EXPIRY_SECONDS = '120';
const {
  saveBufferToS3,
  saveURLToS3,
  getS3URL,
  deleteFileFromS3,
  uploadFileToS3,
  getS3FileStream,
  refreshS3FileUrls,
  refreshS3Url,
  needsRefresh,
  getNewS3URL,
  extractKeyFromS3Url,
} = require('~/server/services/Files/S3/crud');
describe('S3 CRUD Operations', () => {
let mockS3Client;
beforeEach(() => {
jest.clearAllMocks();
// Setup mock S3 client
mockS3Client = {
send: jest.fn(),
};
initializeS3.mockReturnValue(mockS3Client);
});
afterEach(() => {
delete process.env.S3_URL_EXPIRY_SECONDS;
delete process.env.S3_REFRESH_EXPIRY_MS;
delete process.env.AWS_BUCKET_NAME;
});
describe('saveBufferToS3', () => {
it('should upload a buffer to S3 and return a signed URL', async () => {
const mockBuffer = Buffer.from('test data');
const mockSignedUrl =
'https://s3.amazonaws.com/test-bucket/images/user123/test.jpg?signature=abc';
mockS3Client.send.mockResolvedValue({});
getSignedUrl.mockResolvedValue(mockSignedUrl);
const result = await saveBufferToS3({
userId: 'user123',
buffer: mockBuffer,
fileName: 'test.jpg',
basePath: 'images',
});
expect(mockS3Client.send).toHaveBeenCalledWith(expect.any(PutObjectCommand));
expect(result).toBe(mockSignedUrl);
});
it('should use default basePath if not provided', async () => {
const mockBuffer = Buffer.from('test data');
const mockSignedUrl =
'https://s3.amazonaws.com/test-bucket/images/user123/test.jpg?signature=abc';
mockS3Client.send.mockResolvedValue({});
getSignedUrl.mockResolvedValue(mockSignedUrl);
await saveBufferToS3({
userId: 'user123',
buffer: mockBuffer,
fileName: 'test.jpg',
});
expect(getSignedUrl).toHaveBeenCalled();
});
it('should handle S3 upload errors', async () => {
const mockBuffer = Buffer.from('test data');
const error = new Error('S3 upload failed');
mockS3Client.send.mockRejectedValue(error);
await expect(
saveBufferToS3({
userId: 'user123',
buffer: mockBuffer,
fileName: 'test.jpg',
}),
).rejects.toThrow('S3 upload failed');
expect(logger.error).toHaveBeenCalledWith(
'[saveBufferToS3] Error uploading buffer to S3:',
'S3 upload failed',
);
});
});
describe('getS3URL', () => {
it('should return a signed URL for a file', async () => {
const mockSignedUrl =
'https://s3.amazonaws.com/test-bucket/images/user123/file.pdf?signature=xyz';
getSignedUrl.mockResolvedValue(mockSignedUrl);
const result = await getS3URL({
userId: 'user123',
fileName: 'file.pdf',
basePath: 'documents',
});
expect(result).toBe(mockSignedUrl);
expect(getSignedUrl).toHaveBeenCalledWith(
mockS3Client,
expect.any(GetObjectCommand),
expect.objectContaining({ expiresIn: 120 }),
);
});
it('should add custom filename to Content-Disposition header', async () => {
const mockSignedUrl =
'https://s3.amazonaws.com/test-bucket/images/user123/file.pdf?signature=xyz';
getSignedUrl.mockResolvedValue(mockSignedUrl);
await getS3URL({
userId: 'user123',
fileName: 'file.pdf',
customFilename: 'custom-name.pdf',
});
expect(getSignedUrl).toHaveBeenCalled();
});
it('should add custom content type', async () => {
const mockSignedUrl =
'https://s3.amazonaws.com/test-bucket/images/user123/file.pdf?signature=xyz';
getSignedUrl.mockResolvedValue(mockSignedUrl);
await getS3URL({
userId: 'user123',
fileName: 'file.pdf',
contentType: 'application/pdf',
});
expect(getSignedUrl).toHaveBeenCalled();
});
it('should handle errors when getting signed URL', async () => {
const error = new Error('Failed to sign URL');
getSignedUrl.mockRejectedValue(error);
await expect(
getS3URL({
userId: 'user123',
fileName: 'file.pdf',
}),
).rejects.toThrow('Failed to sign URL');
expect(logger.error).toHaveBeenCalledWith(
'[getS3URL] Error getting signed URL from S3:',
'Failed to sign URL',
);
});
});
describe('saveURLToS3', () => {
it('should fetch a file from URL and save to S3', async () => {
const mockBuffer = Buffer.from('downloaded data');
const mockResponse = {
buffer: jest.fn().mockResolvedValue(mockBuffer),
};
const mockSignedUrl =
'https://s3.amazonaws.com/test-bucket/images/user123/downloaded.jpg?signature=abc';
fetch.mockResolvedValue(mockResponse);
mockS3Client.send.mockResolvedValue({});
getSignedUrl.mockResolvedValue(mockSignedUrl);
const result = await saveURLToS3({
userId: 'user123',
URL: 'https://example.com/image.jpg',
fileName: 'downloaded.jpg',
});
expect(fetch).toHaveBeenCalledWith('https://example.com/image.jpg');
expect(mockS3Client.send).toHaveBeenCalled();
expect(result).toBe(mockSignedUrl);
});
it('should handle fetch errors', async () => {
const error = new Error('Network error');
fetch.mockRejectedValue(error);
await expect(
saveURLToS3({
userId: 'user123',
URL: 'https://example.com/image.jpg',
fileName: 'downloaded.jpg',
}),
).rejects.toThrow('Network error');
expect(logger.error).toHaveBeenCalled();
});
});
describe('deleteFileFromS3', () => {
const mockReq = {
user: { id: 'user123' },
};
it('should delete a file from S3', async () => {
const mockFile = {
filepath: 'https://s3.amazonaws.com/test-bucket/images/user123/file.jpg',
file_id: 'file123',
};
// Mock HeadObject to verify file exists
mockS3Client.send
.mockResolvedValueOnce({}) // First HeadObject - exists
.mockResolvedValueOnce({}) // DeleteObject
.mockRejectedValueOnce({ name: 'NotFound' }); // Second HeadObject - deleted
await deleteFileFromS3(mockReq, mockFile);
expect(deleteRagFile).toHaveBeenCalledWith({ userId: 'user123', file: mockFile });
expect(mockS3Client.send).toHaveBeenCalledWith(expect.any(HeadObjectCommand));
expect(mockS3Client.send).toHaveBeenCalledWith(expect.any(DeleteObjectCommand));
});
it('should handle file not found gracefully', async () => {
const mockFile = {
filepath: 'https://s3.amazonaws.com/test-bucket/images/user123/nonexistent.jpg',
file_id: 'file123',
};
mockS3Client.send.mockRejectedValue({ name: 'NotFound' });
await deleteFileFromS3(mockReq, mockFile);
expect(logger.warn).toHaveBeenCalled();
});
it('should throw error if user ID does not match', async () => {
const mockFile = {
filepath: 'https://s3.amazonaws.com/test-bucket/images/different-user/file.jpg',
file_id: 'file123',
};
await expect(deleteFileFromS3(mockReq, mockFile)).rejects.toThrow('User ID mismatch');
expect(logger.error).toHaveBeenCalled();
});
it('should handle NoSuchKey error', async () => {
const mockFile = {
filepath: 'https://s3.amazonaws.com/test-bucket/images/user123/file.jpg',
file_id: 'file123',
};
mockS3Client.send
.mockResolvedValueOnce({}) // HeadObject - exists
.mockRejectedValueOnce({ code: 'NoSuchKey' }); // DeleteObject fails
await deleteFileFromS3(mockReq, mockFile);
expect(logger.debug).toHaveBeenCalled();
});
});
describe('uploadFileToS3', () => {
const mockReq = {
user: { id: 'user123' },
};
it('should upload a file from disk to S3', async () => {
const mockFile = {
path: '/tmp/upload.jpg',
originalname: 'photo.jpg',
};
const mockStats = { size: 1024 };
const mockSignedUrl =
'https://s3.amazonaws.com/test-bucket/images/user123/file123__photo.jpg?signature=xyz';
fs.promises = { stat: jest.fn().mockResolvedValue(mockStats) };
fs.createReadStream = jest.fn().mockReturnValue(new Readable());
mockS3Client.send.mockResolvedValue({});
getSignedUrl.mockResolvedValue(mockSignedUrl);
const result = await uploadFileToS3({
req: mockReq,
file: mockFile,
file_id: 'file123',
basePath: 'images',
});
expect(result).toEqual({
filepath: mockSignedUrl,
bytes: 1024,
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload.jpg');
expect(mockS3Client.send).toHaveBeenCalledWith(expect.any(PutObjectCommand));
});
it('should handle upload errors and clean up temp file', async () => {
const mockFile = {
path: '/tmp/upload.jpg',
originalname: 'photo.jpg',
};
const error = new Error('Upload failed');
fs.promises = {
stat: jest.fn().mockResolvedValue({ size: 1024 }),
unlink: jest.fn().mockResolvedValue(),
};
fs.createReadStream = jest.fn().mockReturnValue(new Readable());
mockS3Client.send.mockRejectedValue(error);
await expect(
uploadFileToS3({
req: mockReq,
file: mockFile,
file_id: 'file123',
}),
).rejects.toThrow('Upload failed');
expect(logger.error).toHaveBeenCalledWith(
'[uploadFileToS3] Error streaming file to S3:',
error,
);
});
});
describe('getS3FileStream', () => {
it('should return a readable stream for a file', async () => {
const mockStream = new Readable();
const mockResponse = { Body: mockStream };
mockS3Client.send.mockResolvedValue(mockResponse);
const result = await getS3FileStream(
{},
'https://s3.amazonaws.com/test-bucket/images/user123/file.pdf',
);
expect(result).toBe(mockStream);
expect(mockS3Client.send).toHaveBeenCalledWith(expect.any(GetObjectCommand));
});
it('should handle errors when retrieving stream', async () => {
const error = new Error('Stream error');
mockS3Client.send.mockRejectedValue(error);
await expect(getS3FileStream({}, 'images/user123/file.pdf')).rejects.toThrow('Stream error');
expect(logger.error).toHaveBeenCalled();
});
});
describe('needsRefresh', () => {
it('should return false for non-signed URLs', () => {
const url = 'https://example.com/proxy/file.jpg';
const result = needsRefresh(url, 3600);
expect(result).toBe(false);
});
it('should return true for expired signed URLs', () => {
const now = new Date();
const past = new Date(now.getTime() - 3600 * 1000); // 1 hour ago
const dateStr = past
.toISOString()
.replace(/[-:]/g, '')
.replace(/\.\d{3}/, '');
const url = `https://s3.amazonaws.com/bucket/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`;
const result = needsRefresh(url, 60);
expect(result).toBe(true);
});
it('should return false for URLs that are not close to expiration', () => {
const now = new Date();
const recent = new Date(now.getTime() - 10 * 1000); // 10 seconds ago
const dateStr = recent
.toISOString()
.replace(/[-:]/g, '')
.replace(/\.\d{3}/, '');
const url = `https://s3.amazonaws.com/bucket/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=7200`;
const result = needsRefresh(url, 60);
expect(result).toBe(false);
});
it('should use custom refresh expiry when S3_REFRESH_EXPIRY_MS is set', () => {
process.env.S3_REFRESH_EXPIRY_MS = '30000'; // 30 seconds
const now = new Date();
const recent = new Date(now.getTime() - 31 * 1000); // 31 seconds ago
const dateStr = recent
.toISOString()
.replace(/[-:]/g, '')
.replace(/\.\d{3}/, '');
const url = `https://s3.amazonaws.com/bucket/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=7200`;
// Need to reload the module to pick up the env var change
jest.resetModules();
const { needsRefresh: needsRefreshReloaded } = require('~/server/services/Files/S3/crud');
const result = needsRefreshReloaded(url, 60);
expect(result).toBe(true);
});
it('should return true for malformed URLs', () => {
const url = 'not-a-valid-url';
const result = needsRefresh(url, 3600);
expect(result).toBe(true);
});
});
describe('getNewS3URL', () => {
it('should generate a new URL from an existing S3 URL', async () => {
const currentURL =
'https://s3.amazonaws.com/test-bucket/images/user123/file.jpg?signature=old';
const newURL = 'https://s3.amazonaws.com/test-bucket/images/user123/file.jpg?signature=new';
getSignedUrl.mockResolvedValue(newURL);
const result = await getNewS3URL(currentURL);
expect(result).toBe(newURL);
expect(getSignedUrl).toHaveBeenCalled();
});
it('should return undefined for invalid URLs', async () => {
const result = await getNewS3URL('invalid-url');
expect(result).toBeUndefined();
});
it('should handle errors gracefully', async () => {
const currentURL = 'https://s3.amazonaws.com/test-bucket/images/user123/file.jpg';
getSignedUrl.mockRejectedValue(new Error('Failed'));
const result = await getNewS3URL(currentURL);
expect(result).toBeUndefined();
expect(logger.error).toHaveBeenCalledWith('Error getting new S3 URL:', expect.any(Error));
});
it('should construct GetObjectCommand with correct key (no bucket name duplication)', async () => {
const currentURL =
'https://s3.amazonaws.com/my-bucket/images/user123/file.jpg?X-Amz-Signature=old';
getSignedUrl.mockResolvedValue(
'https://s3.amazonaws.com/test-bucket/images/user123/file.jpg?signature=new',
);
await getNewS3URL(currentURL);
expect(GetObjectCommand).toHaveBeenCalledWith(
expect.objectContaining({ Key: 'images/user123/file.jpg' }),
);
});
});
describe('refreshS3FileUrls', () => {
it('should refresh expired URLs for multiple files', async () => {
const now = new Date();
const past = new Date(now.getTime() - 3600 * 1000);
const dateStr = past
.toISOString()
.replace(/[-:]/g, '')
.replace(/\.\d{3}/, '');
const files = [
{
file_id: 'file1',
source: FileSources.s3,
filepath: `https://s3.amazonaws.com/bucket/images/user123/file1.jpg?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
},
{
file_id: 'file2',
source: FileSources.s3,
filepath: `https://s3.amazonaws.com/bucket/images/user123/file2.jpg?X-Amz-Signature=def&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
},
];
const newURL1 = 'https://s3.amazonaws.com/bucket/images/user123/file1.jpg?signature=new1';
const newURL2 = 'https://s3.amazonaws.com/bucket/images/user123/file2.jpg?signature=new2';
getSignedUrl.mockResolvedValueOnce(newURL1).mockResolvedValueOnce(newURL2);
const mockBatchUpdate = jest.fn().mockResolvedValue();
const result = await refreshS3FileUrls(files, mockBatchUpdate, 60);
expect(result[0].filepath).toBe(newURL1);
expect(result[1].filepath).toBe(newURL2);
expect(mockBatchUpdate).toHaveBeenCalledWith([
{ file_id: 'file1', filepath: newURL1 },
{ file_id: 'file2', filepath: newURL2 },
]);
});
it('should skip non-S3 files', async () => {
const files = [
{
file_id: 'file1',
source: 'local',
filepath: '/local/path/file.jpg',
},
];
const mockBatchUpdate = jest.fn();
const result = await refreshS3FileUrls(files, mockBatchUpdate);
expect(result).toEqual(files);
expect(mockBatchUpdate).not.toHaveBeenCalled();
});
it('should handle empty or invalid input', async () => {
const mockBatchUpdate = jest.fn();
const result1 = await refreshS3FileUrls(null, mockBatchUpdate);
expect(result1).toBe(null);
const result2 = await refreshS3FileUrls([], mockBatchUpdate);
expect(result2).toEqual([]);
expect(mockBatchUpdate).not.toHaveBeenCalled();
});
it('should handle errors for individual files gracefully', async () => {
const now = new Date();
const past = new Date(now.getTime() - 3600 * 1000);
const dateStr = past
.toISOString()
.replace(/[-:]/g, '')
.replace(/\.\d{3}/, '');
const files = [
{
file_id: 'file1',
source: FileSources.s3,
filepath: `https://s3.amazonaws.com/bucket/images/user123/file1.jpg?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
},
];
getSignedUrl.mockRejectedValue(new Error('Failed to refresh'));
const mockBatchUpdate = jest.fn();
await refreshS3FileUrls(files, mockBatchUpdate, 60);
expect(logger.error).toHaveBeenCalledWith('Error getting new S3 URL:', expect.any(Error));
expect(mockBatchUpdate).not.toHaveBeenCalled();
});
});
describe('refreshS3Url', () => {
it('should refresh an expired S3 URL', async () => {
const now = new Date();
const past = new Date(now.getTime() - 3600 * 1000);
const dateStr = past
.toISOString()
.replace(/[-:]/g, '')
.replace(/\.\d{3}/, '');
const fileObj = {
source: FileSources.s3,
filepath: `https://s3.amazonaws.com/bucket/images/user123/file.jpg?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
};
const newURL = 'https://s3.amazonaws.com/bucket/images/user123/file.jpg?signature=new';
getSignedUrl.mockResolvedValue(newURL);
const result = await refreshS3Url(fileObj, 60);
expect(result).toBe(newURL);
});
it('should return original URL if not expired', async () => {
const fileObj = {
source: FileSources.s3,
filepath: 'https://example.com/proxy/file.jpg',
};
const result = await refreshS3Url(fileObj, 3600);
expect(result).toBe(fileObj.filepath);
expect(getSignedUrl).not.toHaveBeenCalled();
});
it('should return empty string for null input', async () => {
const result = await refreshS3Url(null);
expect(result).toBe('');
});
it('should return original URL for non-S3 files', async () => {
const fileObj = {
source: 'local',
filepath: '/local/path/file.jpg',
};
const result = await refreshS3Url(fileObj);
expect(result).toBe(fileObj.filepath);
});
it('should handle errors and return original URL', async () => {
const now = new Date();
const past = new Date(now.getTime() - 3600 * 1000);
const dateStr = past
.toISOString()
.replace(/[-:]/g, '')
.replace(/\.\d{3}/, '');
const fileObj = {
source: FileSources.s3,
filepath: `https://s3.amazonaws.com/bucket/images/user123/file.jpg?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
};
getSignedUrl.mockRejectedValue(new Error('Refresh failed'));
const result = await refreshS3Url(fileObj, 60);
expect(result).toBe(fileObj.filepath);
expect(logger.error).toHaveBeenCalled();
});
});
describe('extractKeyFromS3Url', () => {
it('should extract key from a full S3 URL', () => {
const url = 'https://s3.amazonaws.com/test-bucket/images/user123/file.jpg';
const result = extractKeyFromS3Url(url);
expect(result).toBe('images/user123/file.jpg');
});
it('should extract key from a signed S3 URL with query parameters', () => {
const url =
'https://s3.amazonaws.com/test-bucket/documents/user456/report.pdf?X-Amz-Signature=abc123&X-Amz-Date=20260107';
const result = extractKeyFromS3Url(url);
expect(result).toBe('documents/user456/report.pdf');
});
it('should extract key from S3 URL with different domain format', () => {
const url = 'https://test-bucket.s3.amazonaws.com/uploads/user789/image.png';
const result = extractKeyFromS3Url(url);
expect(result).toBe('uploads/user789/image.png');
});
it('should return key as-is if already properly formatted (3+ parts, no http)', () => {
const key = 'images/user123/file.jpg';
const result = extractKeyFromS3Url(key);
expect(result).toBe('images/user123/file.jpg');
});
it('should handle key with leading slash by removing it', () => {
const key = '/images/user123/file.jpg';
const result = extractKeyFromS3Url(key);
expect(result).toBe('images/user123/file.jpg');
});
it('should handle simple key without slashes', () => {
const key = 'simple-file.txt';
const result = extractKeyFromS3Url(key);
expect(result).toBe('simple-file.txt');
});
it('should handle key with only two parts', () => {
const key = 'folder/file.txt';
const result = extractKeyFromS3Url(key);
expect(result).toBe('folder/file.txt');
});
it('should throw error for empty input', () => {
expect(() => extractKeyFromS3Url('')).toThrow('Invalid input: URL or key is empty');
});
it('should throw error for null input', () => {
expect(() => extractKeyFromS3Url(null)).toThrow('Invalid input: URL or key is empty');
});
it('should throw error for undefined input', () => {
expect(() => extractKeyFromS3Url(undefined)).toThrow('Invalid input: URL or key is empty');
});
it('should handle URLs with encoded characters', () => {
const url = 'https://s3.amazonaws.com/test-bucket/images/user123/my%20file%20name.jpg';
const result = extractKeyFromS3Url(url);
expect(result).toBe('images/user123/my%20file%20name.jpg');
});
it('should handle deep nested paths', () => {
const url = 'https://s3.amazonaws.com/bucket/a/b/c/d/e/f/file.jpg';
const result = extractKeyFromS3Url(url);
expect(result).toBe('a/b/c/d/e/f/file.jpg');
});
it('should log debug message when extracting from URL', () => {
const url = 'https://s3.amazonaws.com/bucket/images/user123/file.jpg';
extractKeyFromS3Url(url);
expect(logger.debug).toHaveBeenCalledWith(
expect.stringContaining('[extractKeyFromS3Url] fileUrlOrKey:'),
);
});
it('should log fallback debug message for non-URL input', () => {
const key = 'simple-file.txt';
extractKeyFromS3Url(key);
expect(logger.debug).toHaveBeenCalledWith(
expect.stringContaining('[extractKeyFromS3Url] FALLBACK'),
);
});
it('should handle valid URLs that contain only a bucket', () => {
const url = 'https://s3.amazonaws.com/test-bucket/';
const result = extractKeyFromS3Url(url);
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining(
'[extractKeyFromS3Url] Extracted key is empty after removing bucket name from URL: https://s3.amazonaws.com/test-bucket/',
),
);
expect(result).toBe('');
});
it('should handle invalid URLs that contain only a bucket', () => {
const url = 'https://s3.amazonaws.com/test-bucket';
const result = extractKeyFromS3Url(url);
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining(
'[extractKeyFromS3Url] Unable to extract key from path-style URL: https://s3.amazonaws.com/test-bucket',
),
);
expect(result).toBe('');
});
// https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html
// Path-style requests
// https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html#path-style-access
// https://s3.region-code.amazonaws.com/bucket-name/key-name
it('should handle formatted according to Path-style regional endpoint', () => {
const url = 'https://s3.us-west-2.amazonaws.com/amzn-s3-demo-bucket1/dogs/puppy.jpg';
const result = extractKeyFromS3Url(url);
expect(result).toBe('dogs/puppy.jpg');
});
// virtual host style
// https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html#virtual-hosted-style-access
// https://bucket-name.s3.region-code.amazonaws.com/key-name
it('should handle formatted according to Virtual-hostedstyle Regional endpoint', () => {
const url = 'https://amzn-s3-demo-bucket1.s3.us-west-2.amazonaws.com/dogs/puppy.png';
const result = extractKeyFromS3Url(url);
expect(result).toBe('dogs/puppy.png');
});
// Legacy endpoints
// https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html#VirtualHostingBackwardsCompatibility
// s3Region
// https://bucket-name.s3-region-code.amazonaws.com
it('should handle formatted according to s3Region', () => {
const url = 'https://amzn-s3-demo-bucket1.s3-us-west-2.amazonaws.com/puppy.png';
const result = extractKeyFromS3Url(url);
expect(result).toBe('puppy.png');
const testcase2 = 'https://amzn-s3-demo-bucket1.s3-us-west-2.amazonaws.com/cats/kitten.png';
const result2 = extractKeyFromS3Url(testcase2);
expect(result2).toBe('cats/kitten.png');
});
// Legacy global endpoint
// bucket-name.s3.amazonaws.com
it('should handle formatted according to Legacy global endpoint', () => {
const url = 'https://amzn-s3-demo-bucket1.s3.amazonaws.com/dogs/puppy.png';
const result = extractKeyFromS3Url(url);
expect(result).toBe('dogs/puppy.png');
});
it('should handle malformed URL and log error', () => {
const malformedUrl = 'https://invalid url with spaces.com/key';
const result = extractKeyFromS3Url(malformedUrl);
expect(logger.error).toHaveBeenCalledWith(
expect.stringContaining('[extractKeyFromS3Url] Error parsing URL:'),
);
expect(logger.error).toHaveBeenCalledWith(expect.stringContaining(malformedUrl));
expect(result).toBe(malformedUrl);
});
it('should return empty string for regional path-style URL with only bucket (no key)', () => {
const url = 'https://s3.us-west-2.amazonaws.com/my-bucket';
const result = extractKeyFromS3Url(url);
expect(result).toBe('');
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining('[extractKeyFromS3Url] Unable to extract key from path-style URL:'),
);
});
it('should not log error when given a plain S3 key (non-URL input)', () => {
extractKeyFromS3Url('images/user123/file.jpg');
expect(logger.error).not.toHaveBeenCalled();
});
it('should strip bucket from custom endpoint URLs (MinIO, R2, etc.) using bucketName', () => {
// bucketName is the module-level const 'test-bucket', set before require at top of file
expect(
extractKeyFromS3Url('https://minio.example.com/test-bucket/images/user123/file.jpg'),
).toBe('images/user123/file.jpg');
expect(
extractKeyFromS3Url(
'https://abc123.r2.cloudflarestorage.com/test-bucket/images/user123/avatar.png',
),
).toBe('images/user123/avatar.png');
});
it('should use endpoint base path when AWS_ENDPOINT_URL and AWS_FORCE_PATH_STYLE are set', () => {
process.env.AWS_BUCKET_NAME = 'test-bucket';
process.env.AWS_ENDPOINT_URL = 'https://minio.example.com';
process.env.AWS_FORCE_PATH_STYLE = 'true';
jest.resetModules();
const { extractKeyFromS3Url: fn } = require('~/server/services/Files/S3/crud');
expect(fn('https://minio.example.com/test-bucket/images/user123/file.jpg')).toBe(
'images/user123/file.jpg',
);
delete process.env.AWS_ENDPOINT_URL;
delete process.env.AWS_FORCE_PATH_STYLE;
});
it('should handle endpoint with a base path', () => {
process.env.AWS_BUCKET_NAME = 'test-bucket';
process.env.AWS_ENDPOINT_URL = 'https://example.com/storage/';
process.env.AWS_FORCE_PATH_STYLE = 'true';
jest.resetModules();
const { extractKeyFromS3Url: fn } = require('~/server/services/Files/S3/crud');
expect(fn('https://example.com/storage/test-bucket/images/user123/file.jpg')).toBe(
'images/user123/file.jpg',
);
delete process.env.AWS_ENDPOINT_URL;
delete process.env.AWS_FORCE_PATH_STYLE;
});
});
});

206
package-lock.json generated
View file

@ -310,44 +310,6 @@
"url": "https://github.com/sponsors/panva"
}
},
"api/node_modules/sharp": {
"version": "0.33.5",
"resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz",
"integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==",
"hasInstallScript": true,
"dependencies": {
"color": "^4.2.3",
"detect-libc": "^2.0.3",
"semver": "^7.6.3"
},
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
"funding": {
"url": "https://opencollective.com/libvips"
},
"optionalDependencies": {
"@img/sharp-darwin-arm64": "0.33.5",
"@img/sharp-darwin-x64": "0.33.5",
"@img/sharp-libvips-darwin-arm64": "1.0.4",
"@img/sharp-libvips-darwin-x64": "1.0.4",
"@img/sharp-libvips-linux-arm": "1.0.5",
"@img/sharp-libvips-linux-arm64": "1.0.4",
"@img/sharp-libvips-linux-s390x": "1.0.4",
"@img/sharp-libvips-linux-x64": "1.0.4",
"@img/sharp-libvips-linuxmusl-arm64": "1.0.4",
"@img/sharp-libvips-linuxmusl-x64": "1.0.4",
"@img/sharp-linux-arm": "0.33.5",
"@img/sharp-linux-arm64": "0.33.5",
"@img/sharp-linux-s390x": "0.33.5",
"@img/sharp-linux-x64": "0.33.5",
"@img/sharp-linuxmusl-arm64": "0.33.5",
"@img/sharp-linuxmusl-x64": "0.33.5",
"@img/sharp-wasm32": "0.33.5",
"@img/sharp-win32-ia32": "0.33.5",
"@img/sharp-win32-x64": "0.33.5"
}
},
"api/node_modules/winston-daily-rotate-file": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/winston-daily-rotate-file/-/winston-daily-rotate-file-5.0.0.tgz",
@ -19520,6 +19482,34 @@
"@sinonjs/commons": "^3.0.1"
}
},
"node_modules/@sinonjs/samsam": {
"version": "8.0.3",
"resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-8.0.3.tgz",
"integrity": "sha512-hw6HbX+GyVZzmaYNh82Ecj1vdGZrqVIn/keDTg63IgAwiQPO+xCz99uG6Woqgb4tM0mUiFENKZ4cqd7IX94AXQ==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"@sinonjs/commons": "^3.0.1",
"type-detect": "^4.1.0"
}
},
"node_modules/@sinonjs/samsam/node_modules/type-detect": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.1.0.tgz",
"integrity": "sha512-Acylog8/luQ8L7il+geoSxhEkazvkslg7PSNKOX59mbB9cOveP5aq9h74Y7YU8yDpJwetzQQrfIwtf4Wp4LKcw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/@sinonjs/text-encoding": {
"version": "0.7.3",
"resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.3.tgz",
"integrity": "sha512-DE427ROAphMQzU4ENbliGYrBSYPXF+TtLg9S8vzeA+OF4ZKzoDdzfL8sxuMUGS/lgRhM6j1URSk9ghf7Xo1tyA==",
"dev": true,
"license": "(Unlicense OR Apache-2.0)"
},
"node_modules/@smithy/abort-controller": {
"version": "4.2.8",
"resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.8.tgz",
@ -21483,6 +21473,23 @@
"@types/send": "*"
}
},
"node_modules/@types/sinon": {
"version": "17.0.4",
"resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-17.0.4.tgz",
"integrity": "sha512-RHnIrhfPO3+tJT0s7cFaXGZvsL4bbR3/k7z3P312qMS4JaS2Tk+KiwiLx1S0rQ56ERj00u1/BtdyVd0FY+Pdew==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/sinonjs__fake-timers": "*"
}
},
"node_modules/@types/sinonjs__fake-timers": {
"version": "15.0.1",
"resolved": "https://registry.npmjs.org/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-15.0.1.tgz",
"integrity": "sha512-Ko2tjWJq8oozHzHV+reuvS5KYIRAokHnGbDwGh/J64LntgpbuylF74ipEL24HCyRjf9FOlBiBHWBR1RlVKsI1w==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/stack-utils": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz",
@ -22762,6 +22769,18 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/aws-sdk-client-mock": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/aws-sdk-client-mock/-/aws-sdk-client-mock-4.1.0.tgz",
"integrity": "sha512-h/tOYTkXEsAcV3//6C1/7U4ifSpKyJvb6auveAepqqNJl6TdZaPFEtKjBQNf8UxQdDP850knB2i/whq4zlsxJw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/sinon": "^17.0.3",
"sinon": "^18.0.1",
"tslib": "^2.1.0"
}
},
"node_modules/axe-core": {
"version": "4.10.2",
"resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.10.2.tgz",
@ -31639,6 +31658,13 @@
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==",
"license": "MIT"
},
"node_modules/just-extend": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/just-extend/-/just-extend-6.2.0.tgz",
"integrity": "sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw==",
"dev": true,
"license": "MIT"
},
"node_modules/jwa": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.1.tgz",
@ -34625,6 +34651,30 @@
"resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.1.0.tgz",
"integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ=="
},
"node_modules/nise": {
"version": "6.1.1",
"resolved": "https://registry.npmjs.org/nise/-/nise-6.1.1.tgz",
"integrity": "sha512-aMSAzLVY7LyeM60gvBS423nBmIPP+Wy7St7hsb+8/fc1HmeoHJfLO8CKse4u3BtOZvQLJghYPI2i/1WZrEj5/g==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"@sinonjs/commons": "^3.0.1",
"@sinonjs/fake-timers": "^13.0.1",
"@sinonjs/text-encoding": "^0.7.3",
"just-extend": "^6.2.0",
"path-to-regexp": "^8.1.0"
}
},
"node_modules/nise/node_modules/@sinonjs/fake-timers": {
"version": "13.0.5",
"resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-13.0.5.tgz",
"integrity": "sha512-36/hTbH2uaWuGVERyC6da9YwGWnzUZXuPro/F2LfsdOsLnCojz/iSH8MxUt/FD2S5XBSVPhmArFUXcpCQ2Hkiw==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"@sinonjs/commons": "^3.0.1"
}
},
"node_modules/node-domexception": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
@ -40188,6 +40238,45 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/sharp": {
"version": "0.33.5",
"resolved": "https://registry.npmjs.org/sharp/-/sharp-0.33.5.tgz",
"integrity": "sha512-haPVm1EkS9pgvHrQ/F3Xy+hgcuMV0Wm9vfIBSiwZ05k+xgb0PkBQpGsAA/oWdDobNaZTH5ppvHtzCFbnSEwHVw==",
"hasInstallScript": true,
"license": "Apache-2.0",
"dependencies": {
"color": "^4.2.3",
"detect-libc": "^2.0.3",
"semver": "^7.6.3"
},
"engines": {
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
},
"funding": {
"url": "https://opencollective.com/libvips"
},
"optionalDependencies": {
"@img/sharp-darwin-arm64": "0.33.5",
"@img/sharp-darwin-x64": "0.33.5",
"@img/sharp-libvips-darwin-arm64": "1.0.4",
"@img/sharp-libvips-darwin-x64": "1.0.4",
"@img/sharp-libvips-linux-arm": "1.0.5",
"@img/sharp-libvips-linux-arm64": "1.0.4",
"@img/sharp-libvips-linux-s390x": "1.0.4",
"@img/sharp-libvips-linux-x64": "1.0.4",
"@img/sharp-libvips-linuxmusl-arm64": "1.0.4",
"@img/sharp-libvips-linuxmusl-x64": "1.0.4",
"@img/sharp-linux-arm": "0.33.5",
"@img/sharp-linux-arm64": "0.33.5",
"@img/sharp-linux-s390x": "0.33.5",
"@img/sharp-linux-x64": "0.33.5",
"@img/sharp-linuxmusl-arm64": "0.33.5",
"@img/sharp-linuxmusl-x64": "0.33.5",
"@img/sharp-wasm32": "0.33.5",
"@img/sharp-win32-ia32": "0.33.5",
"@img/sharp-win32-x64": "0.33.5"
}
},
"node_modules/shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
@ -40321,6 +40410,45 @@
"integrity": "sha512-xMO/8eNREtaROt7tJvWJqHBDTMFN4eiQ5I4JRMuilwfnFcV5W9u7RUkueNkdw0jPqGMX36iCywelS5yilTuOxg==",
"license": "MIT"
},
"node_modules/sinon": {
"version": "18.0.1",
"resolved": "https://registry.npmjs.org/sinon/-/sinon-18.0.1.tgz",
"integrity": "sha512-a2N2TDY1uGviajJ6r4D1CyRAkzE9NNVlYOV1wX5xQDuAk0ONgzgRl0EjCQuRCPxOwp13ghsMwt9Gdldujs39qw==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"@sinonjs/commons": "^3.0.1",
"@sinonjs/fake-timers": "11.2.2",
"@sinonjs/samsam": "^8.0.0",
"diff": "^5.2.0",
"nise": "^6.0.0",
"supports-color": "^7"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/sinon"
}
},
"node_modules/sinon/node_modules/@sinonjs/fake-timers": {
"version": "11.2.2",
"resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-11.2.2.tgz",
"integrity": "sha512-G2piCSxQ7oWOxwGSAyFHfPIsyeJGXYtc6mFbnFA+kRXkiEnTl8c/8jul2S329iFBnDI9HGoeWWAZvuvOkZccgw==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"@sinonjs/commons": "^3.0.0"
}
},
"node_modules/sinon/node_modules/diff": {
"version": "5.2.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-5.2.2.tgz",
"integrity": "sha512-vtcDfH3TOjP8UekytvnHH1o1P4FcUdt4eQ1Y+Abap1tk/OB2MWQvcwS2ClCd1zuIhc3JKOx6p3kod8Vfys3E+A==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.3.1"
}
},
"node_modules/slash": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz",
@ -44182,6 +44310,7 @@
"@types/node-fetch": "^2.6.13",
"@types/react": "^18.2.18",
"@types/winston": "^2.4.4",
"aws-sdk-client-mock": "^4.1.0",
"jest": "^30.2.0",
"jest-junit": "^16.0.0",
"librechat-data-provider": "*",
@ -44232,6 +44361,7 @@
"node-fetch": "2.7.0",
"pdfjs-dist": "^5.4.624",
"rate-limit-redis": "^4.2.0",
"sharp": "^0.33.5",
"undici": "^7.18.2",
"zod": "^3.22.4"
}

View file

@ -25,6 +25,7 @@
"test:cache-integration:mcp": "jest --testPathPatterns=\"src/mcp/.*\\.cache_integration\\.spec\\.ts$\" --coverage=false",
"test:cache-integration:stream": "jest --testPathPatterns=\"src/stream/.*\\.stream_integration\\.spec\\.ts$\" --coverage=false --runInBand --forceExit",
"test:cache-integration": "npm run test:cache-integration:core && npm run test:cache-integration:cluster && npm run test:cache-integration:mcp && npm run test:cache-integration:stream",
"test:s3-integration": "jest --testPathPatterns=\"src/storage/s3/.*\\.s3_integration\\.spec\\.ts$\" --coverage=false --runInBand",
"verify": "npm run test:ci",
"b:clean": "bun run rimraf dist",
"b:build": "bun run b:clean && bun run rollup -c --silent --bundleConfigAsCjs",
@ -64,6 +65,7 @@
"@types/node-fetch": "^2.6.13",
"@types/react": "^18.2.18",
"@types/winston": "^2.4.4",
"aws-sdk-client-mock": "^4.1.0",
"jest": "^30.2.0",
"jest-junit": "^16.0.0",
"librechat-data-provider": "*",
@ -117,6 +119,7 @@
"node-fetch": "2.7.0",
"pdfjs-dist": "^5.4.624",
"rate-limit-redis": "^4.2.0",
"sharp": "^0.33.5",
"undici": "^7.18.2",
"zod": "^3.22.4"
}

View file

@ -101,6 +101,14 @@ describe('initializeS3', () => {
);
});
it('should throw when AWS_BUCKET_NAME is not set', async () => {
delete process.env.AWS_BUCKET_NAME;
const { initializeS3 } = await load();
expect(() => initializeS3()).toThrow(
'[S3] AWS_BUCKET_NAME environment variable is required for S3 operations.',
);
});
it('should return the same instance on subsequent calls', async () => {
const { MockS3Client, initializeS3 } = await load();
const first = initializeS3();

View file

@ -25,6 +25,13 @@ export const initializeS3 = (): S3Client | null => {
return null;
}
if (!process.env.AWS_BUCKET_NAME) {
throw new Error(
'[S3] AWS_BUCKET_NAME environment variable is required for S3 operations. ' +
'Please set this environment variable to enable S3 storage.',
);
}
// Read the custom endpoint if provided.
const endpoint = process.env.AWS_ENDPOINT_URL;
const accessKeyId = process.env.AWS_ACCESS_KEY_ID;

View file

@ -37,6 +37,8 @@ export * from './prompts';
export * from './endpoints';
/* Files */
export * from './files';
/* Storage */
export * from './storage';
/* Tools */
export * from './tools';
/* web search */

View file

@ -0,0 +1,2 @@
export * from './s3';
export * from './types';

View file

@ -0,0 +1,770 @@
import fs from 'fs';
import { Readable } from 'stream';
import { mockClient } from 'aws-sdk-client-mock';
import { sdkStreamMixin } from '@smithy/util-stream';
import { FileSources } from 'librechat-data-provider';
import {
S3Client,
PutObjectCommand,
GetObjectCommand,
HeadObjectCommand,
DeleteObjectCommand,
} from '@aws-sdk/client-s3';
import type { TFile } from 'librechat-data-provider';
import type { S3FileRef } from '~/storage/types';
import type { ServerRequest } from '~/types';
// Shared aws-sdk-client-mock instance; intercepts every S3Client#send call
// made by the module under test.
const s3Mock = mockClient(S3Client);
// fs is partially mocked: only the promise APIs and createReadStream used by
// the CRUD module are stubbed; everything else keeps the real implementation.
jest.mock('fs', () => ({
  ...jest.requireActual('fs'),
  promises: {
    stat: jest.fn(),
    unlink: jest.fn(),
  },
  createReadStream: jest.fn(),
}));
// Presigner always returns a recognizable "?signed=true" URL so tests can
// assert on it without real signing.
jest.mock('@aws-sdk/s3-request-presigner', () => ({
  getSignedUrl: jest.fn().mockResolvedValue('https://bucket.s3.amazonaws.com/test-key?signed=true'),
}));
jest.mock('~/files', () => ({
  deleteRagFile: jest.fn().mockResolvedValue(undefined),
}));
jest.mock('@librechat/data-schemas', () => ({
  logger: {
    debug: jest.fn(),
    info: jest.fn(),
    warn: jest.fn(),
    error: jest.fn(),
  },
}));
// These imports appear after the jest.mock calls above (which Jest hoists to
// the top of the module) so the bindings resolve to the mocked implementations.
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import { deleteRagFile } from '~/files';
import { logger } from '@librechat/data-schemas';
// Unit tests for the S3 CRUD helpers. The crud module is imported lazily
// inside each test so it picks up the env vars set in beforeAll.
describe('S3 CRUD', () => {
  let originalEnv: NodeJS.ProcessEnv;
  beforeAll(() => {
    // Snapshot env so afterAll can restore it untouched.
    originalEnv = { ...process.env };
    process.env.AWS_REGION = 'us-east-1';
    process.env.AWS_BUCKET_NAME = 'test-bucket';
    process.env.S3_URL_EXPIRY_SECONDS = '120';
  });
  afterAll(() => {
    process.env = originalEnv;
  });
  beforeEach(() => {
    // Reset the shared S3 mock and install default happy-path behaviors.
    s3Mock.reset();
    s3Mock.on(PutObjectCommand).resolves({});
    s3Mock.on(DeleteObjectCommand).resolves({});
    const stream = new Readable();
    stream.push('test content');
    stream.push(null);
    // sdkStreamMixin makes the plain Readable look like an SDK response Body.
    const sdkStream = sdkStreamMixin(stream);
    s3Mock.on(GetObjectCommand).resolves({ Body: sdkStream });
    jest.clearAllMocks();
  });
  // Key construction: basePath/userId/fileName, basePath must be a single segment.
  describe('getS3Key', () => {
    it('constructs key from basePath, userId, and fileName', async () => {
      const { getS3Key } = await import('../crud');
      const key = getS3Key('images', 'user123', 'file.png');
      expect(key).toBe('images/user123/file.png');
    });
    it('handles nested file names', async () => {
      const { getS3Key } = await import('../crud');
      const key = getS3Key('files', 'user456', 'folder/subfolder/doc.pdf');
      expect(key).toBe('files/user456/folder/subfolder/doc.pdf');
    });
    it('throws if basePath contains a slash', async () => {
      const { getS3Key } = await import('../crud');
      expect(() => getS3Key('a/b', 'user123', 'file.png')).toThrow(
        '[getS3Key] basePath must not contain slashes: "a/b"',
      );
    });
  });
  describe('saveBufferToS3', () => {
    it('uploads buffer and returns signed URL', async () => {
      const { saveBufferToS3 } = await import('../crud');
      const result = await saveBufferToS3({
        userId: 'user123',
        buffer: Buffer.from('test'),
        fileName: 'test.txt',
        basePath: 'files',
      });
      expect(result).toContain('signed=true');
      expect(s3Mock.commandCalls(PutObjectCommand)).toHaveLength(1);
    });
    it('calls PutObjectCommand with correct parameters', async () => {
      const { saveBufferToS3 } = await import('../crud');
      await saveBufferToS3({
        userId: 'user123',
        buffer: Buffer.from('test content'),
        fileName: 'document.pdf',
        basePath: 'documents',
      });
      const calls = s3Mock.commandCalls(PutObjectCommand);
      expect(calls[0].args[0].input).toEqual({
        Bucket: 'test-bucket',
        Key: 'documents/user123/document.pdf',
        Body: Buffer.from('test content'),
      });
    });
    it('uses default basePath if not provided', async () => {
      const { saveBufferToS3 } = await import('../crud');
      await saveBufferToS3({
        userId: 'user123',
        buffer: Buffer.from('test'),
        fileName: 'test.txt',
      });
      const calls = s3Mock.commandCalls(PutObjectCommand);
      // Default basePath is 'images'.
      expect(calls[0].args[0].input.Key).toBe('images/user123/test.txt');
    });
    it('handles S3 upload errors', async () => {
      s3Mock.on(PutObjectCommand).rejects(new Error('S3 upload failed'));
      const { saveBufferToS3 } = await import('../crud');
      await expect(
        saveBufferToS3({
          userId: 'user123',
          buffer: Buffer.from('test'),
          fileName: 'test.txt',
        }),
      ).rejects.toThrow('S3 upload failed');
      expect(logger.error).toHaveBeenCalledWith(
        '[saveBufferToS3] Error uploading buffer to S3:',
        'S3 upload failed',
      );
    });
  });
  describe('getS3URL', () => {
    it('returns signed URL', async () => {
      const { getS3URL } = await import('../crud');
      const result = await getS3URL({
        userId: 'user123',
        fileName: 'test.txt',
        basePath: 'files',
      });
      expect(result).toContain('signed=true');
    });
    it('adds custom filename to Content-Disposition header', async () => {
      const { getS3URL } = await import('../crud');
      await getS3URL({
        userId: 'user123',
        fileName: 'test.pdf',
        customFilename: 'custom-name.pdf',
      });
      expect(getSignedUrl).toHaveBeenCalledWith(
        expect.anything(),
        expect.objectContaining({
          input: expect.objectContaining({
            ResponseContentDisposition: 'attachment; filename="custom-name.pdf"',
          }),
        }),
        expect.anything(),
      );
    });
    it('adds custom content type', async () => {
      const { getS3URL } = await import('../crud');
      await getS3URL({
        userId: 'user123',
        fileName: 'test.pdf',
        contentType: 'application/pdf',
      });
      expect(getSignedUrl).toHaveBeenCalledWith(
        expect.anything(),
        expect.objectContaining({
          input: expect.objectContaining({
            ResponseContentType: 'application/pdf',
          }),
        }),
        expect.anything(),
      );
    });
    it('handles errors when getting signed URL', async () => {
      (getSignedUrl as jest.Mock).mockRejectedValueOnce(new Error('Failed to sign URL'));
      const { getS3URL } = await import('../crud');
      await expect(
        getS3URL({
          userId: 'user123',
          fileName: 'file.pdf',
        }),
      ).rejects.toThrow('Failed to sign URL');
      expect(logger.error).toHaveBeenCalledWith(
        '[getS3URL] Error getting signed URL from S3:',
        'Failed to sign URL',
      );
    });
  });
  describe('saveURLToS3', () => {
    beforeEach(() => {
      // Stub global fetch so no network access happens.
      global.fetch = jest.fn().mockResolvedValue({
        ok: true,
        arrayBuffer: jest.fn().mockResolvedValue(new ArrayBuffer(8)),
      }) as unknown as typeof fetch;
    });
    it('fetches file from URL and saves to S3', async () => {
      const { saveURLToS3 } = await import('../crud');
      const result = await saveURLToS3({
        userId: 'user123',
        URL: 'https://example.com/image.jpg',
        fileName: 'downloaded.jpg',
      });
      expect(global.fetch).toHaveBeenCalledWith('https://example.com/image.jpg');
      expect(s3Mock.commandCalls(PutObjectCommand)).toHaveLength(1);
      expect(result).toContain('signed=true');
    });
    it('throws error on non-ok response', async () => {
      (global.fetch as unknown as jest.Mock).mockResolvedValueOnce({
        ok: false,
        status: 404,
        statusText: 'Not Found',
        arrayBuffer: jest.fn().mockResolvedValue(new ArrayBuffer(0)),
      });
      const { saveURLToS3 } = await import('../crud');
      await expect(
        saveURLToS3({
          userId: 'user123',
          URL: 'https://example.com/missing.jpg',
          fileName: 'missing.jpg',
        }),
      ).rejects.toThrow('Failed to fetch URL');
    });
    it('handles fetch errors', async () => {
      (global.fetch as unknown as jest.Mock).mockRejectedValueOnce(new Error('Network error'));
      const { saveURLToS3 } = await import('../crud');
      await expect(
        saveURLToS3({
          userId: 'user123',
          URL: 'https://example.com/image.jpg',
          fileName: 'downloaded.jpg',
        }),
      ).rejects.toThrow('Network error');
      expect(logger.error).toHaveBeenCalled();
    });
  });
  describe('deleteFileFromS3', () => {
    const mockReq = { user: { id: 'user123' } } as ServerRequest;
    it('deletes a file from S3', async () => {
      const mockFile = {
        filepath: 'https://bucket.s3.amazonaws.com/images/user123/file.jpg',
        file_id: 'file123',
      } as TFile;
      // Existence check succeeds, so the delete proceeds.
      s3Mock.on(HeadObjectCommand).resolvesOnce({});
      const { deleteFileFromS3 } = await import('../crud');
      await deleteFileFromS3(mockReq, mockFile);
      expect(deleteRagFile).toHaveBeenCalledWith({ userId: 'user123', file: mockFile });
      expect(s3Mock.commandCalls(HeadObjectCommand)).toHaveLength(1);
      expect(s3Mock.commandCalls(DeleteObjectCommand)).toHaveLength(1);
    });
    it('handles file not found gracefully and cleans up RAG', async () => {
      const mockFile = {
        filepath: 'https://bucket.s3.amazonaws.com/images/user123/nonexistent.jpg',
        file_id: 'file123',
      } as TFile;
      s3Mock.on(HeadObjectCommand).rejects({ name: 'NotFound' });
      const { deleteFileFromS3 } = await import('../crud');
      await deleteFileFromS3(mockReq, mockFile);
      expect(logger.warn).toHaveBeenCalled();
      // RAG cleanup still runs even though the S3 object is gone.
      expect(deleteRagFile).toHaveBeenCalledWith({ userId: 'user123', file: mockFile });
      expect(s3Mock.commandCalls(DeleteObjectCommand)).toHaveLength(0);
    });
    it('throws error if user ID does not match', async () => {
      const mockFile = {
        filepath: 'https://bucket.s3.amazonaws.com/images/different-user/file.jpg',
        file_id: 'file123',
      } as TFile;
      const { deleteFileFromS3 } = await import('../crud');
      await expect(deleteFileFromS3(mockReq, mockFile)).rejects.toThrow('User ID mismatch');
      expect(logger.error).toHaveBeenCalled();
    });
    it('handles NoSuchKey error without calling deleteRagFile', async () => {
      const mockFile = {
        filepath: 'https://bucket.s3.amazonaws.com/images/user123/file.jpg',
        file_id: 'file123',
      } as TFile;
      s3Mock.on(HeadObjectCommand).resolvesOnce({});
      const noSuchKeyError = Object.assign(new Error('NoSuchKey'), { name: 'NoSuchKey' });
      s3Mock.on(DeleteObjectCommand).rejects(noSuchKeyError);
      const { deleteFileFromS3 } = await import('../crud');
      await expect(deleteFileFromS3(mockReq, mockFile)).resolves.toBeUndefined();
      expect(deleteRagFile).not.toHaveBeenCalled();
    });
  });
  describe('uploadFileToS3', () => {
    const mockReq = { user: { id: 'user123' } } as ServerRequest;
    it('uploads a file from disk to S3', async () => {
      const mockFile = {
        path: '/tmp/upload.jpg',
        originalname: 'photo.jpg',
      } as Express.Multer.File;
      (fs.promises.stat as jest.Mock).mockResolvedValue({ size: 1024 });
      (fs.createReadStream as jest.Mock).mockReturnValue(new Readable());
      const { uploadFileToS3 } = await import('../crud');
      const result = await uploadFileToS3({
        req: mockReq,
        file: mockFile,
        file_id: 'file123',
        basePath: 'images',
      });
      expect(result).toEqual({
        filepath: expect.stringContaining('signed=true'),
        bytes: 1024,
      });
      expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload.jpg');
      expect(s3Mock.commandCalls(PutObjectCommand)).toHaveLength(1);
      // Temp file is kept on success (caller cleans up).
      expect(fs.promises.unlink).not.toHaveBeenCalled();
    });
    it('handles upload errors and cleans up temp file', async () => {
      const mockFile = {
        path: '/tmp/upload.jpg',
        originalname: 'photo.jpg',
      } as Express.Multer.File;
      (fs.promises.stat as jest.Mock).mockResolvedValue({ size: 1024 });
      (fs.promises.unlink as jest.Mock).mockResolvedValue(undefined);
      (fs.createReadStream as jest.Mock).mockReturnValue(new Readable());
      s3Mock.on(PutObjectCommand).rejects(new Error('Upload failed'));
      const { uploadFileToS3 } = await import('../crud');
      await expect(
        uploadFileToS3({
          req: mockReq,
          file: mockFile,
          file_id: 'file123',
        }),
      ).rejects.toThrow('Upload failed');
      expect(logger.error).toHaveBeenCalledWith(
        '[uploadFileToS3] Error streaming file to S3:',
        expect.any(Error),
      );
      expect(fs.promises.unlink).toHaveBeenCalledWith('/tmp/upload.jpg');
    });
  });
  describe('getS3FileStream', () => {
    it('returns a readable stream for a file', async () => {
      const { getS3FileStream } = await import('../crud');
      const result = await getS3FileStream(
        {} as ServerRequest,
        'https://bucket.s3.amazonaws.com/images/user123/file.pdf',
      );
      expect(result).toBeInstanceOf(Readable);
      expect(s3Mock.commandCalls(GetObjectCommand)).toHaveLength(1);
    });
    it('handles errors when retrieving stream', async () => {
      s3Mock.on(GetObjectCommand).rejects(new Error('Stream error'));
      const { getS3FileStream } = await import('../crud');
      await expect(getS3FileStream({} as ServerRequest, 'images/user123/file.pdf')).rejects.toThrow(
        'Stream error',
      );
      expect(logger.error).toHaveBeenCalled();
    });
  });
  // Expiry detection parses X-Amz-Date / X-Amz-Expires from presigned URLs.
  describe('needsRefresh', () => {
    it('returns false for non-signed URLs', async () => {
      const { needsRefresh } = await import('../crud');
      const result = needsRefresh('https://example.com/file.png', 3600);
      expect(result).toBe(false);
    });
    it('returns true when URL is expired', async () => {
      const { needsRefresh } = await import('../crud');
      const pastDate = new Date(Date.now() - 2 * 60 * 60 * 1000);
      // AWS X-Amz-Date format: YYYYMMDDTHHMMSSZ.
      const dateStr = pastDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
      const url = `https://bucket.s3.amazonaws.com/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=3600`;
      const result = needsRefresh(url, 3600);
      expect(result).toBe(true);
    });
    it('returns false when URL is not close to expiration', async () => {
      const { needsRefresh } = await import('../crud');
      const futureDate = new Date(Date.now() + 10 * 60 * 1000);
      const dateStr = futureDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
      const url = `https://bucket.s3.amazonaws.com/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=7200`;
      const result = needsRefresh(url, 60);
      expect(result).toBe(false);
    });
    it('returns true when missing expiration parameters', async () => {
      const { needsRefresh } = await import('../crud');
      const url = 'https://bucket.s3.amazonaws.com/key?X-Amz-Signature=abc';
      const result = needsRefresh(url, 3600);
      expect(result).toBe(true);
    });
    it('returns true for malformed URLs', async () => {
      const { needsRefresh } = await import('../crud');
      const result = needsRefresh('not-a-valid-url', 3600);
      expect(result).toBe(true);
    });
  });
  describe('getNewS3URL', () => {
    it('generates a new URL from an existing S3 URL', async () => {
      const { getNewS3URL } = await import('../crud');
      const result = await getNewS3URL(
        'https://bucket.s3.amazonaws.com/images/user123/file.jpg?signature=old',
      );
      expect(result).toContain('signed=true');
    });
    it('returns undefined for invalid URLs', async () => {
      const { getNewS3URL } = await import('../crud');
      const result = await getNewS3URL('simple-file.txt');
      expect(result).toBeUndefined();
    });
    it('returns undefined when key has insufficient parts', async () => {
      const { getNewS3URL } = await import('../crud');
      // Key with only 2 parts (basePath/userId but no fileName)
      const result = await getNewS3URL('https://bucket.s3.amazonaws.com/images/user123');
      expect(result).toBeUndefined();
    });
  });
  describe('refreshS3FileUrls', () => {
    it('refreshes expired URLs for multiple files', async () => {
      const { refreshS3FileUrls } = await import('../crud');
      const pastDate = new Date(Date.now() - 2 * 60 * 60 * 1000);
      const dateStr = pastDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
      const files = [
        {
          file_id: 'file1',
          source: FileSources.s3,
          filepath: `https://bucket.s3.amazonaws.com/images/user123/file1.jpg?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
        },
        {
          file_id: 'file2',
          source: FileSources.s3,
          filepath: `https://bucket.s3.amazonaws.com/images/user456/file2.jpg?X-Amz-Signature=def&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
        },
      ];
      const mockBatchUpdate = jest.fn().mockResolvedValue(undefined);
      const result = await refreshS3FileUrls(files as TFile[], mockBatchUpdate, 60);
      expect(result[0].filepath).toContain('signed=true');
      expect(result[1].filepath).toContain('signed=true');
      // Persistence callback receives only the refreshed id/filepath pairs.
      expect(mockBatchUpdate).toHaveBeenCalledWith([
        { file_id: 'file1', filepath: expect.stringContaining('signed=true') },
        { file_id: 'file2', filepath: expect.stringContaining('signed=true') },
      ]);
    });
    it('skips non-S3 files', async () => {
      const { refreshS3FileUrls } = await import('../crud');
      const files = [
        {
          file_id: 'file1',
          source: 'local',
          filepath: '/local/path/file.jpg',
        },
      ];
      const mockBatchUpdate = jest.fn();
      const result = await refreshS3FileUrls(files as TFile[], mockBatchUpdate);
      expect(result).toEqual(files);
      expect(mockBatchUpdate).not.toHaveBeenCalled();
    });
    it('handles empty or invalid input', async () => {
      const { refreshS3FileUrls } = await import('../crud');
      const mockBatchUpdate = jest.fn();
      const result1 = await refreshS3FileUrls(null, mockBatchUpdate);
      expect(result1).toEqual([]);
      const result2 = await refreshS3FileUrls(undefined, mockBatchUpdate);
      expect(result2).toEqual([]);
      const result3 = await refreshS3FileUrls([], mockBatchUpdate);
      expect(result3).toEqual([]);
      expect(mockBatchUpdate).not.toHaveBeenCalled();
    });
  });
  describe('refreshS3Url', () => {
    it('refreshes an expired S3 URL', async () => {
      const { refreshS3Url } = await import('../crud');
      const pastDate = new Date(Date.now() - 2 * 60 * 60 * 1000);
      const dateStr = pastDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
      const fileObj: S3FileRef = {
        source: FileSources.s3,
        filepath: `https://bucket.s3.amazonaws.com/images/user123/file.jpg?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
      };
      const result = await refreshS3Url(fileObj, 60);
      expect(result).toContain('signed=true');
    });
    it('returns original URL if not expired', async () => {
      const { refreshS3Url } = await import('../crud');
      const fileObj: S3FileRef = {
        source: FileSources.s3,
        filepath: 'https://example.com/proxy/file.jpg',
      };
      const result = await refreshS3Url(fileObj, 3600);
      expect(result).toBe(fileObj.filepath);
    });
    it('returns empty string for null input', async () => {
      const { refreshS3Url } = await import('../crud');
      const result = await refreshS3Url(null as unknown as S3FileRef);
      expect(result).toBe('');
    });
    it('returns original URL for non-S3 files', async () => {
      const { refreshS3Url } = await import('../crud');
      const fileObj: S3FileRef = {
        source: 'local',
        filepath: '/local/path/file.jpg',
      };
      const result = await refreshS3Url(fileObj);
      expect(result).toBe(fileObj.filepath);
    });
    it('handles errors and returns original URL', async () => {
      (getSignedUrl as jest.Mock).mockRejectedValueOnce(new Error('Refresh failed'));
      const { refreshS3Url } = await import('../crud');
      const pastDate = new Date(Date.now() - 2 * 60 * 60 * 1000);
      const dateStr = pastDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
      const fileObj: S3FileRef = {
        source: FileSources.s3,
        filepath: `https://bucket.s3.amazonaws.com/images/user123/file.jpg?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
      };
      const result = await refreshS3Url(fileObj, 60);
      expect(result).toBe(fileObj.filepath);
      expect(logger.error).toHaveBeenCalled();
    });
  });
  // Key extraction must cope with virtual-hosted, path-style, legacy, and
  // custom (MinIO/R2) endpoint URL shapes, plus bare keys.
  describe('extractKeyFromS3Url', () => {
    it('extracts key from virtual-hosted-style URL', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url('https://bucket.s3.amazonaws.com/images/user123/file.png');
      expect(key).toBe('images/user123/file.png');
    });
    it('returns key as-is when not a URL', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url('images/user123/file.png');
      expect(key).toBe('images/user123/file.png');
    });
    it('throws on empty input', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      expect(() => extractKeyFromS3Url('')).toThrow('Invalid input: URL or key is empty');
    });
    it('handles URL with query parameters', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url(
        'https://bucket.s3.amazonaws.com/images/user123/file.png?X-Amz-Signature=abc',
      );
      expect(key).toBe('images/user123/file.png');
    });
    it('extracts key from path-style regional endpoint', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url(
        'https://s3.us-west-2.amazonaws.com/test-bucket/dogs/puppy.jpg',
      );
      expect(key).toBe('dogs/puppy.jpg');
    });
    it('extracts key from virtual-hosted regional endpoint', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url(
        'https://test-bucket.s3.us-west-2.amazonaws.com/dogs/puppy.png',
      );
      expect(key).toBe('dogs/puppy.png');
    });
    it('extracts key from legacy s3-region format', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url(
        'https://test-bucket.s3-us-west-2.amazonaws.com/cats/kitten.png',
      );
      expect(key).toBe('cats/kitten.png');
    });
    it('extracts key from legacy global endpoint', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url('https://test-bucket.s3.amazonaws.com/dogs/puppy.png');
      expect(key).toBe('dogs/puppy.png');
    });
    it('handles key with leading slash by removing it', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url('/images/user123/file.jpg');
      expect(key).toBe('images/user123/file.jpg');
    });
    it('handles simple key without slashes', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url('simple-file.txt');
      expect(key).toBe('simple-file.txt');
    });
    it('handles key with only two parts', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url('folder/file.txt');
      expect(key).toBe('folder/file.txt');
    });
    it('handles URLs with encoded characters', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url(
        'https://bucket.s3.amazonaws.com/test-bucket/images/user123/my%20file%20name.jpg',
      );
      expect(key).toBe('images/user123/my%20file%20name.jpg');
    });
    it('handles deep nested paths', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url(
        'https://bucket.s3.amazonaws.com/test-bucket/a/b/c/d/e/f/file.jpg',
      );
      expect(key).toBe('a/b/c/d/e/f/file.jpg');
    });
    it('returns empty string for URL with only bucket (no key)', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url('https://s3.us-west-2.amazonaws.com/my-bucket');
      expect(key).toBe('');
      expect(logger.warn).toHaveBeenCalled();
    });
    it('handles malformed URL and returns input', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const malformedUrl = 'https://invalid url with spaces.com/key';
      const result = extractKeyFromS3Url(malformedUrl);
      expect(logger.error).toHaveBeenCalled();
      expect(result).toBe(malformedUrl);
    });
    it('strips bucket from custom endpoint URLs (MinIO, R2)', async () => {
      const { extractKeyFromS3Url } = await import('../crud');
      const key = extractKeyFromS3Url(
        'https://minio.example.com/test-bucket/images/user123/file.jpg',
      );
      expect(key).toBe('images/user123/file.jpg');
    });
  });
  describe('needsRefresh with S3_REFRESH_EXPIRY_MS set', () => {
    beforeEach(() => {
      process.env.S3_REFRESH_EXPIRY_MS = '60000'; // 1 minute
      // resetModules so the crud module re-reads the env var on import.
      jest.resetModules();
    });
    afterEach(() => {
      delete process.env.S3_REFRESH_EXPIRY_MS;
    });
    it('returns true when URL age exceeds S3_REFRESH_EXPIRY_MS', async () => {
      const { needsRefresh } = await import('../crud');
      // URL created 2 minutes ago
      const oldDate = new Date(Date.now() - 2 * 60 * 1000);
      const dateStr = oldDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
      const url = `https://bucket.s3.amazonaws.com/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=3600`;
      const result = needsRefresh(url, 60);
      expect(result).toBe(true);
    });
    it('returns false when URL age is under S3_REFRESH_EXPIRY_MS', async () => {
      const { needsRefresh } = await import('../crud');
      // URL created 30 seconds ago
      const recentDate = new Date(Date.now() - 30 * 1000);
      const dateStr = recentDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
      const url = `https://bucket.s3.amazonaws.com/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=3600`;
      const result = needsRefresh(url, 60);
      expect(result).toBe(false);
    });
  });
});

View file

@ -0,0 +1,182 @@
import fs from 'fs';
import type { S3ImageServiceDeps } from '~/storage/s3/images';
import type { ServerRequest } from '~/types';
import { S3ImageService } from '~/storage/s3/images';
import { saveBufferToS3 } from '~/storage/s3/crud';
// fs partial mock: only readFile/unlink are stubbed; the rest stays real.
jest.mock('fs', () => ({
  ...jest.requireActual('fs'),
  promises: {
    readFile: jest.fn(),
    unlink: jest.fn().mockResolvedValue(undefined),
  },
}));
// NOTE(review): this mocks '../crud' while the import above uses
// '~/storage/s3/crud' — assumes both specifiers resolve to the same module
// via the Jest moduleNameMapper; verify against jest config.
jest.mock('../crud', () => ({
  saveBufferToS3: jest
    .fn()
    .mockResolvedValue('https://bucket.s3.amazonaws.com/avatar.png?signed=true'),
}));
const mockSaveBufferToS3 = jest.mocked(saveBufferToS3);
// sharp is mocked so no native image processing runs in unit tests.
jest.mock('sharp', () => {
  return jest.fn(() => ({
    metadata: jest.fn().mockResolvedValue({ format: 'png', width: 100, height: 100 }),
    toFormat: jest.fn().mockReturnThis(),
    toBuffer: jest.fn().mockResolvedValue(Buffer.from('processed')),
  }));
});
// Unit tests for S3ImageService; all collaborators (resize, user/file
// persistence, S3 upload, sharp, fs) are injected or mocked.
describe('S3ImageService', () => {
  let service: S3ImageService;
  let mockDeps: S3ImageServiceDeps;
  beforeEach(() => {
    jest.clearAllMocks();
    // Fresh dependency stubs per test so call counts start at zero.
    mockDeps = {
      resizeImageBuffer: jest.fn().mockResolvedValue({
        buffer: Buffer.from('resized'),
        width: 100,
        height: 100,
      }),
      updateUser: jest.fn().mockResolvedValue(undefined),
      updateFile: jest.fn().mockResolvedValue(undefined),
    };
    service = new S3ImageService(mockDeps);
  });
  describe('processAvatar', () => {
    it('uploads avatar and returns URL', async () => {
      const result = await service.processAvatar({
        buffer: Buffer.from('test'),
        userId: 'user123',
        manual: 'false',
      });
      expect(result).toContain('signed=true');
    });
    it('updates user avatar when manual is true', async () => {
      await service.processAvatar({
        buffer: Buffer.from('test'),
        userId: 'user123',
        manual: 'true',
      });
      expect(mockDeps.updateUser).toHaveBeenCalledWith(
        'user123',
        expect.objectContaining({ avatar: expect.any(String) }),
      );
    });
    it('does not update user when agentId is provided', async () => {
      // Agent avatars must not overwrite the user's own avatar.
      await service.processAvatar({
        buffer: Buffer.from('test'),
        userId: 'user123',
        manual: 'true',
        agentId: 'agent456',
      });
      expect(mockDeps.updateUser).not.toHaveBeenCalled();
    });
    it('generates agent avatar filename when agentId provided', async () => {
      await service.processAvatar({
        buffer: Buffer.from('test'),
        userId: 'user123',
        manual: 'false',
        agentId: 'agent456',
      });
      expect(mockSaveBufferToS3).toHaveBeenCalledWith(
        expect.objectContaining({
          fileName: expect.stringContaining('agent-agent456-avatar-'),
        }),
      );
    });
  });
  describe('prepareImageURL', () => {
    it('returns tuple with resolved promise and filepath', async () => {
      const file = { file_id: 'file123', filepath: 'https://example.com/file.png' };
      const result = await service.prepareImageURL(file);
      expect(Array.isArray(result)).toBe(true);
      expect(result[1]).toBe('https://example.com/file.png');
    });
    it('calls updateFile with file_id', async () => {
      const file = { file_id: 'file123', filepath: 'https://example.com/file.png' };
      await service.prepareImageURL(file);
      expect(mockDeps.updateFile).toHaveBeenCalledWith({ file_id: 'file123' });
    });
  });
  describe('constructor', () => {
    it('requires dependencies to be passed', () => {
      const newService = new S3ImageService(mockDeps);
      expect(newService).toBeInstanceOf(S3ImageService);
    });
  });
  // Temp-file cleanup contract: the uploaded temp file is unlinked on every
  // path — early read failure, resize failure, and success.
  describe('uploadImageToS3', () => {
    const mockReq = {
      user: { id: 'user123' },
      config: { imageOutputType: 'webp' },
    } as unknown as ServerRequest;
    it('deletes temp file on early failure (readFile throws)', async () => {
      (fs.promises.readFile as jest.Mock).mockRejectedValueOnce(
        new Error('ENOENT: no such file or directory'),
      );
      (fs.promises.unlink as jest.Mock).mockResolvedValueOnce(undefined);
      await expect(
        service.uploadImageToS3({
          req: mockReq,
          file: { path: '/tmp/input.jpg' } as Express.Multer.File,
          file_id: 'file123',
          endpoint: 'openai',
        }),
      ).rejects.toThrow('ENOENT: no such file or directory');
      expect(fs.promises.unlink).toHaveBeenCalledWith('/tmp/input.jpg');
    });
    it('deletes temp file on resize failure (resizeImageBuffer throws)', async () => {
      (fs.promises.readFile as jest.Mock).mockResolvedValueOnce(Buffer.from('raw'));
      (mockDeps.resizeImageBuffer as jest.Mock).mockRejectedValueOnce(new Error('Resize failed'));
      (fs.promises.unlink as jest.Mock).mockResolvedValueOnce(undefined);
      await expect(
        service.uploadImageToS3({
          req: mockReq,
          file: { path: '/tmp/input.jpg' } as Express.Multer.File,
          file_id: 'file123',
          endpoint: 'openai',
        }),
      ).rejects.toThrow('Resize failed');
      expect(fs.promises.unlink).toHaveBeenCalledWith('/tmp/input.jpg');
    });
    it('deletes temp file on success', async () => {
      (fs.promises.readFile as jest.Mock).mockResolvedValueOnce(Buffer.from('raw'));
      (fs.promises.unlink as jest.Mock).mockResolvedValueOnce(undefined);
      const result = await service.uploadImageToS3({
        req: mockReq,
        file: { path: '/tmp/input.webp' } as Express.Multer.File,
        file_id: 'file123',
        endpoint: 'openai',
      });
      expect(result.filepath).toContain('signed=true');
      expect(fs.promises.unlink).toHaveBeenCalledWith('/tmp/input.webp');
    });
  });
});

View file

@ -0,0 +1,529 @@
/**
* S3 Integration Tests
*
* These tests run against a REAL S3 bucket. They are skipped when AWS_TEST_BUCKET_NAME is not set.
*
* Run with:
* AWS_TEST_BUCKET_NAME=my-test-bucket npx jest s3.s3_integration
*
* Required env vars:
* - AWS_TEST_BUCKET_NAME: Dedicated test bucket (gates test execution)
* - AWS_REGION: Defaults to 'us-east-1'
 * - AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY: set explicit credentials so the AWS SDK's
 *   default credential provider chain does not perform a dynamic import, which fails under
 *   Jest with "A dynamic import callback was invoked without --experimental-vm-modules"
*/
import fs from 'fs';
import os from 'os';
import path from 'path';
import { Readable } from 'stream';
import { ListObjectsV2Command, DeleteObjectsCommand } from '@aws-sdk/client-s3';
import type { S3Client } from '@aws-sdk/client-s3';
import type { ServerRequest } from '~/types';
// Smallest valid 1x1 PNG, used as a real binary payload for upload tests.
const MINIMAL_PNG = Buffer.from([
  0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44, 0x52,
  0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x02, 0x00, 0x00, 0x00, 0x90, 0x77, 0x53,
  0xde, 0x00, 0x00, 0x00, 0x0c, 0x49, 0x44, 0x41, 0x54, 0x08, 0xd7, 0x63, 0xf8, 0xff, 0xff, 0x3f,
  0x00, 0x05, 0xfe, 0x02, 0xfe, 0xdc, 0xcc, 0x59, 0xe7, 0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4e,
  0x44, 0xae, 0x42, 0x60, 0x82,
]);
// TEST_BUCKET gates the whole suite (skipped when unset). The per-run unique
// prefix keeps concurrent/failed runs from colliding and scopes cleanup.
const TEST_BUCKET = process.env.AWS_TEST_BUCKET_NAME;
const TEST_USER_ID = 'test-user-123';
const TEST_RUN_ID = `integration-test-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
const TEST_BASE_PATH = TEST_RUN_ID;
/**
 * Removes every object under `prefix` in `bucket`, paging through
 * ListObjectsV2 results until the listing is no longer truncated.
 */
async function deleteAllWithPrefix(s3: S3Client, bucket: string, prefix: string): Promise<void> {
  let cursor: string | undefined;
  for (;;) {
    const page = await s3.send(
      new ListObjectsV2Command({
        Bucket: bucket,
        Prefix: prefix,
        ContinuationToken: cursor,
      }),
    );
    // Keep only entries that actually carry a Key before issuing the batch delete.
    const objects = (page.Contents ?? []).flatMap((obj) =>
      obj.Key !== undefined ? [{ Key: obj.Key }] : [],
    );
    if (objects.length > 0) {
      await s3.send(new DeleteObjectsCommand({ Bucket: bucket, Delete: { Objects: objects } }));
    }
    if (!page.IsTruncated) {
      return;
    }
    cursor = page.NextContinuationToken;
  }
}
describe('S3 Integration Tests', () => {
  // Without a dedicated bucket, register one skipped test (so the suite still
  // reports) and bail out before defining any real tests.
  if (!TEST_BUCKET) {
    // eslint-disable-next-line jest/expect-expect
    it.skip('Skipped: AWS_TEST_BUCKET_NAME not configured', () => {});
    return;
  }
  let originalEnv: NodeJS.ProcessEnv;
  let tempDir: string;
  let s3Client: S3Client | null = null;
  beforeAll(async () => {
    // Snapshot the environment so afterAll can restore it exactly.
    originalEnv = { ...process.env };
    // Use dedicated test bucket
    process.env.AWS_BUCKET_NAME = TEST_BUCKET;
    process.env.AWS_REGION = process.env.AWS_REGION || 'us-east-1';
    // Reset modules so the next import picks up the updated env vars.
    // s3Client is retained as a plain instance — it remains valid even though
    // beforeEach/afterEach call resetModules() for per-test isolation.
    jest.resetModules();
    const { initializeS3 } = await import('~/cdn/s3');
    s3Client = initializeS3();
  });
  beforeEach(() => {
    // Fresh scratch directory and module registry for every test.
    tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 's3-integration-'));
    jest.resetModules();
  });
  afterEach(async () => {
    if (tempDir && fs.existsSync(tempDir)) {
      fs.rmSync(tempDir, { recursive: true, force: true });
    }
    jest.resetModules();
  });
  afterAll(async () => {
    // Clean up all test files from this run
    if (s3Client && TEST_BUCKET) {
      await deleteAllWithPrefix(s3Client, TEST_BUCKET, TEST_RUN_ID);
    }
    process.env = originalEnv;
    jest.resetModules();
  });
describe('getS3Key', () => {
  it('constructs key from basePath, userId, and fileName', async () => {
    const crud = await import('../crud');
    // Canonical layout is `<basePath>/<userId>/<fileName>`.
    expect(crud.getS3Key(TEST_BASE_PATH, TEST_USER_ID, 'test-file.txt')).toBe(
      `${TEST_BASE_PATH}/${TEST_USER_ID}/test-file.txt`,
    );
  });
  it('handles nested file names', async () => {
    const crud = await import('../crud');
    expect(crud.getS3Key(TEST_BASE_PATH, TEST_USER_ID, 'folder/nested/file.pdf')).toBe(
      `${TEST_BASE_PATH}/${TEST_USER_ID}/folder/nested/file.pdf`,
    );
  });
});
describe('saveBufferToS3 and getS3URL', () => {
  it('uploads buffer and returns signed URL', async () => {
    const { saveBufferToS3 } = await import('../crud');
    const testContent = 'Hello, S3!';
    const buffer = Buffer.from(testContent);
    // Timestamped name avoids collisions with leftovers from earlier runs.
    const fileName = `test-${Date.now()}.txt`;
    const downloadURL = await saveBufferToS3({
      userId: TEST_USER_ID,
      buffer,
      fileName,
      basePath: TEST_BASE_PATH,
    });
    expect(downloadURL).toBeDefined();
    // Presence of the SigV4 signature query param marks the URL as presigned.
    expect(downloadURL).toContain('X-Amz-Signature');
    expect(downloadURL).toContain(fileName);
  });
  it('can get signed URL for existing file', async () => {
    const { saveBufferToS3, getS3URL } = await import('../crud');
    const buffer = Buffer.from('test content for URL');
    const fileName = `url-test-${Date.now()}.txt`;
    await saveBufferToS3({
      userId: TEST_USER_ID,
      buffer,
      fileName,
      basePath: TEST_BASE_PATH,
    });
    const signedUrl = await getS3URL({
      userId: TEST_USER_ID,
      fileName,
      basePath: TEST_BASE_PATH,
    });
    expect(signedUrl).toBeDefined();
    expect(signedUrl).toContain('X-Amz-Signature');
  });
  it('can get signed URL with custom filename and content type', async () => {
    const { saveBufferToS3, getS3URL } = await import('../crud');
    const buffer = Buffer.from('custom headers test');
    const fileName = `headers-test-${Date.now()}.txt`;
    await saveBufferToS3({
      userId: TEST_USER_ID,
      buffer,
      fileName,
      basePath: TEST_BASE_PATH,
    });
    const signedUrl = await getS3URL({
      userId: TEST_USER_ID,
      fileName,
      basePath: TEST_BASE_PATH,
      customFilename: 'download.txt',
      contentType: 'text/plain',
    });
    // Download name / content type surface as S3 response-override query params.
    expect(signedUrl).toContain('response-content-disposition');
    expect(signedUrl).toContain('response-content-type');
  });
});
describe('saveURLToS3', () => {
  it('fetches URL content and uploads to S3', async () => {
    const { saveURLToS3 } = await import('../crud');
    // Any stable public URL works; the repo's own package.json is used here.
    const result = await saveURLToS3({
      userId: TEST_USER_ID,
      URL: 'https://raw.githubusercontent.com/danny-avila/LibreChat/main/package.json',
      fileName: `url-upload-${Date.now()}.json`,
      basePath: TEST_BASE_PATH,
    });
    expect(result).toBeDefined();
    expect(result).toContain('X-Amz-Signature');
  });
});
describe('extractKeyFromS3Url', () => {
  it('extracts key from signed URL', async () => {
    const { saveBufferToS3, extractKeyFromS3Url } = await import('../crud');
    const buffer = Buffer.from('extract key test');
    const fileName = `extract-key-${Date.now()}.txt`;
    const signedUrl = await saveBufferToS3({
      userId: TEST_USER_ID,
      buffer,
      fileName,
      basePath: TEST_BASE_PATH,
    });
    // Round-trip: the key recovered from the presigned URL must match the
    // canonical `<basePath>/<userId>/<fileName>` layout produced by getS3Key.
    const extractedKey = extractKeyFromS3Url(signedUrl);
    expect(extractedKey).toBe(`${TEST_BASE_PATH}/${TEST_USER_ID}/${fileName}`);
  });
  it('returns key as-is when not a URL', async () => {
    const { extractKeyFromS3Url } = await import('../crud');
    const key = `${TEST_BASE_PATH}/${TEST_USER_ID}/file.txt`;
    expect(extractKeyFromS3Url(key)).toBe(key);
  });
});
describe('uploadFileToS3', () => {
  it('uploads file and returns filepath with bytes', async () => {
    const { uploadFileToS3 } = await import('../crud');
    const testContent = 'File upload test content';
    const testFilePath = path.join(tempDir, 'upload-test.txt');
    fs.writeFileSync(testFilePath, testContent);
    const mockReq = {
      user: { id: TEST_USER_ID },
    } as ServerRequest;
    // Full Multer file shape; uploadFileToS3 only reads `path` and
    // `originalname`, the remaining fields just satisfy the type.
    const mockFile = {
      path: testFilePath,
      originalname: 'upload-test.txt',
      fieldname: 'file',
      encoding: '7bit',
      mimetype: 'text/plain',
      size: Buffer.byteLength(testContent),
      stream: fs.createReadStream(testFilePath),
      destination: tempDir,
      filename: 'upload-test.txt',
      buffer: Buffer.from(testContent),
    } as Express.Multer.File;
    const fileId = `file-${Date.now()}`;
    const result = await uploadFileToS3({
      req: mockReq,
      file: mockFile,
      file_id: fileId,
      basePath: TEST_BASE_PATH,
    });
    expect(result.filepath).toBeDefined();
    expect(result.filepath).toContain('X-Amz-Signature');
    // bytes is taken from the on-disk file size, not the signed URL.
    expect(result.bytes).toBe(Buffer.byteLength(testContent));
  });
  it('throws error when user is not authenticated', async () => {
    const { uploadFileToS3 } = await import('../crud');
    const mockReq = {} as ServerRequest;
    const mockFile = {
      path: '/fake/path.txt',
      originalname: 'test.txt',
    } as Express.Multer.File;
    await expect(
      uploadFileToS3({
        req: mockReq,
        file: mockFile,
        file_id: 'test-id',
        basePath: TEST_BASE_PATH,
      }),
    ).rejects.toThrow('User not authenticated');
  });
});
describe('getS3FileStream', () => {
  it('returns readable stream for existing file', async () => {
    const { saveBufferToS3, getS3FileStream } = await import('../crud');
    const testContent = 'Stream test content';
    const buffer = Buffer.from(testContent);
    const fileName = `stream-test-${Date.now()}.txt`;
    const signedUrl = await saveBufferToS3({
      userId: TEST_USER_ID,
      buffer,
      fileName,
      basePath: TEST_BASE_PATH,
    });
    const mockReq = {
      user: { id: TEST_USER_ID },
    } as ServerRequest;
    const stream = await getS3FileStream(mockReq, signedUrl);
    expect(stream).toBeInstanceOf(Readable);
    // Drain the stream and verify the bytes round-trip exactly.
    const chunks: Uint8Array[] = [];
    for await (const chunk of stream) {
      chunks.push(chunk as Uint8Array);
    }
    const downloadedContent = Buffer.concat(chunks).toString();
    expect(downloadedContent).toBe(testContent);
  });
});
describe('needsRefresh', () => {
  it('returns false for non-signed URLs', async () => {
    const { needsRefresh } = await import('../crud');
    // URLs without X-Amz-Signature are never candidates for re-signing.
    expect(needsRefresh('https://example.com/file.png', 3600)).toBe(false);
  });
  it('returns true for expired signed URLs', async () => {
    const { saveBufferToS3, needsRefresh } = await import('../crud');
    const buffer = Buffer.from('refresh test');
    const fileName = `refresh-test-${Date.now()}.txt`;
    const signedUrl = await saveBufferToS3({
      userId: TEST_USER_ID,
      buffer,
      fileName,
      basePath: TEST_BASE_PATH,
    });
    // A huge buffer window (~11.5 days) exceeds any allowed URL expiry,
    // so even a freshly signed URL counts as "expiring soon".
    const result = needsRefresh(signedUrl, 999999);
    expect(result).toBe(true);
  });
  it('returns false for fresh signed URLs', async () => {
    const { saveBufferToS3, needsRefresh } = await import('../crud');
    const buffer = Buffer.from('fresh test');
    const fileName = `fresh-test-${Date.now()}.txt`;
    const signedUrl = await saveBufferToS3({
      userId: TEST_USER_ID,
      buffer,
      fileName,
      basePath: TEST_BASE_PATH,
    });
    // 60s buffer is below the default 120s URL expiry, so no refresh needed.
    const result = needsRefresh(signedUrl, 60);
    expect(result).toBe(false);
  });
});
describe('getNewS3URL', () => {
  it('generates signed URL from existing URL', async () => {
    const { saveBufferToS3, getNewS3URL } = await import('../crud');
    const fileName = `new-url-${Date.now()}.txt`;
    const originalUrl = await saveBufferToS3({
      userId: TEST_USER_ID,
      buffer: Buffer.from('new url test'),
      fileName,
      basePath: TEST_BASE_PATH,
    });
    // Re-signing derives the key from the old URL and signs it again.
    const refreshed = await getNewS3URL(originalUrl);
    expect(refreshed).toBeDefined();
    expect(refreshed).toContain('X-Amz-Signature');
    expect(refreshed).toContain(fileName);
  });
});
describe('refreshS3Url', () => {
  it('returns original URL for non-S3 source', async () => {
    const { refreshS3Url } = await import('../crud');
    const fileObj = {
      filepath: 'https://example.com/file.png',
      source: 'local',
    };
    const result = await refreshS3Url(fileObj, 3600);
    expect(result).toBe(fileObj.filepath);
  });
  it('refreshes URL for S3 source when needed', async () => {
    const { saveBufferToS3, refreshS3Url } = await import('../crud');
    const buffer = Buffer.from('s3 refresh test');
    const fileName = `s3-refresh-${Date.now()}.txt`;
    const originalUrl = await saveBufferToS3({
      userId: TEST_USER_ID,
      buffer,
      fileName,
      basePath: TEST_BASE_PATH,
    });
    const fileObj = {
      filepath: originalUrl,
      source: 's3',
    };
    // Oversized buffer forces needsRefresh() to be true, so a new URL is signed.
    const newUrl = await refreshS3Url(fileObj, 999999);
    expect(newUrl).toBeDefined();
    expect(newUrl).toContain('X-Amz-Signature');
  });
});
describe('S3ImageService', () => {
  /**
   * Builds a fresh set of service dependencies per test: a pass-through
   * resize that reports fixed 100x100 dimensions, plus spy persistence fns.
   * (Was previously duplicated inline in every test.)
   */
  const createMockDeps = () => ({
    resizeImageBuffer: jest.fn().mockImplementation(async (buffer: Buffer) => ({
      buffer,
      width: 100,
      height: 100,
    })),
    updateUser: jest.fn().mockResolvedValue(undefined),
    updateFile: jest.fn().mockResolvedValue(undefined),
  });
  it('uploads avatar and returns URL', async () => {
    const { S3ImageService } = await import('../images');
    const mockDeps = createMockDeps();
    const imageService = new S3ImageService(mockDeps);
    const result = await imageService.processAvatar({
      buffer: MINIMAL_PNG,
      userId: TEST_USER_ID,
      manual: 'false',
      basePath: TEST_BASE_PATH,
    });
    expect(result).toBeDefined();
    expect(result).toContain('X-Amz-Signature');
    expect(result).toContain('avatar');
  });
  it('updates user when manual is true', async () => {
    const { S3ImageService } = await import('../images');
    const mockDeps = createMockDeps();
    const imageService = new S3ImageService(mockDeps);
    await imageService.processAvatar({
      buffer: MINIMAL_PNG,
      userId: TEST_USER_ID,
      manual: 'true',
      basePath: TEST_BASE_PATH,
    });
    // manual === 'true' with no agentId persists the avatar on the user.
    expect(mockDeps.updateUser).toHaveBeenCalledWith(
      TEST_USER_ID,
      expect.objectContaining({ avatar: expect.any(String) }),
    );
  });
  it('does not update user when agentId is provided', async () => {
    const { S3ImageService } = await import('../images');
    const mockDeps = createMockDeps();
    const imageService = new S3ImageService(mockDeps);
    await imageService.processAvatar({
      buffer: MINIMAL_PNG,
      userId: TEST_USER_ID,
      manual: 'true',
      agentId: 'agent-123',
      basePath: TEST_BASE_PATH,
    });
    // Agent avatars never touch the user record, even when manual is 'true'.
    expect(mockDeps.updateUser).not.toHaveBeenCalled();
  });
  it('returns tuple with resolved promise and filepath in prepareImageURL', async () => {
    const { S3ImageService } = await import('../images');
    const mockDeps = createMockDeps();
    const imageService = new S3ImageService(mockDeps);
    const testFile = {
      file_id: 'file-123',
      filepath: 'https://example.com/file.png',
    };
    const result = await imageService.prepareImageURL(testFile);
    expect(Array.isArray(result)).toBe(true);
    expect(result[1]).toBe(testFile.filepath);
    expect(mockDeps.updateFile).toHaveBeenCalledWith({ file_id: 'file-123' });
  });
});
});

View file

@ -0,0 +1,460 @@
import fs from 'fs';
import { Readable } from 'stream';
import { logger } from '@librechat/data-schemas';
import { FileSources } from 'librechat-data-provider';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import {
PutObjectCommand,
GetObjectCommand,
HeadObjectCommand,
DeleteObjectCommand,
} from '@aws-sdk/client-s3';
import type { GetObjectCommandInput } from '@aws-sdk/client-s3';
import type { TFile } from 'librechat-data-provider';
import type { ServerRequest } from '~/types';
import type {
UploadFileParams,
SaveBufferParams,
BatchUpdateFn,
SaveURLParams,
GetURLParams,
UploadResult,
S3FileRef,
} from '~/storage/types';
import { initializeS3 } from '~/cdn/s3';
import { deleteRagFile } from '~/files';
import { s3Config } from './s3Config';
// Env-derived S3 settings, resolved once at module load (see ./s3Config).
const {
  AWS_BUCKET_NAME: bucketName,
  AWS_ENDPOINT_URL: endpoint,
  AWS_FORCE_PATH_STYLE: forcePathStyle,
  S3_URL_EXPIRY_SECONDS: s3UrlExpirySeconds,
  S3_REFRESH_EXPIRY_MS: s3RefreshExpiryMs,
  DEFAULT_BASE_PATH: defaultBasePath,
} = s3Config;
/**
 * Builds the canonical S3 object key: `<basePath>/<userId>/<fileName>`.
 *
 * `basePath` and `userId` must not contain slashes: downstream code (e.g.
 * deleteFileFromS3's ownership check and refreshS3Url's key parsing) splits
 * keys on '/' and relies on the user ID being exactly the second segment.
 *
 * @throws Error when basePath or userId contains a '/'.
 */
export const getS3Key = (basePath: string, userId: string, fileName: string): string => {
  if (basePath.includes('/')) {
    throw new Error(`[getS3Key] basePath must not contain slashes: "${basePath}"`);
  }
  if (userId.includes('/')) {
    throw new Error(`[getS3Key] userId must not contain slashes: "${userId}"`);
  }
  return `${basePath}/${userId}/${fileName}`;
};
/**
 * Generates a presigned GET URL for the object stored under
 * `<basePath>/<userId>/<fileName>`. Expiry comes from S3_URL_EXPIRY_SECONDS
 * (see ./s3Config).
 *
 * @param customFilename - When set, adds a Content-Disposition response
 *   override so browsers download the object under this name.
 * @param contentType - When set, overrides the response Content-Type.
 * @throws When the S3 client is not initialized or signing fails.
 */
export async function getS3URL({
  userId,
  fileName,
  basePath = defaultBasePath,
  customFilename = null,
  contentType = null,
}: GetURLParams): Promise<string> {
  const key = getS3Key(basePath, userId, fileName);
  const params: GetObjectCommandInput = { Bucket: bucketName, Key: key };
  if (customFilename) {
    // Strip quotes and CR/LF so the value cannot escape the quoted filename
    // or inject additional response headers.
    const safeFilename = customFilename.replace(/["\r\n]/g, '');
    params.ResponseContentDisposition = `attachment; filename="${safeFilename}"`;
  }
  if (contentType) {
    params.ResponseContentType = contentType;
  }
  try {
    const s3 = initializeS3();
    if (!s3) {
      throw new Error('[getS3URL] S3 not initialized');
    }
    return await getSignedUrl(s3, new GetObjectCommand(params), { expiresIn: s3UrlExpirySeconds });
  } catch (error) {
    logger.error('[getS3URL] Error getting signed URL from S3:', (error as Error).message);
    throw error;
  }
}
/**
 * Uploads an in-memory buffer to S3 under `<basePath>/<userId>/<fileName>`
 * and returns a presigned download URL for the stored object.
 */
export async function saveBufferToS3({
  userId,
  buffer,
  fileName,
  basePath = defaultBasePath,
}: SaveBufferParams): Promise<string> {
  const objectKey = getS3Key(basePath, userId, fileName);
  try {
    const client = initializeS3();
    if (!client) {
      throw new Error('[saveBufferToS3] S3 not initialized');
    }
    await client.send(new PutObjectCommand({ Bucket: bucketName, Key: objectKey, Body: buffer }));
    return await getS3URL({ userId, fileName, basePath });
  } catch (error) {
    logger.error('[saveBufferToS3] Error uploading buffer to S3:', (error as Error).message);
    throw error;
  }
}
/**
 * Downloads the content at `URL` and stores it in S3 under
 * `<basePath>/<userId>/<fileName>`; returns the presigned download URL.
 *
 * @throws When the fetch fails (non-2xx status) or the upload fails.
 */
export async function saveURLToS3({
  userId,
  URL,
  fileName,
  basePath = defaultBasePath,
}: SaveURLParams): Promise<string> {
  try {
    const response = await fetch(URL);
    if (!response.ok) {
      throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`);
    }
    const body = Buffer.from(await response.arrayBuffer());
    return await saveBufferToS3({ userId, buffer: body, fileName, basePath });
  } catch (error) {
    logger.error('[saveURLToS3] Error uploading file from URL to S3:', (error as Error).message);
    throw error;
  }
}
/**
 * Derives the S3 object key from a (possibly presigned) URL, or passes a
 * bare key through unchanged.
 *
 * Handles three URL shapes:
 * 1. Custom endpoint with path-style addressing (MinIO, R2, ...): the key is
 *    whatever follows the endpoint path plus the bucket segment.
 * 2. AWS path-style URLs (s3.amazonaws.com, s3.<region>.amazonaws.com, or a
 *    pathname starting with the bucket name): the key is everything after
 *    the first path segment.
 * 3. Virtual-hosted-style URLs: the entire pathname minus the leading '/'.
 *
 * Inputs that fail URL parsing are treated as raw keys (leading '/' stripped).
 *
 * @throws Error when the input is empty.
 */
export function extractKeyFromS3Url(fileUrlOrKey: string): string {
  if (!fileUrlOrKey) {
    throw new Error('Invalid input: URL or key is empty');
  }
  try {
    const url = new URL(fileUrlOrKey);
    const hostname = url.hostname;
    const pathname = url.pathname.substring(1);
    if (endpoint && forcePathStyle) {
      // Path-style custom endpoint: key begins after "<endpoint-path>/<bucket>/".
      const endpointUrl = new URL(endpoint);
      const startPos =
        endpointUrl.pathname.length +
        (endpointUrl.pathname.endsWith('/') ? 0 : 1) +
        bucketName.length +
        1;
      const key = url.pathname.substring(startPos);
      if (!key) {
        logger.warn(
          `[extractKeyFromS3Url] Extracted key is empty for endpoint path-style URL: ${fileUrlOrKey}`,
        );
      } else {
        logger.debug(`[extractKeyFromS3Url] fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${key}`);
      }
      return key;
    }
    if (
      hostname === 's3.amazonaws.com' ||
      hostname.match(/^s3[-.][a-z0-9-]+\.amazonaws\.com$/) ||
      (bucketName && pathname.startsWith(`${bucketName}/`))
    ) {
      // AWS path-style: the first path segment is the bucket, the rest is the key.
      const firstSlashIndex = pathname.indexOf('/');
      if (firstSlashIndex > 0) {
        const key = pathname.substring(firstSlashIndex + 1);
        if (key === '') {
          logger.warn(
            `[extractKeyFromS3Url] Extracted key is empty after removing bucket name from URL: ${fileUrlOrKey}`,
          );
        } else {
          logger.debug(
            `[extractKeyFromS3Url] fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${key}`,
          );
        }
        return key;
      }
      logger.warn(
        `[extractKeyFromS3Url] Unable to extract key from path-style URL: ${fileUrlOrKey}`,
      );
      return '';
    }
    // Virtual-hosted-style (bucket encoded in the hostname): pathname is the key.
    logger.debug(`[extractKeyFromS3Url] fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${pathname}`);
    return pathname;
  } catch (error) {
    // new URL() threw — the input is most likely a bare key rather than a URL.
    if (fileUrlOrKey.startsWith('http://') || fileUrlOrKey.startsWith('https://')) {
      logger.error(
        `[extractKeyFromS3Url] Error parsing URL: ${fileUrlOrKey}, Error: ${(error as Error).message}`,
      );
    } else {
      logger.debug(`[extractKeyFromS3Url] Non-URL input, using fallback: ${fileUrlOrKey}`);
    }
    const parts = fileUrlOrKey.split('/');
    // Already shaped like a full key (`basePath/userId/...`): return untouched.
    if (parts.length >= 3 && !fileUrlOrKey.startsWith('http') && !fileUrlOrKey.startsWith('/')) {
      return fileUrlOrKey;
    }
    const key = fileUrlOrKey.startsWith('/') ? fileUrlOrKey.substring(1) : fileUrlOrKey;
    logger.debug(
      `[extractKeyFromS3Url] FALLBACK. fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${key}`,
    );
    return key;
  }
}
/**
 * Deletes a user's file from S3 and its associated RAG entry.
 *
 * Ownership check: keys follow `<basePath>/<userId>/<fileName>` (see
 * getS3Key), so the key's second segment must equal the requesting user's
 * id; otherwise this throws without touching S3.
 *
 * A missing object is treated as already-deleted: the RAG entry is still
 * cleaned up and no error is raised.
 *
 * @throws When the user is unauthenticated, the key does not belong to the
 *   user, S3 is uninitialized, or deletion fails for a reason other than a
 *   missing key.
 */
export async function deleteFileFromS3(req: ServerRequest, file: TFile): Promise<void> {
  if (!req.user) {
    throw new Error('[deleteFileFromS3] User not authenticated');
  }
  const userId = req.user.id;
  const key = extractKeyFromS3Url(file.filepath);
  const keyParts = key.split('/');
  if (keyParts.length < 2 || keyParts[1] !== userId) {
    const message = `[deleteFileFromS3] User ID mismatch: ${userId} vs ${key}`;
    logger.error(message);
    throw new Error(message);
  }
  const s3 = initializeS3();
  if (!s3) {
    throw new Error('[deleteFileFromS3] S3 not initialized');
  }
  const params = { Bucket: bucketName, Key: key };
  try {
    try {
      // HEAD first to distinguish "missing object" from other failures.
      const headCommand = new HeadObjectCommand(params);
      await s3.send(headCommand);
      logger.debug('[deleteFileFromS3] File exists, proceeding with deletion');
    } catch (headErr) {
      if ((headErr as { name?: string }).name === 'NotFound') {
        logger.warn(`[deleteFileFromS3] File does not exist: ${key}`);
        await deleteRagFile({ userId, file });
        return;
      }
      throw headErr;
    }
    await s3.send(new DeleteObjectCommand(params));
    await deleteRagFile({ userId, file });
    logger.debug('[deleteFileFromS3] S3 File deletion completed');
  } catch (error) {
    logger.error(`[deleteFileFromS3] Error deleting file from S3: ${(error as Error).message}`);
    logger.error((error as Error).stack);
    // A race where the object vanished between HEAD and DELETE is benign.
    if ((error as { name?: string }).name === 'NoSuchKey') {
      return;
    }
    throw error;
  }
}
/**
 * Streams an on-disk upload (Multer temp file) to S3 under
 * `<basePath>/<userId>/<file_id>__<originalname>` and returns the presigned
 * download URL plus the byte size read from the filesystem.
 *
 * On failure the temp file is deleted; on success it is deliberately kept
 * (see NOTE below).
 *
 * @throws When the user is unauthenticated, S3 is uninitialized, or the
 *   upload fails.
 */
export async function uploadFileToS3({
  req,
  file,
  file_id,
  basePath = defaultBasePath,
}: UploadFileParams): Promise<UploadResult> {
  if (!req.user) {
    throw new Error('[uploadFileToS3] User not authenticated');
  }
  try {
    const inputFilePath = file.path;
    const userId = req.user.id;
    const fileName = `${file_id}__${file.originalname}`;
    const key = getS3Key(basePath, userId, fileName);
    // Byte count comes from the filesystem, not from Multer's metadata.
    const stats = await fs.promises.stat(inputFilePath);
    const bytes = stats.size;
    const fileStream = fs.createReadStream(inputFilePath);
    const s3 = initializeS3();
    if (!s3) {
      throw new Error('[uploadFileToS3] S3 not initialized');
    }
    const uploadParams = {
      Bucket: bucketName,
      Key: key,
      Body: fileStream,
    };
    await s3.send(new PutObjectCommand(uploadParams));
    const fileURL = await getS3URL({ userId, fileName, basePath });
    // NOTE: temp file is intentionally NOT deleted on the success path.
    // The caller (processAgentFileUpload) reads file.path after this returns
    // to stream the file to the RAG vector embedding service (POST /embed).
    // Temp file lifecycle on success is the caller's responsibility.
    return { filepath: fileURL, bytes };
  } catch (error) {
    logger.error('[uploadFileToS3] Error streaming file to S3:', error);
    // Best-effort cleanup on failure; a cleanup error is logged, not rethrown.
    if (file?.path) {
      await fs.promises
        .unlink(file.path)
        .catch((e: unknown) =>
          logger.error('[uploadFileToS3] Failed to delete temp file:', (e as Error).message),
        );
    }
    throw error;
  }
}
/**
 * Opens a readable stream for an S3 object identified by a signed URL or a
 * raw key. The request argument is unused but kept for interface parity
 * with other storage backends.
 */
export async function getS3FileStream(_req: ServerRequest, filePath: string): Promise<Readable> {
  try {
    const Key = extractKeyFromS3Url(filePath);
    const client = initializeS3();
    if (!client) {
      throw new Error('[getS3FileStream] S3 not initialized');
    }
    const response = await client.send(new GetObjectCommand({ Bucket: bucketName, Key }));
    if (!response.Body) {
      throw new Error(`[getS3FileStream] S3 response body is empty for key: ${Key}`);
    }
    return response.Body as Readable;
  } catch (error) {
    logger.error('[getS3FileStream] Error retrieving S3 file stream:', error);
    throw error;
  }
}
/**
 * Determines whether a presigned S3 URL should be re-signed.
 *
 * Unsigned URLs (no X-Amz-Signature) never need refreshing. For signed
 * URLs, when S3_REFRESH_EXPIRY_MS is configured the URL's age is compared
 * against that window; otherwise the URL's own expiry (X-Amz-Date +
 * X-Amz-Expires) is compared against now + bufferSeconds.
 *
 * Malformed or missing signing parameters are treated as "needs refresh" —
 * previously a non-numeric X-Amz-Expires produced an Invalid Date whose
 * comparison was always false, silently keeping stale URLs.
 */
export function needsRefresh(signedUrl: string, bufferSeconds: number): boolean {
  try {
    const url = new URL(signedUrl);
    if (!url.searchParams.has('X-Amz-Signature')) {
      return false;
    }
    const expiresParam = url.searchParams.get('X-Amz-Expires');
    const dateParam = url.searchParams.get('X-Amz-Date');
    if (!expiresParam || !dateParam) {
      return true;
    }
    // X-Amz-Date uses ISO 8601 basic format: YYYYMMDD'T'HHMMSS'Z'.
    const year = dateParam.substring(0, 4);
    const month = dateParam.substring(4, 6);
    const day = dateParam.substring(6, 8);
    const hour = dateParam.substring(9, 11);
    const minute = dateParam.substring(11, 13);
    const second = dateParam.substring(13, 15);
    const dateObj = new Date(`${year}-${month}-${day}T${hour}:${minute}:${second}Z`);
    if (Number.isNaN(dateObj.getTime())) {
      // Unparseable signing date — err on the side of refreshing.
      return true;
    }
    const now = new Date();
    if (s3RefreshExpiryMs !== null) {
      const urlAge = now.getTime() - dateObj.getTime();
      return urlAge >= s3RefreshExpiryMs;
    }
    const expirySeconds = parseInt(expiresParam, 10);
    if (Number.isNaN(expirySeconds)) {
      // Unparseable expiry — err on the side of refreshing.
      return true;
    }
    const expiresAtDate = new Date(dateObj.getTime() + expirySeconds * 1000);
    const bufferTime = new Date(now.getTime() + bufferSeconds * 1000);
    return expiresAtDate <= bufferTime;
  } catch (error) {
    logger.error('Error checking URL expiration:', error);
    return true;
  }
}
/**
 * Re-signs an existing S3 URL (or key) and returns a fresh presigned URL.
 * Returns undefined when the key cannot be derived (fewer than three
 * segments) or when signing fails — errors are logged, not rethrown.
 */
export async function getNewS3URL(currentURL: string): Promise<string | undefined> {
  try {
    const s3Key = extractKeyFromS3Url(currentURL);
    const segments = s3Key ? s3Key.split('/') : [];
    if (segments.length < 3) {
      return;
    }
    // Keys follow `<basePath>/<userId>/<fileName...>`.
    const [basePath, userId, ...rest] = segments;
    return getS3URL({ userId, fileName: rest.join('/'), basePath });
  } catch (error) {
    logger.error('Error getting new S3 URL:', error);
  }
}
/**
 * Refreshes expiring presigned URLs for a list of files.
 *
 * Only files with `source === FileSources.s3`, a file_id, and a filepath
 * that needsRefresh() flags are touched. Refreshed paths are persisted in a
 * single batchUpdateFiles call and reflected in the returned array (same
 * order as the input). Per-file failures are logged and skipped.
 *
 * URL re-signing is independent per file, so refreshes run concurrently
 * (previously each file was awaited sequentially). Note the order of
 * entries passed to batchUpdateFiles follows completion order.
 */
export async function refreshS3FileUrls(
  files: TFile[] | null | undefined,
  batchUpdateFiles: BatchUpdateFn,
  bufferSeconds = 3600,
): Promise<TFile[]> {
  if (!files || !Array.isArray(files) || files.length === 0) {
    return [];
  }
  const filesToUpdate: Array<{ file_id: string; filepath: string }> = [];
  const updatedFiles = [...files];
  await Promise.all(
    files.map(async (file, i) => {
      if (!file?.file_id || file.source !== FileSources.s3 || !file.filepath) {
        return;
      }
      if (!needsRefresh(file.filepath, bufferSeconds)) {
        return;
      }
      try {
        const newURL = await getNewS3URL(file.filepath);
        if (!newURL) {
          return;
        }
        filesToUpdate.push({
          file_id: file.file_id,
          filepath: newURL,
        });
        updatedFiles[i] = { ...file, filepath: newURL };
      } catch (error) {
        logger.error(`Error refreshing S3 URL for file ${file.file_id}:`, error);
      }
    }),
  );
  if (filesToUpdate.length > 0) {
    await batchUpdateFiles(filesToUpdate);
  }
  return updatedFiles;
}
/**
 * Returns a usable filepath for a single file reference: for s3-sourced
 * files whose signed URL is close to expiry, a fresh presigned URL is
 * generated; otherwise the existing filepath is returned unchanged. Any
 * failure falls back to the original filepath (logged, never thrown).
 */
export async function refreshS3Url(fileObj: S3FileRef, bufferSeconds = 3600): Promise<string> {
  if (!fileObj || fileObj.source !== FileSources.s3 || !fileObj.filepath) {
    return fileObj?.filepath || '';
  }
  const currentUrl = fileObj.filepath;
  if (!needsRefresh(currentUrl, bufferSeconds)) {
    return currentUrl;
  }
  try {
    const s3Key = extractKeyFromS3Url(currentUrl);
    if (!s3Key) {
      logger.warn(`Unable to extract S3 key from URL: ${currentUrl}`);
      return currentUrl;
    }
    const segments = s3Key.split('/');
    if (segments.length < 3) {
      logger.warn(`Invalid S3 key format: ${s3Key}`);
      return currentUrl;
    }
    // Keys follow `<basePath>/<userId>/<fileName...>`.
    const [basePath, userId, ...rest] = segments;
    const newUrl = await getS3URL({ userId, fileName: rest.join('/'), basePath });
    logger.debug(`Refreshed S3 URL for key: ${s3Key}`);
    return newUrl;
  } catch (error) {
    logger.error(`Error refreshing S3 URL: ${(error as Error).message}`);
    return currentUrl;
  }
}

View file

@ -0,0 +1,141 @@
import fs from 'fs';
import path from 'path';
import sharp from 'sharp';
import { logger } from '@librechat/data-schemas';
import type { IUser } from '@librechat/data-schemas';
import type { TFile } from 'librechat-data-provider';
import type { FormatEnum } from 'sharp';
import type { UploadImageParams, ImageUploadResult, ProcessAvatarParams } from '~/storage/types';
import { saveBufferToS3 } from './crud';
import { s3Config } from './s3Config';
const { DEFAULT_BASE_PATH: defaultBasePath } = s3Config;
/**
 * Collaborators injected into S3ImageService so that image resizing and DB
 * persistence can be swapped out (and mocked in tests).
 */
export interface S3ImageServiceDeps {
  /** Resizes an image buffer for the given resolution preset and endpoint, reporting the resulting dimensions. */
  resizeImageBuffer: (
    buffer: Buffer,
    resolution: string,
    endpoint: string,
  ) => Promise<{ buffer: Buffer; width: number; height: number }>;
  /** Persists a new avatar URL on the user record. */
  updateUser: (userId: string, update: { avatar: string }) => Promise<IUser | null>;
  /** Updates the file record identified by file_id and returns it. */
  updateFile: (params: { file_id: string }) => Promise<TFile>;
}
/**
 * Image-specific S3 operations: upload with format conversion, avatar
 * processing, and image URL preparation. Resizing and persistence are
 * injected via S3ImageServiceDeps, keeping the service testable.
 */
export class S3ImageService {
  private deps: S3ImageServiceDeps;
  constructor(deps: S3ImageServiceDeps) {
    this.deps = deps;
  }
  /**
   * Reads an image from disk, resizes it, converts it to the configured
   * output format (default webp) when it is not already in that format,
   * uploads it to S3, and returns the signed URL with the resized
   * dimensions and uploaded byte count. The temp file at `file.path` is
   * always deleted — on success and failure alike.
   *
   * @throws When the user is unauthenticated or any processing/upload step fails.
   */
  async uploadImageToS3({
    req,
    file,
    file_id,
    endpoint,
    resolution = 'high',
    basePath = defaultBasePath,
  }: UploadImageParams): Promise<ImageUploadResult> {
    const inputFilePath = file.path;
    try {
      if (!req.user) {
        throw new Error('[S3ImageService.uploadImageToS3] User not authenticated');
      }
      const appConfig = req.config;
      const inputBuffer = await fs.promises.readFile(inputFilePath);
      const {
        buffer: resizedBuffer,
        width,
        height,
      } = await this.deps.resizeImageBuffer(inputBuffer, resolution, endpoint);
      const extension = path.extname(inputFilePath);
      const userId = req.user.id;
      let processedBuffer: Buffer;
      let fileName = `${file_id}__${path.basename(inputFilePath)}`;
      const targetExtension = `.${appConfig?.imageOutputType ?? 'webp'}`;
      if (extension.toLowerCase() === targetExtension) {
        // Already in the target format — upload the resized buffer as-is.
        processedBuffer = resizedBuffer;
      } else {
        const outputFormat = (appConfig?.imageOutputType ?? 'webp') as keyof FormatEnum;
        processedBuffer = await sharp(resizedBuffer).toFormat(outputFormat).toBuffer();
        // Swap the extension via plain string slicing. The previous
        // RegExp-based replacement (`new RegExp(ext + '$')`) threw a
        // SyntaxError for extensions containing regex metacharacters
        // (e.g. ".c++"), since the extension was not escaped.
        const currentExtension = path.extname(fileName);
        fileName = currentExtension
          ? fileName.slice(0, -currentExtension.length) + targetExtension
          : fileName + targetExtension;
      }
      const downloadURL = await saveBufferToS3({
        userId,
        buffer: processedBuffer,
        fileName,
        basePath,
      });
      const bytes = processedBuffer.length;
      return { filepath: downloadURL, bytes, width, height };
    } catch (error) {
      logger.error(
        '[S3ImageService.uploadImageToS3] Error uploading image to S3:',
        (error as Error).message,
      );
      throw error;
    } finally {
      // Image temp files are never re-read by callers, so remove them here
      // unconditionally; a cleanup failure is logged, not rethrown.
      await fs.promises
        .unlink(inputFilePath)
        .catch((e: unknown) =>
          logger.error(
            '[S3ImageService.uploadImageToS3] Failed to delete temp file:',
            (e as Error).message,
          ),
        );
    }
  }
  /**
   * Returns `[updatedFileRecord, filepath]` for an already-uploaded image.
   * The filepath is passed through unchanged while the file record update
   * is awaited.
   */
  async prepareImageURL(file: { file_id: string; filepath: string }): Promise<[TFile, string]> {
    try {
      return await Promise.all([this.deps.updateFile({ file_id: file.file_id }), file.filepath]);
    } catch (error) {
      logger.error(
        '[S3ImageService.prepareImageURL] Error preparing image URL:',
        (error as Error).message,
      );
      throw error;
    }
  }
  /**
   * Uploads an avatar buffer (user or agent) to S3, naming the object with a
   * timestamp and the format sniffed by sharp (default 'png'). When the
   * upload is user-initiated (`manual === 'true'`) and no agentId is given,
   * the new URL is also persisted on the user record.
   */
  async processAvatar({
    buffer,
    userId,
    manual,
    agentId,
    basePath = defaultBasePath,
  }: ProcessAvatarParams): Promise<string> {
    try {
      const metadata = await sharp(buffer).metadata();
      const extension = metadata.format ?? 'png';
      const timestamp = new Date().getTime();
      const fileName = agentId
        ? `agent-${agentId}-avatar-${timestamp}.${extension}`
        : `avatar-${timestamp}.${extension}`;
      const downloadURL = await saveBufferToS3({ userId, buffer, fileName, basePath });
      if (manual === 'true' && !agentId) {
        await this.deps.updateUser(userId, { avatar: downloadURL });
      }
      return downloadURL;
    } catch (error) {
      logger.error(
        '[S3ImageService.processAvatar] Error processing S3 avatar:',
        (error as Error).message,
      );
      throw error;
    }
  }
}

View file

@ -0,0 +1,2 @@
/** Barrel for the S3 storage module: CRUD helpers plus the image service. */
export * from './crud';
export * from './images';

View file

@ -0,0 +1,57 @@
import { logger } from '@librechat/data-schemas';
import { isEnabled } from '~/utils/common';
// Hard ceiling for presigned URL lifetime: 7 days.
const MAX_EXPIRY_SECONDS = 7 * 24 * 60 * 60; // 7 days
// Fallback presigned URL lifetime when the env var is absent or invalid.
const DEFAULT_EXPIRY_SECONDS = 2 * 60; // 2 minutes
const DEFAULT_BASE_PATH = 'images';
/**
 * Resolves the presigned URL expiry from S3_URL_EXPIRY_SECONDS, falling back
 * to the default for missing/invalid values and clamping to the 7-day max.
 */
const parseUrlExpiry = (): number => {
  const raw = process.env.S3_URL_EXPIRY_SECONDS;
  if (raw === undefined) {
    return DEFAULT_EXPIRY_SECONDS;
  }
  const seconds = parseInt(raw, 10);
  if (Number.isNaN(seconds) || seconds <= 0) {
    logger.warn(
      `[S3] Invalid S3_URL_EXPIRY_SECONDS value: "${raw}". Using ${DEFAULT_EXPIRY_SECONDS}s expiry.`,
    );
    return DEFAULT_EXPIRY_SECONDS;
  }
  return Math.min(seconds, MAX_EXPIRY_SECONDS);
};
/**
 * Resolves the optional custom refresh window from S3_REFRESH_EXPIRY_MS.
 * Returns null — meaning "use the default buffer-based refresh logic" —
 * when the variable is unset, empty, or invalid.
 */
const parseRefreshExpiry = (): number | null => {
  const raw = process.env.S3_REFRESH_EXPIRY_MS;
  if (!raw) {
    return null;
  }
  const ms = parseInt(raw, 10);
  if (Number.isNaN(ms) || ms <= 0) {
    logger.warn(
      `[S3] Invalid S3_REFRESH_EXPIRY_MS value: "${raw}". Using default refresh logic.`,
    );
    return null;
  }
  logger.info(`[S3] Using custom refresh expiry time: ${ms}ms`);
  return ms;
};
// Internal module config — not part of the public @librechat/api surface
/**
 * Snapshot of all S3-related environment configuration, resolved once when
 * this module first loads. Changing process.env afterwards has no effect
 * unless the module is re-imported after a module-registry reset.
 */
export const s3Config = {
  /** AWS region for S3 */
  AWS_REGION: process.env.AWS_REGION ?? '',
  /** S3 bucket name */
  AWS_BUCKET_NAME: process.env.AWS_BUCKET_NAME ?? '',
  /** Custom endpoint URL (for MinIO, R2, etc.) */
  AWS_ENDPOINT_URL: process.env.AWS_ENDPOINT_URL,
  /** Use path-style URLs instead of virtual-hosted-style */
  AWS_FORCE_PATH_STYLE: isEnabled(process.env.AWS_FORCE_PATH_STYLE),
  /** Presigned URL expiry in seconds */
  S3_URL_EXPIRY_SECONDS: parseUrlExpiry(),
  /** Custom refresh expiry in milliseconds (null = use default buffer logic) */
  S3_REFRESH_EXPIRY_MS: parseRefreshExpiry(),
  /** Default base path for file storage */
  DEFAULT_BASE_PATH,
};

View file

@ -0,0 +1,60 @@
import type { ServerRequest } from '~/types';
/** Arguments for uploading an in-memory buffer to S3. */
export interface SaveBufferParams {
  userId: string;
  buffer: Buffer;
  fileName: string;
  /** Top-level key prefix; defaults to the module's DEFAULT_BASE_PATH. */
  basePath?: string;
}
/** Arguments for generating a presigned download URL. */
export interface GetURLParams {
  userId: string;
  fileName: string;
  basePath?: string;
  /** When set, the URL forces a download under this name (Content-Disposition override). */
  customFilename?: string | null;
  /** When set, overrides the response Content-Type. */
  contentType?: string | null;
}
/** Arguments for fetching a remote URL and storing its body in S3. */
export interface SaveURLParams {
  userId: string;
  URL: string;
  fileName: string;
  basePath?: string;
}
/** Arguments for streaming a Multer temp-file upload to S3. */
export interface UploadFileParams {
  req: ServerRequest;
  file: Express.Multer.File;
  file_id: string;
  basePath?: string;
}
/** Image uploads additionally carry the endpoint and a resolution preset. */
export interface UploadImageParams extends UploadFileParams {
  endpoint: string;
  /** Resize preset; defaults to 'high'. */
  resolution?: string;
}
/** Result of a generic file upload. */
export interface UploadResult {
  /** Presigned download URL for the stored object. */
  filepath: string;
  /** Size of the uploaded payload in bytes. */
  bytes: number;
}
/** Result of an image upload, including post-resize dimensions. */
export interface ImageUploadResult extends UploadResult {
  width: number;
  height: number;
}
/** Arguments for avatar processing (user or agent). */
export interface ProcessAvatarParams {
  buffer: Buffer;
  userId: string;
  /** String flag; 'true' persists the avatar on the user record unless agentId is set. */
  manual: string;
  agentId?: string;
  basePath?: string;
}
/** Minimal file reference consumed when refreshing a single S3 URL. */
export interface S3FileRef {
  filepath: string;
  source: string;
}
/** Persists refreshed filepaths for a batch of files. */
export type BatchUpdateFn = (files: Array<{ file_id: string; filepath: string }>) => Promise<void>;