mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-03-12 02:52:36 +01:00
🗂️ refactor: Migrate S3 Storage to TypeScript in packages/api (#11947)
* Migrate S3 storage module with unit and integration tests - Migrate S3 CRUD and image operations to packages/api/src/storage/s3/ - Add S3ImageService class with dependency injection - Add unit tests using aws-sdk-client-mock - Add integration tests with real s3 bucket (condition presence of AWS_TEST_BUCKET_NAME) * AI Review Findings Fixes * chore: tests and refactor S3 storage types - Added mock implementations for the 'sharp' library in various test files to improve image processing testing. - Updated type references in S3 storage files from MongoFile to TFile for consistency and type safety. - Refactored S3 CRUD operations to ensure proper handling of file types and improve code clarity. - Enhanced integration tests to validate S3 file operations and error handling more effectively. * chore: rename test file * Remove duplicate import of refreshS3Url * chore: imports order * fix: remove duplicate imports for S3 URL handling in UserController * fix: remove duplicate import of refreshS3FileUrls in files.js * test: Add mock implementations for 'sharp' and '@librechat/api' in UserController tests - Introduced mock functions for the 'sharp' library to facilitate image processing tests, including metadata retrieval and buffer conversion. - Enhanced mocking for '@librechat/api' to ensure consistent behavior in tests, particularly for the needsRefresh and getNewS3URL functions. --------- Co-authored-by: Danny Avila <danny@librechat.ai>
This commit is contained in:
parent
428ef2eb15
commit
ca6ce8fceb
27 changed files with 2455 additions and 1697 deletions
|
|
@ -25,6 +25,7 @@
|
|||
"test:cache-integration:mcp": "jest --testPathPatterns=\"src/mcp/.*\\.cache_integration\\.spec\\.ts$\" --coverage=false",
|
||||
"test:cache-integration:stream": "jest --testPathPatterns=\"src/stream/.*\\.stream_integration\\.spec\\.ts$\" --coverage=false --runInBand --forceExit",
|
||||
"test:cache-integration": "npm run test:cache-integration:core && npm run test:cache-integration:cluster && npm run test:cache-integration:mcp && npm run test:cache-integration:stream",
|
||||
"test:s3-integration": "jest --testPathPatterns=\"src/storage/s3/.*\\.s3_integration\\.spec\\.ts$\" --coverage=false --runInBand",
|
||||
"verify": "npm run test:ci",
|
||||
"b:clean": "bun run rimraf dist",
|
||||
"b:build": "bun run b:clean && bun run rollup -c --silent --bundleConfigAsCjs",
|
||||
|
|
@ -64,6 +65,7 @@
|
|||
"@types/node-fetch": "^2.6.13",
|
||||
"@types/react": "^18.2.18",
|
||||
"@types/winston": "^2.4.4",
|
||||
"aws-sdk-client-mock": "^4.1.0",
|
||||
"jest": "^30.2.0",
|
||||
"jest-junit": "^16.0.0",
|
||||
"librechat-data-provider": "*",
|
||||
|
|
@ -117,6 +119,7 @@
|
|||
"node-fetch": "2.7.0",
|
||||
"pdfjs-dist": "^5.4.624",
|
||||
"rate-limit-redis": "^4.2.0",
|
||||
"sharp": "^0.33.5",
|
||||
"undici": "^7.18.2",
|
||||
"zod": "^3.22.4"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -101,6 +101,14 @@ describe('initializeS3', () => {
|
|||
);
|
||||
});
|
||||
|
||||
it('should throw when AWS_BUCKET_NAME is not set', async () => {
|
||||
delete process.env.AWS_BUCKET_NAME;
|
||||
const { initializeS3 } = await load();
|
||||
expect(() => initializeS3()).toThrow(
|
||||
'[S3] AWS_BUCKET_NAME environment variable is required for S3 operations.',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return the same instance on subsequent calls', async () => {
|
||||
const { MockS3Client, initializeS3 } = await load();
|
||||
const first = initializeS3();
|
||||
|
|
|
|||
|
|
@ -25,6 +25,13 @@ export const initializeS3 = (): S3Client | null => {
|
|||
return null;
|
||||
}
|
||||
|
||||
if (!process.env.AWS_BUCKET_NAME) {
|
||||
throw new Error(
|
||||
'[S3] AWS_BUCKET_NAME environment variable is required for S3 operations. ' +
|
||||
'Please set this environment variable to enable S3 storage.',
|
||||
);
|
||||
}
|
||||
|
||||
// Read the custom endpoint if provided.
|
||||
const endpoint = process.env.AWS_ENDPOINT_URL;
|
||||
const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
|
||||
|
|
|
|||
|
|
@ -37,6 +37,8 @@ export * from './prompts';
|
|||
export * from './endpoints';
|
||||
/* Files */
|
||||
export * from './files';
|
||||
/* Storage */
|
||||
export * from './storage';
|
||||
/* Tools */
|
||||
export * from './tools';
|
||||
/* web search */
|
||||
|
|
|
|||
2
packages/api/src/storage/index.ts
Normal file
2
packages/api/src/storage/index.ts
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
export * from './s3';
|
||||
export * from './types';
|
||||
770
packages/api/src/storage/s3/__tests__/crud.test.ts
Normal file
770
packages/api/src/storage/s3/__tests__/crud.test.ts
Normal file
|
|
@ -0,0 +1,770 @@
|
|||
import fs from 'fs';
|
||||
import { Readable } from 'stream';
|
||||
import { mockClient } from 'aws-sdk-client-mock';
|
||||
import { sdkStreamMixin } from '@smithy/util-stream';
|
||||
import { FileSources } from 'librechat-data-provider';
|
||||
import {
|
||||
S3Client,
|
||||
PutObjectCommand,
|
||||
GetObjectCommand,
|
||||
HeadObjectCommand,
|
||||
DeleteObjectCommand,
|
||||
} from '@aws-sdk/client-s3';
|
||||
import type { TFile } from 'librechat-data-provider';
|
||||
import type { S3FileRef } from '~/storage/types';
|
||||
import type { ServerRequest } from '~/types';
|
||||
|
||||
const s3Mock = mockClient(S3Client);
|
||||
|
||||
jest.mock('fs', () => ({
|
||||
...jest.requireActual('fs'),
|
||||
promises: {
|
||||
stat: jest.fn(),
|
||||
unlink: jest.fn(),
|
||||
},
|
||||
createReadStream: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('@aws-sdk/s3-request-presigner', () => ({
|
||||
getSignedUrl: jest.fn().mockResolvedValue('https://bucket.s3.amazonaws.com/test-key?signed=true'),
|
||||
}));
|
||||
|
||||
jest.mock('~/files', () => ({
|
||||
deleteRagFile: jest.fn().mockResolvedValue(undefined),
|
||||
}));
|
||||
|
||||
jest.mock('@librechat/data-schemas', () => ({
|
||||
logger: {
|
||||
debug: jest.fn(),
|
||||
info: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
error: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
|
||||
import { deleteRagFile } from '~/files';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
|
||||
describe('S3 CRUD', () => {
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
|
||||
beforeAll(() => {
|
||||
originalEnv = { ...process.env };
|
||||
process.env.AWS_REGION = 'us-east-1';
|
||||
process.env.AWS_BUCKET_NAME = 'test-bucket';
|
||||
process.env.S3_URL_EXPIRY_SECONDS = '120';
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
s3Mock.reset();
|
||||
s3Mock.on(PutObjectCommand).resolves({});
|
||||
s3Mock.on(DeleteObjectCommand).resolves({});
|
||||
|
||||
const stream = new Readable();
|
||||
stream.push('test content');
|
||||
stream.push(null);
|
||||
const sdkStream = sdkStreamMixin(stream);
|
||||
s3Mock.on(GetObjectCommand).resolves({ Body: sdkStream });
|
||||
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('getS3Key', () => {
|
||||
it('constructs key from basePath, userId, and fileName', async () => {
|
||||
const { getS3Key } = await import('../crud');
|
||||
const key = getS3Key('images', 'user123', 'file.png');
|
||||
expect(key).toBe('images/user123/file.png');
|
||||
});
|
||||
|
||||
it('handles nested file names', async () => {
|
||||
const { getS3Key } = await import('../crud');
|
||||
const key = getS3Key('files', 'user456', 'folder/subfolder/doc.pdf');
|
||||
expect(key).toBe('files/user456/folder/subfolder/doc.pdf');
|
||||
});
|
||||
|
||||
it('throws if basePath contains a slash', async () => {
|
||||
const { getS3Key } = await import('../crud');
|
||||
expect(() => getS3Key('a/b', 'user123', 'file.png')).toThrow(
|
||||
'[getS3Key] basePath must not contain slashes: "a/b"',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('saveBufferToS3', () => {
|
||||
it('uploads buffer and returns signed URL', async () => {
|
||||
const { saveBufferToS3 } = await import('../crud');
|
||||
const result = await saveBufferToS3({
|
||||
userId: 'user123',
|
||||
buffer: Buffer.from('test'),
|
||||
fileName: 'test.txt',
|
||||
basePath: 'files',
|
||||
});
|
||||
expect(result).toContain('signed=true');
|
||||
expect(s3Mock.commandCalls(PutObjectCommand)).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('calls PutObjectCommand with correct parameters', async () => {
|
||||
const { saveBufferToS3 } = await import('../crud');
|
||||
await saveBufferToS3({
|
||||
userId: 'user123',
|
||||
buffer: Buffer.from('test content'),
|
||||
fileName: 'document.pdf',
|
||||
basePath: 'documents',
|
||||
});
|
||||
|
||||
const calls = s3Mock.commandCalls(PutObjectCommand);
|
||||
expect(calls[0].args[0].input).toEqual({
|
||||
Bucket: 'test-bucket',
|
||||
Key: 'documents/user123/document.pdf',
|
||||
Body: Buffer.from('test content'),
|
||||
});
|
||||
});
|
||||
|
||||
it('uses default basePath if not provided', async () => {
|
||||
const { saveBufferToS3 } = await import('../crud');
|
||||
await saveBufferToS3({
|
||||
userId: 'user123',
|
||||
buffer: Buffer.from('test'),
|
||||
fileName: 'test.txt',
|
||||
});
|
||||
|
||||
const calls = s3Mock.commandCalls(PutObjectCommand);
|
||||
expect(calls[0].args[0].input.Key).toBe('images/user123/test.txt');
|
||||
});
|
||||
|
||||
it('handles S3 upload errors', async () => {
|
||||
s3Mock.on(PutObjectCommand).rejects(new Error('S3 upload failed'));
|
||||
|
||||
const { saveBufferToS3 } = await import('../crud');
|
||||
await expect(
|
||||
saveBufferToS3({
|
||||
userId: 'user123',
|
||||
buffer: Buffer.from('test'),
|
||||
fileName: 'test.txt',
|
||||
}),
|
||||
).rejects.toThrow('S3 upload failed');
|
||||
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
'[saveBufferToS3] Error uploading buffer to S3:',
|
||||
'S3 upload failed',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getS3URL', () => {
|
||||
it('returns signed URL', async () => {
|
||||
const { getS3URL } = await import('../crud');
|
||||
const result = await getS3URL({
|
||||
userId: 'user123',
|
||||
fileName: 'test.txt',
|
||||
basePath: 'files',
|
||||
});
|
||||
expect(result).toContain('signed=true');
|
||||
});
|
||||
|
||||
it('adds custom filename to Content-Disposition header', async () => {
|
||||
const { getS3URL } = await import('../crud');
|
||||
await getS3URL({
|
||||
userId: 'user123',
|
||||
fileName: 'test.pdf',
|
||||
customFilename: 'custom-name.pdf',
|
||||
});
|
||||
|
||||
expect(getSignedUrl).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
input: expect.objectContaining({
|
||||
ResponseContentDisposition: 'attachment; filename="custom-name.pdf"',
|
||||
}),
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('adds custom content type', async () => {
|
||||
const { getS3URL } = await import('../crud');
|
||||
await getS3URL({
|
||||
userId: 'user123',
|
||||
fileName: 'test.pdf',
|
||||
contentType: 'application/pdf',
|
||||
});
|
||||
|
||||
expect(getSignedUrl).toHaveBeenCalledWith(
|
||||
expect.anything(),
|
||||
expect.objectContaining({
|
||||
input: expect.objectContaining({
|
||||
ResponseContentType: 'application/pdf',
|
||||
}),
|
||||
}),
|
||||
expect.anything(),
|
||||
);
|
||||
});
|
||||
|
||||
it('handles errors when getting signed URL', async () => {
|
||||
(getSignedUrl as jest.Mock).mockRejectedValueOnce(new Error('Failed to sign URL'));
|
||||
|
||||
const { getS3URL } = await import('../crud');
|
||||
await expect(
|
||||
getS3URL({
|
||||
userId: 'user123',
|
||||
fileName: 'file.pdf',
|
||||
}),
|
||||
).rejects.toThrow('Failed to sign URL');
|
||||
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
'[getS3URL] Error getting signed URL from S3:',
|
||||
'Failed to sign URL',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('saveURLToS3', () => {
|
||||
beforeEach(() => {
|
||||
global.fetch = jest.fn().mockResolvedValue({
|
||||
ok: true,
|
||||
arrayBuffer: jest.fn().mockResolvedValue(new ArrayBuffer(8)),
|
||||
}) as unknown as typeof fetch;
|
||||
});
|
||||
|
||||
it('fetches file from URL and saves to S3', async () => {
|
||||
const { saveURLToS3 } = await import('../crud');
|
||||
const result = await saveURLToS3({
|
||||
userId: 'user123',
|
||||
URL: 'https://example.com/image.jpg',
|
||||
fileName: 'downloaded.jpg',
|
||||
});
|
||||
|
||||
expect(global.fetch).toHaveBeenCalledWith('https://example.com/image.jpg');
|
||||
expect(s3Mock.commandCalls(PutObjectCommand)).toHaveLength(1);
|
||||
expect(result).toContain('signed=true');
|
||||
});
|
||||
|
||||
it('throws error on non-ok response', async () => {
|
||||
(global.fetch as unknown as jest.Mock).mockResolvedValueOnce({
|
||||
ok: false,
|
||||
status: 404,
|
||||
statusText: 'Not Found',
|
||||
arrayBuffer: jest.fn().mockResolvedValue(new ArrayBuffer(0)),
|
||||
});
|
||||
|
||||
const { saveURLToS3 } = await import('../crud');
|
||||
await expect(
|
||||
saveURLToS3({
|
||||
userId: 'user123',
|
||||
URL: 'https://example.com/missing.jpg',
|
||||
fileName: 'missing.jpg',
|
||||
}),
|
||||
).rejects.toThrow('Failed to fetch URL');
|
||||
});
|
||||
|
||||
it('handles fetch errors', async () => {
|
||||
(global.fetch as unknown as jest.Mock).mockRejectedValueOnce(new Error('Network error'));
|
||||
|
||||
const { saveURLToS3 } = await import('../crud');
|
||||
await expect(
|
||||
saveURLToS3({
|
||||
userId: 'user123',
|
||||
URL: 'https://example.com/image.jpg',
|
||||
fileName: 'downloaded.jpg',
|
||||
}),
|
||||
).rejects.toThrow('Network error');
|
||||
|
||||
expect(logger.error).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteFileFromS3', () => {
|
||||
const mockReq = { user: { id: 'user123' } } as ServerRequest;
|
||||
|
||||
it('deletes a file from S3', async () => {
|
||||
const mockFile = {
|
||||
filepath: 'https://bucket.s3.amazonaws.com/images/user123/file.jpg',
|
||||
file_id: 'file123',
|
||||
} as TFile;
|
||||
|
||||
s3Mock.on(HeadObjectCommand).resolvesOnce({});
|
||||
|
||||
const { deleteFileFromS3 } = await import('../crud');
|
||||
await deleteFileFromS3(mockReq, mockFile);
|
||||
|
||||
expect(deleteRagFile).toHaveBeenCalledWith({ userId: 'user123', file: mockFile });
|
||||
expect(s3Mock.commandCalls(HeadObjectCommand)).toHaveLength(1);
|
||||
expect(s3Mock.commandCalls(DeleteObjectCommand)).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('handles file not found gracefully and cleans up RAG', async () => {
|
||||
const mockFile = {
|
||||
filepath: 'https://bucket.s3.amazonaws.com/images/user123/nonexistent.jpg',
|
||||
file_id: 'file123',
|
||||
} as TFile;
|
||||
|
||||
s3Mock.on(HeadObjectCommand).rejects({ name: 'NotFound' });
|
||||
|
||||
const { deleteFileFromS3 } = await import('../crud');
|
||||
await deleteFileFromS3(mockReq, mockFile);
|
||||
|
||||
expect(logger.warn).toHaveBeenCalled();
|
||||
expect(deleteRagFile).toHaveBeenCalledWith({ userId: 'user123', file: mockFile });
|
||||
expect(s3Mock.commandCalls(DeleteObjectCommand)).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('throws error if user ID does not match', async () => {
|
||||
const mockFile = {
|
||||
filepath: 'https://bucket.s3.amazonaws.com/images/different-user/file.jpg',
|
||||
file_id: 'file123',
|
||||
} as TFile;
|
||||
|
||||
const { deleteFileFromS3 } = await import('../crud');
|
||||
await expect(deleteFileFromS3(mockReq, mockFile)).rejects.toThrow('User ID mismatch');
|
||||
expect(logger.error).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles NoSuchKey error without calling deleteRagFile', async () => {
|
||||
const mockFile = {
|
||||
filepath: 'https://bucket.s3.amazonaws.com/images/user123/file.jpg',
|
||||
file_id: 'file123',
|
||||
} as TFile;
|
||||
|
||||
s3Mock.on(HeadObjectCommand).resolvesOnce({});
|
||||
const noSuchKeyError = Object.assign(new Error('NoSuchKey'), { name: 'NoSuchKey' });
|
||||
s3Mock.on(DeleteObjectCommand).rejects(noSuchKeyError);
|
||||
|
||||
const { deleteFileFromS3 } = await import('../crud');
|
||||
await expect(deleteFileFromS3(mockReq, mockFile)).resolves.toBeUndefined();
|
||||
expect(deleteRagFile).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('uploadFileToS3', () => {
|
||||
const mockReq = { user: { id: 'user123' } } as ServerRequest;
|
||||
|
||||
it('uploads a file from disk to S3', async () => {
|
||||
const mockFile = {
|
||||
path: '/tmp/upload.jpg',
|
||||
originalname: 'photo.jpg',
|
||||
} as Express.Multer.File;
|
||||
|
||||
(fs.promises.stat as jest.Mock).mockResolvedValue({ size: 1024 });
|
||||
(fs.createReadStream as jest.Mock).mockReturnValue(new Readable());
|
||||
|
||||
const { uploadFileToS3 } = await import('../crud');
|
||||
const result = await uploadFileToS3({
|
||||
req: mockReq,
|
||||
file: mockFile,
|
||||
file_id: 'file123',
|
||||
basePath: 'images',
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
filepath: expect.stringContaining('signed=true'),
|
||||
bytes: 1024,
|
||||
});
|
||||
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload.jpg');
|
||||
expect(s3Mock.commandCalls(PutObjectCommand)).toHaveLength(1);
|
||||
expect(fs.promises.unlink).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles upload errors and cleans up temp file', async () => {
|
||||
const mockFile = {
|
||||
path: '/tmp/upload.jpg',
|
||||
originalname: 'photo.jpg',
|
||||
} as Express.Multer.File;
|
||||
|
||||
(fs.promises.stat as jest.Mock).mockResolvedValue({ size: 1024 });
|
||||
(fs.promises.unlink as jest.Mock).mockResolvedValue(undefined);
|
||||
(fs.createReadStream as jest.Mock).mockReturnValue(new Readable());
|
||||
s3Mock.on(PutObjectCommand).rejects(new Error('Upload failed'));
|
||||
|
||||
const { uploadFileToS3 } = await import('../crud');
|
||||
await expect(
|
||||
uploadFileToS3({
|
||||
req: mockReq,
|
||||
file: mockFile,
|
||||
file_id: 'file123',
|
||||
}),
|
||||
).rejects.toThrow('Upload failed');
|
||||
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
'[uploadFileToS3] Error streaming file to S3:',
|
||||
expect.any(Error),
|
||||
);
|
||||
expect(fs.promises.unlink).toHaveBeenCalledWith('/tmp/upload.jpg');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getS3FileStream', () => {
|
||||
it('returns a readable stream for a file', async () => {
|
||||
const { getS3FileStream } = await import('../crud');
|
||||
const result = await getS3FileStream(
|
||||
{} as ServerRequest,
|
||||
'https://bucket.s3.amazonaws.com/images/user123/file.pdf',
|
||||
);
|
||||
|
||||
expect(result).toBeInstanceOf(Readable);
|
||||
expect(s3Mock.commandCalls(GetObjectCommand)).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('handles errors when retrieving stream', async () => {
|
||||
s3Mock.on(GetObjectCommand).rejects(new Error('Stream error'));
|
||||
|
||||
const { getS3FileStream } = await import('../crud');
|
||||
await expect(getS3FileStream({} as ServerRequest, 'images/user123/file.pdf')).rejects.toThrow(
|
||||
'Stream error',
|
||||
);
|
||||
expect(logger.error).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('needsRefresh', () => {
|
||||
it('returns false for non-signed URLs', async () => {
|
||||
const { needsRefresh } = await import('../crud');
|
||||
const result = needsRefresh('https://example.com/file.png', 3600);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('returns true when URL is expired', async () => {
|
||||
const { needsRefresh } = await import('../crud');
|
||||
const pastDate = new Date(Date.now() - 2 * 60 * 60 * 1000);
|
||||
const dateStr = pastDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
|
||||
const url = `https://bucket.s3.amazonaws.com/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=3600`;
|
||||
const result = needsRefresh(url, 3600);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('returns false when URL is not close to expiration', async () => {
|
||||
const { needsRefresh } = await import('../crud');
|
||||
const futureDate = new Date(Date.now() + 10 * 60 * 1000);
|
||||
const dateStr = futureDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
|
||||
const url = `https://bucket.s3.amazonaws.com/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=7200`;
|
||||
const result = needsRefresh(url, 60);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('returns true when missing expiration parameters', async () => {
|
||||
const { needsRefresh } = await import('../crud');
|
||||
const url = 'https://bucket.s3.amazonaws.com/key?X-Amz-Signature=abc';
|
||||
const result = needsRefresh(url, 3600);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('returns true for malformed URLs', async () => {
|
||||
const { needsRefresh } = await import('../crud');
|
||||
const result = needsRefresh('not-a-valid-url', 3600);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getNewS3URL', () => {
|
||||
it('generates a new URL from an existing S3 URL', async () => {
|
||||
const { getNewS3URL } = await import('../crud');
|
||||
const result = await getNewS3URL(
|
||||
'https://bucket.s3.amazonaws.com/images/user123/file.jpg?signature=old',
|
||||
);
|
||||
|
||||
expect(result).toContain('signed=true');
|
||||
});
|
||||
|
||||
it('returns undefined for invalid URLs', async () => {
|
||||
const { getNewS3URL } = await import('../crud');
|
||||
const result = await getNewS3URL('simple-file.txt');
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it('returns undefined when key has insufficient parts', async () => {
|
||||
const { getNewS3URL } = await import('../crud');
|
||||
// Key with only 2 parts (basePath/userId but no fileName)
|
||||
const result = await getNewS3URL('https://bucket.s3.amazonaws.com/images/user123');
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('refreshS3FileUrls', () => {
|
||||
it('refreshes expired URLs for multiple files', async () => {
|
||||
const { refreshS3FileUrls } = await import('../crud');
|
||||
|
||||
const pastDate = new Date(Date.now() - 2 * 60 * 60 * 1000);
|
||||
const dateStr = pastDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
|
||||
|
||||
const files = [
|
||||
{
|
||||
file_id: 'file1',
|
||||
source: FileSources.s3,
|
||||
filepath: `https://bucket.s3.amazonaws.com/images/user123/file1.jpg?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
|
||||
},
|
||||
{
|
||||
file_id: 'file2',
|
||||
source: FileSources.s3,
|
||||
filepath: `https://bucket.s3.amazonaws.com/images/user456/file2.jpg?X-Amz-Signature=def&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
|
||||
},
|
||||
];
|
||||
|
||||
const mockBatchUpdate = jest.fn().mockResolvedValue(undefined);
|
||||
|
||||
const result = await refreshS3FileUrls(files as TFile[], mockBatchUpdate, 60);
|
||||
|
||||
expect(result[0].filepath).toContain('signed=true');
|
||||
expect(result[1].filepath).toContain('signed=true');
|
||||
expect(mockBatchUpdate).toHaveBeenCalledWith([
|
||||
{ file_id: 'file1', filepath: expect.stringContaining('signed=true') },
|
||||
{ file_id: 'file2', filepath: expect.stringContaining('signed=true') },
|
||||
]);
|
||||
});
|
||||
|
||||
it('skips non-S3 files', async () => {
|
||||
const { refreshS3FileUrls } = await import('../crud');
|
||||
|
||||
const files = [
|
||||
{
|
||||
file_id: 'file1',
|
||||
source: 'local',
|
||||
filepath: '/local/path/file.jpg',
|
||||
},
|
||||
];
|
||||
|
||||
const mockBatchUpdate = jest.fn();
|
||||
|
||||
const result = await refreshS3FileUrls(files as TFile[], mockBatchUpdate);
|
||||
|
||||
expect(result).toEqual(files);
|
||||
expect(mockBatchUpdate).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles empty or invalid input', async () => {
|
||||
const { refreshS3FileUrls } = await import('../crud');
|
||||
const mockBatchUpdate = jest.fn();
|
||||
|
||||
const result1 = await refreshS3FileUrls(null, mockBatchUpdate);
|
||||
expect(result1).toEqual([]);
|
||||
|
||||
const result2 = await refreshS3FileUrls(undefined, mockBatchUpdate);
|
||||
expect(result2).toEqual([]);
|
||||
|
||||
const result3 = await refreshS3FileUrls([], mockBatchUpdate);
|
||||
expect(result3).toEqual([]);
|
||||
|
||||
expect(mockBatchUpdate).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('refreshS3Url', () => {
|
||||
it('refreshes an expired S3 URL', async () => {
|
||||
const { refreshS3Url } = await import('../crud');
|
||||
|
||||
const pastDate = new Date(Date.now() - 2 * 60 * 60 * 1000);
|
||||
const dateStr = pastDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
|
||||
|
||||
const fileObj: S3FileRef = {
|
||||
source: FileSources.s3,
|
||||
filepath: `https://bucket.s3.amazonaws.com/images/user123/file.jpg?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
|
||||
};
|
||||
|
||||
const result = await refreshS3Url(fileObj, 60);
|
||||
|
||||
expect(result).toContain('signed=true');
|
||||
});
|
||||
|
||||
it('returns original URL if not expired', async () => {
|
||||
const { refreshS3Url } = await import('../crud');
|
||||
|
||||
const fileObj: S3FileRef = {
|
||||
source: FileSources.s3,
|
||||
filepath: 'https://example.com/proxy/file.jpg',
|
||||
};
|
||||
|
||||
const result = await refreshS3Url(fileObj, 3600);
|
||||
|
||||
expect(result).toBe(fileObj.filepath);
|
||||
});
|
||||
|
||||
it('returns empty string for null input', async () => {
|
||||
const { refreshS3Url } = await import('../crud');
|
||||
const result = await refreshS3Url(null as unknown as S3FileRef);
|
||||
expect(result).toBe('');
|
||||
});
|
||||
|
||||
it('returns original URL for non-S3 files', async () => {
|
||||
const { refreshS3Url } = await import('../crud');
|
||||
|
||||
const fileObj: S3FileRef = {
|
||||
source: 'local',
|
||||
filepath: '/local/path/file.jpg',
|
||||
};
|
||||
|
||||
const result = await refreshS3Url(fileObj);
|
||||
|
||||
expect(result).toBe(fileObj.filepath);
|
||||
});
|
||||
|
||||
it('handles errors and returns original URL', async () => {
|
||||
(getSignedUrl as jest.Mock).mockRejectedValueOnce(new Error('Refresh failed'));
|
||||
|
||||
const { refreshS3Url } = await import('../crud');
|
||||
|
||||
const pastDate = new Date(Date.now() - 2 * 60 * 60 * 1000);
|
||||
const dateStr = pastDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
|
||||
|
||||
const fileObj: S3FileRef = {
|
||||
source: FileSources.s3,
|
||||
filepath: `https://bucket.s3.amazonaws.com/images/user123/file.jpg?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=60`,
|
||||
};
|
||||
|
||||
const result = await refreshS3Url(fileObj, 60);
|
||||
|
||||
expect(result).toBe(fileObj.filepath);
|
||||
expect(logger.error).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('extractKeyFromS3Url', () => {
|
||||
it('extracts key from virtual-hosted-style URL', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url('https://bucket.s3.amazonaws.com/images/user123/file.png');
|
||||
expect(key).toBe('images/user123/file.png');
|
||||
});
|
||||
|
||||
it('returns key as-is when not a URL', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url('images/user123/file.png');
|
||||
expect(key).toBe('images/user123/file.png');
|
||||
});
|
||||
|
||||
it('throws on empty input', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
expect(() => extractKeyFromS3Url('')).toThrow('Invalid input: URL or key is empty');
|
||||
});
|
||||
|
||||
it('handles URL with query parameters', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url(
|
||||
'https://bucket.s3.amazonaws.com/images/user123/file.png?X-Amz-Signature=abc',
|
||||
);
|
||||
expect(key).toBe('images/user123/file.png');
|
||||
});
|
||||
|
||||
it('extracts key from path-style regional endpoint', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url(
|
||||
'https://s3.us-west-2.amazonaws.com/test-bucket/dogs/puppy.jpg',
|
||||
);
|
||||
expect(key).toBe('dogs/puppy.jpg');
|
||||
});
|
||||
|
||||
it('extracts key from virtual-hosted regional endpoint', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url(
|
||||
'https://test-bucket.s3.us-west-2.amazonaws.com/dogs/puppy.png',
|
||||
);
|
||||
expect(key).toBe('dogs/puppy.png');
|
||||
});
|
||||
|
||||
it('extracts key from legacy s3-region format', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url(
|
||||
'https://test-bucket.s3-us-west-2.amazonaws.com/cats/kitten.png',
|
||||
);
|
||||
expect(key).toBe('cats/kitten.png');
|
||||
});
|
||||
|
||||
it('extracts key from legacy global endpoint', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url('https://test-bucket.s3.amazonaws.com/dogs/puppy.png');
|
||||
expect(key).toBe('dogs/puppy.png');
|
||||
});
|
||||
|
||||
it('handles key with leading slash by removing it', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url('/images/user123/file.jpg');
|
||||
expect(key).toBe('images/user123/file.jpg');
|
||||
});
|
||||
|
||||
it('handles simple key without slashes', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url('simple-file.txt');
|
||||
expect(key).toBe('simple-file.txt');
|
||||
});
|
||||
|
||||
it('handles key with only two parts', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url('folder/file.txt');
|
||||
expect(key).toBe('folder/file.txt');
|
||||
});
|
||||
|
||||
it('handles URLs with encoded characters', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url(
|
||||
'https://bucket.s3.amazonaws.com/test-bucket/images/user123/my%20file%20name.jpg',
|
||||
);
|
||||
expect(key).toBe('images/user123/my%20file%20name.jpg');
|
||||
});
|
||||
|
||||
it('handles deep nested paths', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url(
|
||||
'https://bucket.s3.amazonaws.com/test-bucket/a/b/c/d/e/f/file.jpg',
|
||||
);
|
||||
expect(key).toBe('a/b/c/d/e/f/file.jpg');
|
||||
});
|
||||
|
||||
it('returns empty string for URL with only bucket (no key)', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url('https://s3.us-west-2.amazonaws.com/my-bucket');
|
||||
expect(key).toBe('');
|
||||
expect(logger.warn).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('handles malformed URL and returns input', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const malformedUrl = 'https://invalid url with spaces.com/key';
|
||||
const result = extractKeyFromS3Url(malformedUrl);
|
||||
|
||||
expect(logger.error).toHaveBeenCalled();
|
||||
expect(result).toBe(malformedUrl);
|
||||
});
|
||||
|
||||
it('strips bucket from custom endpoint URLs (MinIO, R2)', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = extractKeyFromS3Url(
|
||||
'https://minio.example.com/test-bucket/images/user123/file.jpg',
|
||||
);
|
||||
expect(key).toBe('images/user123/file.jpg');
|
||||
});
|
||||
});
|
||||
|
||||
describe('needsRefresh with S3_REFRESH_EXPIRY_MS set', () => {
|
||||
beforeEach(() => {
|
||||
process.env.S3_REFRESH_EXPIRY_MS = '60000'; // 1 minute
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
delete process.env.S3_REFRESH_EXPIRY_MS;
|
||||
});
|
||||
|
||||
it('returns true when URL age exceeds S3_REFRESH_EXPIRY_MS', async () => {
|
||||
const { needsRefresh } = await import('../crud');
|
||||
// URL created 2 minutes ago
|
||||
const oldDate = new Date(Date.now() - 2 * 60 * 1000);
|
||||
const dateStr = oldDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
|
||||
const url = `https://bucket.s3.amazonaws.com/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=3600`;
|
||||
|
||||
const result = needsRefresh(url, 60);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('returns false when URL age is under S3_REFRESH_EXPIRY_MS', async () => {
|
||||
const { needsRefresh } = await import('../crud');
|
||||
// URL created 30 seconds ago
|
||||
const recentDate = new Date(Date.now() - 30 * 1000);
|
||||
const dateStr = recentDate.toISOString().replace(/[-:]/g, '').split('.')[0] + 'Z';
|
||||
const url = `https://bucket.s3.amazonaws.com/key?X-Amz-Signature=abc&X-Amz-Date=${dateStr}&X-Amz-Expires=3600`;
|
||||
|
||||
const result = needsRefresh(url, 60);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
182
packages/api/src/storage/s3/__tests__/images.test.ts
Normal file
182
packages/api/src/storage/s3/__tests__/images.test.ts
Normal file
|
|
@ -0,0 +1,182 @@
|
|||
import fs from 'fs';
|
||||
import type { S3ImageServiceDeps } from '~/storage/s3/images';
|
||||
import type { ServerRequest } from '~/types';
|
||||
import { S3ImageService } from '~/storage/s3/images';
|
||||
import { saveBufferToS3 } from '~/storage/s3/crud';
|
||||
|
||||
/**
 * Mock the filesystem so tests never touch the real disk: readFile is
 * configured per-test; unlink resolves so temp-file cleanup paths can be
 * asserted without side effects.
 */
jest.mock('fs', () => ({
  ...jest.requireActual('fs'),
  promises: {
    readFile: jest.fn(),
    unlink: jest.fn().mockResolvedValue(undefined),
  },
}));

/** Stub the S3 upload so tests assert call shape rather than network I/O. */
jest.mock('../crud', () => ({
  saveBufferToS3: jest
    .fn()
    .mockResolvedValue('https://bucket.s3.amazonaws.com/avatar.png?signed=true'),
}));

// Typed handle to the mocked upload for call-argument assertions below.
const mockSaveBufferToS3 = jest.mocked(saveBufferToS3);

/** Minimal sharp mock: reports a 100x100 PNG and yields a fixed buffer. */
jest.mock('sharp', () => {
  return jest.fn(() => ({
    metadata: jest.fn().mockResolvedValue({ format: 'png', width: 100, height: 100 }),
    toFormat: jest.fn().mockReturnThis(),
    toBuffer: jest.fn().mockResolvedValue(Buffer.from('processed')),
  }));
});
|
||||
|
||||
/** Unit tests for S3ImageService — all I/O (fs, sharp, S3 upload) is mocked above. */
describe('S3ImageService', () => {
  let service: S3ImageService;
  let mockDeps: S3ImageServiceDeps;

  beforeEach(() => {
    jest.clearAllMocks();

    // Fresh dependency stubs per test so call counts do not leak between cases.
    mockDeps = {
      resizeImageBuffer: jest.fn().mockResolvedValue({
        buffer: Buffer.from('resized'),
        width: 100,
        height: 100,
      }),
      updateUser: jest.fn().mockResolvedValue(undefined),
      updateFile: jest.fn().mockResolvedValue(undefined),
    };

    service = new S3ImageService(mockDeps);
  });

  describe('processAvatar', () => {
    it('uploads avatar and returns URL', async () => {
      const result = await service.processAvatar({
        buffer: Buffer.from('test'),
        userId: 'user123',
        manual: 'false',
      });

      // The mocked saveBufferToS3 always returns a "?signed=true" URL.
      expect(result).toContain('signed=true');
    });

    it('updates user avatar when manual is true', async () => {
      await service.processAvatar({
        buffer: Buffer.from('test'),
        userId: 'user123',
        manual: 'true',
      });

      expect(mockDeps.updateUser).toHaveBeenCalledWith(
        'user123',
        expect.objectContaining({ avatar: expect.any(String) }),
      );
    });

    it('does not update user when agentId is provided', async () => {
      await service.processAvatar({
        buffer: Buffer.from('test'),
        userId: 'user123',
        manual: 'true',
        agentId: 'agent456',
      });

      // Agent avatars belong to the agent, not to the user profile.
      expect(mockDeps.updateUser).not.toHaveBeenCalled();
    });

    it('generates agent avatar filename when agentId provided', async () => {
      await service.processAvatar({
        buffer: Buffer.from('test'),
        userId: 'user123',
        manual: 'false',
        agentId: 'agent456',
      });

      expect(mockSaveBufferToS3).toHaveBeenCalledWith(
        expect.objectContaining({
          fileName: expect.stringContaining('agent-agent456-avatar-'),
        }),
      );
    });
  });

  describe('prepareImageURL', () => {
    it('returns tuple with resolved promise and filepath', async () => {
      const file = { file_id: 'file123', filepath: 'https://example.com/file.png' };
      const result = await service.prepareImageURL(file);

      expect(Array.isArray(result)).toBe(true);
      expect(result[1]).toBe('https://example.com/file.png');
    });

    it('calls updateFile with file_id', async () => {
      const file = { file_id: 'file123', filepath: 'https://example.com/file.png' };
      await service.prepareImageURL(file);

      expect(mockDeps.updateFile).toHaveBeenCalledWith({ file_id: 'file123' });
    });
  });

  describe('constructor', () => {
    it('requires dependencies to be passed', () => {
      const newService = new S3ImageService(mockDeps);
      expect(newService).toBeInstanceOf(S3ImageService);
    });
  });

  describe('uploadImageToS3', () => {
    const mockReq = {
      user: { id: 'user123' },
      config: { imageOutputType: 'webp' },
    } as unknown as ServerRequest;

    it('deletes temp file on early failure (readFile throws)', async () => {
      (fs.promises.readFile as jest.Mock).mockRejectedValueOnce(
        new Error('ENOENT: no such file or directory'),
      );
      (fs.promises.unlink as jest.Mock).mockResolvedValueOnce(undefined);

      await expect(
        service.uploadImageToS3({
          req: mockReq,
          file: { path: '/tmp/input.jpg' } as Express.Multer.File,
          file_id: 'file123',
          endpoint: 'openai',
        }),
      ).rejects.toThrow('ENOENT: no such file or directory');

      // The temp file must be cleaned up even when the read itself failed.
      expect(fs.promises.unlink).toHaveBeenCalledWith('/tmp/input.jpg');
    });

    it('deletes temp file on resize failure (resizeImageBuffer throws)', async () => {
      (fs.promises.readFile as jest.Mock).mockResolvedValueOnce(Buffer.from('raw'));
      (mockDeps.resizeImageBuffer as jest.Mock).mockRejectedValueOnce(new Error('Resize failed'));
      (fs.promises.unlink as jest.Mock).mockResolvedValueOnce(undefined);

      await expect(
        service.uploadImageToS3({
          req: mockReq,
          file: { path: '/tmp/input.jpg' } as Express.Multer.File,
          file_id: 'file123',
          endpoint: 'openai',
        }),
      ).rejects.toThrow('Resize failed');

      expect(fs.promises.unlink).toHaveBeenCalledWith('/tmp/input.jpg');
    });

    it('deletes temp file on success', async () => {
      (fs.promises.readFile as jest.Mock).mockResolvedValueOnce(Buffer.from('raw'));
      (fs.promises.unlink as jest.Mock).mockResolvedValueOnce(undefined);

      const result = await service.uploadImageToS3({
        req: mockReq,
        file: { path: '/tmp/input.webp' } as Express.Multer.File,
        file_id: 'file123',
        endpoint: 'openai',
      });

      expect(result.filepath).toContain('signed=true');
      expect(fs.promises.unlink).toHaveBeenCalledWith('/tmp/input.webp');
    });
  });
});
|
||||
529
packages/api/src/storage/s3/__tests__/s3.integration.spec.ts
Normal file
529
packages/api/src/storage/s3/__tests__/s3.integration.spec.ts
Normal file
|
|
@ -0,0 +1,529 @@
|
|||
/**
|
||||
* S3 Integration Tests
|
||||
*
|
||||
* These tests run against a REAL S3 bucket. They are skipped when AWS_TEST_BUCKET_NAME is not set.
|
||||
*
|
||||
* Run with:
|
||||
* AWS_TEST_BUCKET_NAME=my-test-bucket npx jest s3.s3_integration
|
||||
*
|
||||
* Required env vars:
|
||||
* - AWS_TEST_BUCKET_NAME: Dedicated test bucket (gates test execution)
|
||||
* - AWS_REGION: Defaults to 'us-east-1'
|
||||
* - AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY => to avoid error: A dynamic import callback was invoked without -experimental-vm-modules — the AWS SDK credential provider
|
||||
*/
|
||||
import fs from 'fs';
|
||||
import os from 'os';
|
||||
import path from 'path';
|
||||
import { Readable } from 'stream';
|
||||
import { ListObjectsV2Command, DeleteObjectsCommand } from '@aws-sdk/client-s3';
|
||||
import type { S3Client } from '@aws-sdk/client-s3';
|
||||
import type { ServerRequest } from '~/types';
|
||||
|
||||
/** Smallest valid 1x1 PNG (signature + IHDR + IDAT + IEND) for avatar tests. */
const MINIMAL_PNG = Buffer.from([
  0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44, 0x52,
  0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x02, 0x00, 0x00, 0x00, 0x90, 0x77, 0x53,
  0xde, 0x00, 0x00, 0x00, 0x0c, 0x49, 0x44, 0x41, 0x54, 0x08, 0xd7, 0x63, 0xf8, 0xff, 0xff, 0x3f,
  0x00, 0x05, 0xfe, 0x02, 0xfe, 0xdc, 0xcc, 0x59, 0xe7, 0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4e,
  0x44, 0xae, 0x42, 0x60, 0x82,
]);

// Presence of this env var gates the whole integration suite.
const TEST_BUCKET = process.env.AWS_TEST_BUCKET_NAME;
const TEST_USER_ID = 'test-user-123';
// Unique per run so concurrent/aborted runs never collide, and afterAll
// cleanup can safely delete everything under this prefix.
const TEST_RUN_ID = `integration-test-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
const TEST_BASE_PATH = TEST_RUN_ID;
|
||||
|
||||
async function deleteAllWithPrefix(s3: S3Client, bucket: string, prefix: string): Promise<void> {
|
||||
let continuationToken: string | undefined;
|
||||
|
||||
do {
|
||||
const listCommand = new ListObjectsV2Command({
|
||||
Bucket: bucket,
|
||||
Prefix: prefix,
|
||||
ContinuationToken: continuationToken,
|
||||
});
|
||||
const response = await s3.send(listCommand);
|
||||
|
||||
if (response.Contents?.length) {
|
||||
const deleteCommand = new DeleteObjectsCommand({
|
||||
Bucket: bucket,
|
||||
Delete: {
|
||||
Objects: response.Contents.filter(
|
||||
(obj): obj is typeof obj & { Key: string } => obj.Key !== undefined,
|
||||
).map((obj) => ({ Key: obj.Key })),
|
||||
},
|
||||
});
|
||||
await s3.send(deleteCommand);
|
||||
}
|
||||
|
||||
continuationToken = response.IsTruncated ? response.NextContinuationToken : undefined;
|
||||
} while (continuationToken);
|
||||
}
|
||||
|
||||
describe('S3 Integration Tests', () => {
|
||||
if (!TEST_BUCKET) {
|
||||
// eslint-disable-next-line jest/expect-expect
|
||||
it.skip('Skipped: AWS_TEST_BUCKET_NAME not configured', () => {});
|
||||
return;
|
||||
}
|
||||
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
let tempDir: string;
|
||||
let s3Client: S3Client | null = null;
|
||||
|
||||
beforeAll(async () => {
|
||||
originalEnv = { ...process.env };
|
||||
|
||||
// Use dedicated test bucket
|
||||
process.env.AWS_BUCKET_NAME = TEST_BUCKET;
|
||||
process.env.AWS_REGION = process.env.AWS_REGION || 'us-east-1';
|
||||
|
||||
// Reset modules so the next import picks up the updated env vars.
|
||||
// s3Client is retained as a plain instance — it remains valid even though
|
||||
// beforeEach/afterEach call resetModules() for per-test isolation.
|
||||
jest.resetModules();
|
||||
const { initializeS3 } = await import('~/cdn/s3');
|
||||
s3Client = initializeS3();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 's3-integration-'));
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
if (tempDir && fs.existsSync(tempDir)) {
|
||||
fs.rmSync(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
// Clean up all test files from this run
|
||||
if (s3Client && TEST_BUCKET) {
|
||||
await deleteAllWithPrefix(s3Client, TEST_BUCKET, TEST_RUN_ID);
|
||||
}
|
||||
process.env = originalEnv;
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
describe('getS3Key', () => {
|
||||
it('constructs key from basePath, userId, and fileName', async () => {
|
||||
const { getS3Key } = await import('../crud');
|
||||
const key = getS3Key(TEST_BASE_PATH, TEST_USER_ID, 'test-file.txt');
|
||||
expect(key).toBe(`${TEST_BASE_PATH}/${TEST_USER_ID}/test-file.txt`);
|
||||
});
|
||||
|
||||
it('handles nested file names', async () => {
|
||||
const { getS3Key } = await import('../crud');
|
||||
const key = getS3Key(TEST_BASE_PATH, TEST_USER_ID, 'folder/nested/file.pdf');
|
||||
expect(key).toBe(`${TEST_BASE_PATH}/${TEST_USER_ID}/folder/nested/file.pdf`);
|
||||
});
|
||||
});
|
||||
|
||||
describe('saveBufferToS3 and getS3URL', () => {
|
||||
it('uploads buffer and returns signed URL', async () => {
|
||||
const { saveBufferToS3 } = await import('../crud');
|
||||
const testContent = 'Hello, S3!';
|
||||
const buffer = Buffer.from(testContent);
|
||||
const fileName = `test-${Date.now()}.txt`;
|
||||
|
||||
const downloadURL = await saveBufferToS3({
|
||||
userId: TEST_USER_ID,
|
||||
buffer,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
expect(downloadURL).toBeDefined();
|
||||
expect(downloadURL).toContain('X-Amz-Signature');
|
||||
expect(downloadURL).toContain(fileName);
|
||||
});
|
||||
|
||||
it('can get signed URL for existing file', async () => {
|
||||
const { saveBufferToS3, getS3URL } = await import('../crud');
|
||||
const buffer = Buffer.from('test content for URL');
|
||||
const fileName = `url-test-${Date.now()}.txt`;
|
||||
|
||||
await saveBufferToS3({
|
||||
userId: TEST_USER_ID,
|
||||
buffer,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
const signedUrl = await getS3URL({
|
||||
userId: TEST_USER_ID,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
expect(signedUrl).toBeDefined();
|
||||
expect(signedUrl).toContain('X-Amz-Signature');
|
||||
});
|
||||
|
||||
it('can get signed URL with custom filename and content type', async () => {
|
||||
const { saveBufferToS3, getS3URL } = await import('../crud');
|
||||
const buffer = Buffer.from('custom headers test');
|
||||
const fileName = `headers-test-${Date.now()}.txt`;
|
||||
|
||||
await saveBufferToS3({
|
||||
userId: TEST_USER_ID,
|
||||
buffer,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
const signedUrl = await getS3URL({
|
||||
userId: TEST_USER_ID,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
customFilename: 'download.txt',
|
||||
contentType: 'text/plain',
|
||||
});
|
||||
|
||||
expect(signedUrl).toContain('response-content-disposition');
|
||||
expect(signedUrl).toContain('response-content-type');
|
||||
});
|
||||
});
|
||||
|
||||
describe('saveURLToS3', () => {
|
||||
it('fetches URL content and uploads to S3', async () => {
|
||||
const { saveURLToS3 } = await import('../crud');
|
||||
const fileName = `url-upload-${Date.now()}.json`;
|
||||
|
||||
const downloadURL = await saveURLToS3({
|
||||
userId: TEST_USER_ID,
|
||||
URL: 'https://raw.githubusercontent.com/danny-avila/LibreChat/main/package.json',
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
expect(downloadURL).toBeDefined();
|
||||
expect(downloadURL).toContain('X-Amz-Signature');
|
||||
});
|
||||
});
|
||||
|
||||
describe('extractKeyFromS3Url', () => {
|
||||
it('extracts key from signed URL', async () => {
|
||||
const { saveBufferToS3, extractKeyFromS3Url } = await import('../crud');
|
||||
const buffer = Buffer.from('extract key test');
|
||||
const fileName = `extract-key-${Date.now()}.txt`;
|
||||
|
||||
const signedUrl = await saveBufferToS3({
|
||||
userId: TEST_USER_ID,
|
||||
buffer,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
const extractedKey = extractKeyFromS3Url(signedUrl);
|
||||
expect(extractedKey).toBe(`${TEST_BASE_PATH}/${TEST_USER_ID}/${fileName}`);
|
||||
});
|
||||
|
||||
it('returns key as-is when not a URL', async () => {
|
||||
const { extractKeyFromS3Url } = await import('../crud');
|
||||
const key = `${TEST_BASE_PATH}/${TEST_USER_ID}/file.txt`;
|
||||
expect(extractKeyFromS3Url(key)).toBe(key);
|
||||
});
|
||||
});
|
||||
|
||||
describe('uploadFileToS3', () => {
|
||||
it('uploads file and returns filepath with bytes', async () => {
|
||||
const { uploadFileToS3 } = await import('../crud');
|
||||
const testContent = 'File upload test content';
|
||||
const testFilePath = path.join(tempDir, 'upload-test.txt');
|
||||
fs.writeFileSync(testFilePath, testContent);
|
||||
|
||||
const mockReq = {
|
||||
user: { id: TEST_USER_ID },
|
||||
} as ServerRequest;
|
||||
|
||||
const mockFile = {
|
||||
path: testFilePath,
|
||||
originalname: 'upload-test.txt',
|
||||
fieldname: 'file',
|
||||
encoding: '7bit',
|
||||
mimetype: 'text/plain',
|
||||
size: Buffer.byteLength(testContent),
|
||||
stream: fs.createReadStream(testFilePath),
|
||||
destination: tempDir,
|
||||
filename: 'upload-test.txt',
|
||||
buffer: Buffer.from(testContent),
|
||||
} as Express.Multer.File;
|
||||
|
||||
const fileId = `file-${Date.now()}`;
|
||||
|
||||
const result = await uploadFileToS3({
|
||||
req: mockReq,
|
||||
file: mockFile,
|
||||
file_id: fileId,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
expect(result.filepath).toBeDefined();
|
||||
expect(result.filepath).toContain('X-Amz-Signature');
|
||||
expect(result.bytes).toBe(Buffer.byteLength(testContent));
|
||||
});
|
||||
|
||||
it('throws error when user is not authenticated', async () => {
|
||||
const { uploadFileToS3 } = await import('../crud');
|
||||
const mockReq = {} as ServerRequest;
|
||||
const mockFile = {
|
||||
path: '/fake/path.txt',
|
||||
originalname: 'test.txt',
|
||||
} as Express.Multer.File;
|
||||
|
||||
await expect(
|
||||
uploadFileToS3({
|
||||
req: mockReq,
|
||||
file: mockFile,
|
||||
file_id: 'test-id',
|
||||
basePath: TEST_BASE_PATH,
|
||||
}),
|
||||
).rejects.toThrow('User not authenticated');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getS3FileStream', () => {
|
||||
it('returns readable stream for existing file', async () => {
|
||||
const { saveBufferToS3, getS3FileStream } = await import('../crud');
|
||||
const testContent = 'Stream test content';
|
||||
const buffer = Buffer.from(testContent);
|
||||
const fileName = `stream-test-${Date.now()}.txt`;
|
||||
|
||||
const signedUrl = await saveBufferToS3({
|
||||
userId: TEST_USER_ID,
|
||||
buffer,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
const mockReq = {
|
||||
user: { id: TEST_USER_ID },
|
||||
} as ServerRequest;
|
||||
|
||||
const stream = await getS3FileStream(mockReq, signedUrl);
|
||||
|
||||
expect(stream).toBeInstanceOf(Readable);
|
||||
|
||||
const chunks: Uint8Array[] = [];
|
||||
for await (const chunk of stream) {
|
||||
chunks.push(chunk as Uint8Array);
|
||||
}
|
||||
const downloadedContent = Buffer.concat(chunks).toString();
|
||||
expect(downloadedContent).toBe(testContent);
|
||||
});
|
||||
});
|
||||
|
||||
describe('needsRefresh', () => {
|
||||
it('returns false for non-signed URLs', async () => {
|
||||
const { needsRefresh } = await import('../crud');
|
||||
expect(needsRefresh('https://example.com/file.png', 3600)).toBe(false);
|
||||
});
|
||||
|
||||
it('returns true for expired signed URLs', async () => {
|
||||
const { saveBufferToS3, needsRefresh } = await import('../crud');
|
||||
const buffer = Buffer.from('refresh test');
|
||||
const fileName = `refresh-test-${Date.now()}.txt`;
|
||||
|
||||
const signedUrl = await saveBufferToS3({
|
||||
userId: TEST_USER_ID,
|
||||
buffer,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
const result = needsRefresh(signedUrl, 999999);
|
||||
expect(result).toBe(true);
|
||||
});
|
||||
|
||||
it('returns false for fresh signed URLs', async () => {
|
||||
const { saveBufferToS3, needsRefresh } = await import('../crud');
|
||||
const buffer = Buffer.from('fresh test');
|
||||
const fileName = `fresh-test-${Date.now()}.txt`;
|
||||
|
||||
const signedUrl = await saveBufferToS3({
|
||||
userId: TEST_USER_ID,
|
||||
buffer,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
const result = needsRefresh(signedUrl, 60);
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getNewS3URL', () => {
|
||||
it('generates signed URL from existing URL', async () => {
|
||||
const { saveBufferToS3, getNewS3URL } = await import('../crud');
|
||||
const buffer = Buffer.from('new url test');
|
||||
const fileName = `new-url-${Date.now()}.txt`;
|
||||
|
||||
const originalUrl = await saveBufferToS3({
|
||||
userId: TEST_USER_ID,
|
||||
buffer,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
const newUrl = await getNewS3URL(originalUrl);
|
||||
|
||||
expect(newUrl).toBeDefined();
|
||||
expect(newUrl).toContain('X-Amz-Signature');
|
||||
expect(newUrl).toContain(fileName);
|
||||
});
|
||||
});
|
||||
|
||||
describe('refreshS3Url', () => {
|
||||
it('returns original URL for non-S3 source', async () => {
|
||||
const { refreshS3Url } = await import('../crud');
|
||||
const fileObj = {
|
||||
filepath: 'https://example.com/file.png',
|
||||
source: 'local',
|
||||
};
|
||||
|
||||
const result = await refreshS3Url(fileObj, 3600);
|
||||
expect(result).toBe(fileObj.filepath);
|
||||
});
|
||||
|
||||
it('refreshes URL for S3 source when needed', async () => {
|
||||
const { saveBufferToS3, refreshS3Url } = await import('../crud');
|
||||
const buffer = Buffer.from('s3 refresh test');
|
||||
const fileName = `s3-refresh-${Date.now()}.txt`;
|
||||
|
||||
const originalUrl = await saveBufferToS3({
|
||||
userId: TEST_USER_ID,
|
||||
buffer,
|
||||
fileName,
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
const fileObj = {
|
||||
filepath: originalUrl,
|
||||
source: 's3',
|
||||
};
|
||||
|
||||
const newUrl = await refreshS3Url(fileObj, 999999);
|
||||
|
||||
expect(newUrl).toBeDefined();
|
||||
expect(newUrl).toContain('X-Amz-Signature');
|
||||
});
|
||||
});
|
||||
|
||||
describe('S3ImageService', () => {
|
||||
it('uploads avatar and returns URL', async () => {
|
||||
const { S3ImageService } = await import('../images');
|
||||
|
||||
const mockDeps = {
|
||||
resizeImageBuffer: jest.fn().mockImplementation(async (buffer: Buffer) => ({
|
||||
buffer,
|
||||
width: 100,
|
||||
height: 100,
|
||||
})),
|
||||
updateUser: jest.fn().mockResolvedValue(undefined),
|
||||
updateFile: jest.fn().mockResolvedValue(undefined),
|
||||
};
|
||||
|
||||
const imageService = new S3ImageService(mockDeps);
|
||||
|
||||
const pngBuffer = MINIMAL_PNG;
|
||||
|
||||
const result = await imageService.processAvatar({
|
||||
buffer: pngBuffer,
|
||||
userId: TEST_USER_ID,
|
||||
manual: 'false',
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
expect(result).toBeDefined();
|
||||
expect(result).toContain('X-Amz-Signature');
|
||||
expect(result).toContain('avatar');
|
||||
});
|
||||
|
||||
it('updates user when manual is true', async () => {
|
||||
const { S3ImageService } = await import('../images');
|
||||
|
||||
const mockDeps = {
|
||||
resizeImageBuffer: jest.fn().mockImplementation(async (buffer: Buffer) => ({
|
||||
buffer,
|
||||
width: 100,
|
||||
height: 100,
|
||||
})),
|
||||
updateUser: jest.fn().mockResolvedValue(undefined),
|
||||
updateFile: jest.fn().mockResolvedValue(undefined),
|
||||
};
|
||||
|
||||
const imageService = new S3ImageService(mockDeps);
|
||||
|
||||
const pngBuffer = MINIMAL_PNG;
|
||||
|
||||
await imageService.processAvatar({
|
||||
buffer: pngBuffer,
|
||||
userId: TEST_USER_ID,
|
||||
manual: 'true',
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
expect(mockDeps.updateUser).toHaveBeenCalledWith(
|
||||
TEST_USER_ID,
|
||||
expect.objectContaining({ avatar: expect.any(String) }),
|
||||
);
|
||||
});
|
||||
|
||||
it('does not update user when agentId is provided', async () => {
|
||||
const { S3ImageService } = await import('../images');
|
||||
|
||||
const mockDeps = {
|
||||
resizeImageBuffer: jest.fn().mockImplementation(async (buffer: Buffer) => ({
|
||||
buffer,
|
||||
width: 100,
|
||||
height: 100,
|
||||
})),
|
||||
updateUser: jest.fn().mockResolvedValue(undefined),
|
||||
updateFile: jest.fn().mockResolvedValue(undefined),
|
||||
};
|
||||
|
||||
const imageService = new S3ImageService(mockDeps);
|
||||
|
||||
const pngBuffer = MINIMAL_PNG;
|
||||
|
||||
await imageService.processAvatar({
|
||||
buffer: pngBuffer,
|
||||
userId: TEST_USER_ID,
|
||||
manual: 'true',
|
||||
agentId: 'agent-123',
|
||||
basePath: TEST_BASE_PATH,
|
||||
});
|
||||
|
||||
expect(mockDeps.updateUser).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('returns tuple with resolved promise and filepath in prepareImageURL', async () => {
|
||||
const { S3ImageService } = await import('../images');
|
||||
|
||||
const mockDeps = {
|
||||
resizeImageBuffer: jest.fn().mockImplementation(async (buffer: Buffer) => ({
|
||||
buffer,
|
||||
width: 100,
|
||||
height: 100,
|
||||
})),
|
||||
updateUser: jest.fn().mockResolvedValue(undefined),
|
||||
updateFile: jest.fn().mockResolvedValue(undefined),
|
||||
};
|
||||
|
||||
const imageService = new S3ImageService(mockDeps);
|
||||
|
||||
const testFile = {
|
||||
file_id: 'file-123',
|
||||
filepath: 'https://example.com/file.png',
|
||||
};
|
||||
|
||||
const result = await imageService.prepareImageURL(testFile);
|
||||
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result[1]).toBe(testFile.filepath);
|
||||
expect(mockDeps.updateFile).toHaveBeenCalledWith({ file_id: 'file-123' });
|
||||
});
|
||||
});
|
||||
});
|
||||
460
packages/api/src/storage/s3/crud.ts
Normal file
460
packages/api/src/storage/s3/crud.ts
Normal file
|
|
@ -0,0 +1,460 @@
|
|||
import fs from 'fs';
|
||||
import { Readable } from 'stream';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import { FileSources } from 'librechat-data-provider';
|
||||
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
|
||||
import {
|
||||
PutObjectCommand,
|
||||
GetObjectCommand,
|
||||
HeadObjectCommand,
|
||||
DeleteObjectCommand,
|
||||
} from '@aws-sdk/client-s3';
|
||||
import type { GetObjectCommandInput } from '@aws-sdk/client-s3';
|
||||
import type { TFile } from 'librechat-data-provider';
|
||||
import type { ServerRequest } from '~/types';
|
||||
import type {
|
||||
UploadFileParams,
|
||||
SaveBufferParams,
|
||||
BatchUpdateFn,
|
||||
SaveURLParams,
|
||||
GetURLParams,
|
||||
UploadResult,
|
||||
S3FileRef,
|
||||
} from '~/storage/types';
|
||||
import { initializeS3 } from '~/cdn/s3';
|
||||
import { deleteRagFile } from '~/files';
|
||||
import { s3Config } from './s3Config';
|
||||
|
||||
/**
 * S3 settings resolved once at module load from the shared config object:
 * bucket, optional custom endpoint (MinIO/R2), path-style flag, presigned-URL
 * lifetime, refresh threshold, and the default base path for object keys.
 */
const {
  AWS_BUCKET_NAME: bucketName,
  AWS_ENDPOINT_URL: endpoint,
  AWS_FORCE_PATH_STYLE: forcePathStyle,
  S3_URL_EXPIRY_SECONDS: s3UrlExpirySeconds,
  S3_REFRESH_EXPIRY_MS: s3RefreshExpiryMs,
  DEFAULT_BASE_PATH: defaultBasePath,
} = s3Config;
|
||||
|
||||
export const getS3Key = (basePath: string, userId: string, fileName: string): string => {
|
||||
if (basePath.includes('/')) {
|
||||
throw new Error(`[getS3Key] basePath must not contain slashes: "${basePath}"`);
|
||||
}
|
||||
return `${basePath}/${userId}/${fileName}`;
|
||||
};
|
||||
|
||||
export async function getS3URL({
|
||||
userId,
|
||||
fileName,
|
||||
basePath = defaultBasePath,
|
||||
customFilename = null,
|
||||
contentType = null,
|
||||
}: GetURLParams): Promise<string> {
|
||||
const key = getS3Key(basePath, userId, fileName);
|
||||
const params: GetObjectCommandInput = { Bucket: bucketName, Key: key };
|
||||
|
||||
if (customFilename) {
|
||||
const safeFilename = customFilename.replace(/["\r\n]/g, '');
|
||||
params.ResponseContentDisposition = `attachment; filename="${safeFilename}"`;
|
||||
}
|
||||
if (contentType) {
|
||||
params.ResponseContentType = contentType;
|
||||
}
|
||||
|
||||
try {
|
||||
const s3 = initializeS3();
|
||||
if (!s3) {
|
||||
throw new Error('[getS3URL] S3 not initialized');
|
||||
}
|
||||
|
||||
return await getSignedUrl(s3, new GetObjectCommand(params), { expiresIn: s3UrlExpirySeconds });
|
||||
} catch (error) {
|
||||
logger.error('[getS3URL] Error getting signed URL from S3:', (error as Error).message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveBufferToS3({
|
||||
userId,
|
||||
buffer,
|
||||
fileName,
|
||||
basePath = defaultBasePath,
|
||||
}: SaveBufferParams): Promise<string> {
|
||||
const key = getS3Key(basePath, userId, fileName);
|
||||
const params = { Bucket: bucketName, Key: key, Body: buffer };
|
||||
|
||||
try {
|
||||
const s3 = initializeS3();
|
||||
if (!s3) {
|
||||
throw new Error('[saveBufferToS3] S3 not initialized');
|
||||
}
|
||||
|
||||
await s3.send(new PutObjectCommand(params));
|
||||
return await getS3URL({ userId, fileName, basePath });
|
||||
} catch (error) {
|
||||
logger.error('[saveBufferToS3] Error uploading buffer to S3:', (error as Error).message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function saveURLToS3({
|
||||
userId,
|
||||
URL,
|
||||
fileName,
|
||||
basePath = defaultBasePath,
|
||||
}: SaveURLParams): Promise<string> {
|
||||
try {
|
||||
const response = await fetch(URL);
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
const arrayBuffer = await response.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
return await saveBufferToS3({ userId, buffer, fileName, basePath });
|
||||
} catch (error) {
|
||||
logger.error('[saveURLToS3] Error uploading file from URL to S3:', (error as Error).message);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Derives the S3 object key from a signed or unsigned S3 URL, or passes a
 * bare key through unchanged.
 *
 * Handles three URL layouts, in order:
 * 1. Custom endpoint with path-style addressing (AWS_ENDPOINT_URL +
 *    AWS_FORCE_PATH_STYLE, e.g. MinIO/R2): strips the endpoint's own path
 *    and the bucket segment.
 * 2. AWS path-style (`s3.amazonaws.com` / regional `s3.<region>.amazonaws.com`
 *    hosts), or any URL whose path begins with the configured bucket name:
 *    strips the leading bucket segment.
 * 3. Virtual-hosted style (`<bucket>.s3...`): the pathname minus its leading
 *    slash is already the key.
 *
 * Non-URL input falls through to the catch block and is returned as a
 * best-effort key (leading slash removed).
 *
 * @param fileUrlOrKey A full S3 URL or an already-extracted object key.
 * @returns The object key, or '' when no key could be recovered.
 * @throws Error when the input is empty.
 */
export function extractKeyFromS3Url(fileUrlOrKey: string): string {
  if (!fileUrlOrKey) {
    throw new Error('Invalid input: URL or key is empty');
  }

  try {
    const url = new URL(fileUrlOrKey);
    const hostname = url.hostname;
    // Pathname without the leading '/'.
    const pathname = url.pathname.substring(1);

    if (endpoint && forcePathStyle) {
      const endpointUrl = new URL(endpoint);
      // Skip "<endpoint path>/<bucket>/" to reach the start of the key.
      const startPos =
        endpointUrl.pathname.length +
        (endpointUrl.pathname.endsWith('/') ? 0 : 1) +
        bucketName.length +
        1;
      const key = url.pathname.substring(startPos);
      if (!key) {
        logger.warn(
          `[extractKeyFromS3Url] Extracted key is empty for endpoint path-style URL: ${fileUrlOrKey}`,
        );
      } else {
        logger.debug(`[extractKeyFromS3Url] fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${key}`);
      }
      return key;
    }

    if (
      hostname === 's3.amazonaws.com' ||
      hostname.match(/^s3[-.][a-z0-9-]+\.amazonaws\.com$/) ||
      (bucketName && pathname.startsWith(`${bucketName}/`))
    ) {
      // Path-style URL: the first path segment is the bucket; drop it.
      const firstSlashIndex = pathname.indexOf('/');
      if (firstSlashIndex > 0) {
        const key = pathname.substring(firstSlashIndex + 1);
        if (key === '') {
          logger.warn(
            `[extractKeyFromS3Url] Extracted key is empty after removing bucket name from URL: ${fileUrlOrKey}`,
          );
        } else {
          logger.debug(
            `[extractKeyFromS3Url] fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${key}`,
          );
        }
        return key;
      }
      logger.warn(
        `[extractKeyFromS3Url] Unable to extract key from path-style URL: ${fileUrlOrKey}`,
      );
      return '';
    }

    // Virtual-hosted-style URL: pathname (sans leading slash) is the key.
    logger.debug(`[extractKeyFromS3Url] fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${pathname}`);
    return pathname;
  } catch (error) {
    // new URL() threw: either a malformed URL or a plain key was passed in.
    if (fileUrlOrKey.startsWith('http://') || fileUrlOrKey.startsWith('https://')) {
      logger.error(
        `[extractKeyFromS3Url] Error parsing URL: ${fileUrlOrKey}, Error: ${(error as Error).message}`,
      );
    } else {
      logger.debug(`[extractKeyFromS3Url] Non-URL input, using fallback: ${fileUrlOrKey}`);
    }

    // A slash-separated non-URL with >= 3 segments looks like a complete
    // "<basePath>/<userId>/<fileName>" key: return it unchanged.
    const parts = fileUrlOrKey.split('/');
    if (parts.length >= 3 && !fileUrlOrKey.startsWith('http') && !fileUrlOrKey.startsWith('/')) {
      return fileUrlOrKey;
    }

    const key = fileUrlOrKey.startsWith('/') ? fileUrlOrKey.substring(1) : fileUrlOrKey;
    logger.debug(
      `[extractKeyFromS3Url] FALLBACK. fileUrlOrKey: ${fileUrlOrKey}, Extracted key: ${key}`,
    );
    return key;
  }
}
|
||||
|
||||
export async function deleteFileFromS3(req: ServerRequest, file: TFile): Promise<void> {
|
||||
if (!req.user) {
|
||||
throw new Error('[deleteFileFromS3] User not authenticated');
|
||||
}
|
||||
|
||||
const userId = req.user.id;
|
||||
const key = extractKeyFromS3Url(file.filepath);
|
||||
|
||||
const keyParts = key.split('/');
|
||||
if (keyParts.length < 2 || keyParts[1] !== userId) {
|
||||
const message = `[deleteFileFromS3] User ID mismatch: ${userId} vs ${key}`;
|
||||
logger.error(message);
|
||||
throw new Error(message);
|
||||
}
|
||||
|
||||
const s3 = initializeS3();
|
||||
if (!s3) {
|
||||
throw new Error('[deleteFileFromS3] S3 not initialized');
|
||||
}
|
||||
|
||||
const params = { Bucket: bucketName, Key: key };
|
||||
|
||||
try {
|
||||
try {
|
||||
const headCommand = new HeadObjectCommand(params);
|
||||
await s3.send(headCommand);
|
||||
logger.debug('[deleteFileFromS3] File exists, proceeding with deletion');
|
||||
} catch (headErr) {
|
||||
if ((headErr as { name?: string }).name === 'NotFound') {
|
||||
logger.warn(`[deleteFileFromS3] File does not exist: ${key}`);
|
||||
await deleteRagFile({ userId, file });
|
||||
return;
|
||||
}
|
||||
throw headErr;
|
||||
}
|
||||
|
||||
await s3.send(new DeleteObjectCommand(params));
|
||||
await deleteRagFile({ userId, file });
|
||||
logger.debug('[deleteFileFromS3] S3 File deletion completed');
|
||||
} catch (error) {
|
||||
logger.error(`[deleteFileFromS3] Error deleting file from S3: ${(error as Error).message}`);
|
||||
logger.error((error as Error).stack);
|
||||
|
||||
if ((error as { name?: string }).name === 'NoSuchKey') {
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function uploadFileToS3({
|
||||
req,
|
||||
file,
|
||||
file_id,
|
||||
basePath = defaultBasePath,
|
||||
}: UploadFileParams): Promise<UploadResult> {
|
||||
if (!req.user) {
|
||||
throw new Error('[uploadFileToS3] User not authenticated');
|
||||
}
|
||||
|
||||
try {
|
||||
const inputFilePath = file.path;
|
||||
const userId = req.user.id;
|
||||
const fileName = `${file_id}__${file.originalname}`;
|
||||
const key = getS3Key(basePath, userId, fileName);
|
||||
|
||||
const stats = await fs.promises.stat(inputFilePath);
|
||||
const bytes = stats.size;
|
||||
const fileStream = fs.createReadStream(inputFilePath);
|
||||
|
||||
const s3 = initializeS3();
|
||||
if (!s3) {
|
||||
throw new Error('[uploadFileToS3] S3 not initialized');
|
||||
}
|
||||
|
||||
const uploadParams = {
|
||||
Bucket: bucketName,
|
||||
Key: key,
|
||||
Body: fileStream,
|
||||
};
|
||||
|
||||
await s3.send(new PutObjectCommand(uploadParams));
|
||||
const fileURL = await getS3URL({ userId, fileName, basePath });
|
||||
// NOTE: temp file is intentionally NOT deleted on the success path.
|
||||
// The caller (processAgentFileUpload) reads file.path after this returns
|
||||
// to stream the file to the RAG vector embedding service (POST /embed).
|
||||
// Temp file lifecycle on success is the caller's responsibility.
|
||||
return { filepath: fileURL, bytes };
|
||||
} catch (error) {
|
||||
logger.error('[uploadFileToS3] Error streaming file to S3:', error);
|
||||
if (file?.path) {
|
||||
await fs.promises
|
||||
.unlink(file.path)
|
||||
.catch((e: unknown) =>
|
||||
logger.error('[uploadFileToS3] Failed to delete temp file:', (e as Error).message),
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function getS3FileStream(_req: ServerRequest, filePath: string): Promise<Readable> {
|
||||
try {
|
||||
const Key = extractKeyFromS3Url(filePath);
|
||||
const params = { Bucket: bucketName, Key };
|
||||
|
||||
const s3 = initializeS3();
|
||||
if (!s3) {
|
||||
throw new Error('[getS3FileStream] S3 not initialized');
|
||||
}
|
||||
|
||||
const data = await s3.send(new GetObjectCommand(params));
|
||||
if (!data.Body) {
|
||||
throw new Error(`[getS3FileStream] S3 response body is empty for key: ${Key}`);
|
||||
}
|
||||
return data.Body as Readable;
|
||||
} catch (error) {
|
||||
logger.error('[getS3FileStream] Error retrieving S3 file stream:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export function needsRefresh(signedUrl: string, bufferSeconds: number): boolean {
|
||||
try {
|
||||
const url = new URL(signedUrl);
|
||||
|
||||
if (!url.searchParams.has('X-Amz-Signature')) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const expiresParam = url.searchParams.get('X-Amz-Expires');
|
||||
const dateParam = url.searchParams.get('X-Amz-Date');
|
||||
|
||||
if (!expiresParam || !dateParam) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const year = dateParam.substring(0, 4);
|
||||
const month = dateParam.substring(4, 6);
|
||||
const day = dateParam.substring(6, 8);
|
||||
const hour = dateParam.substring(9, 11);
|
||||
const minute = dateParam.substring(11, 13);
|
||||
const second = dateParam.substring(13, 15);
|
||||
|
||||
const dateObj = new Date(`${year}-${month}-${day}T${hour}:${minute}:${second}Z`);
|
||||
const now = new Date();
|
||||
|
||||
if (s3RefreshExpiryMs !== null) {
|
||||
const urlAge = now.getTime() - dateObj.getTime();
|
||||
return urlAge >= s3RefreshExpiryMs;
|
||||
}
|
||||
|
||||
const expiresAtDate = new Date(dateObj.getTime() + parseInt(expiresParam) * 1000);
|
||||
const bufferTime = new Date(now.getTime() + bufferSeconds * 1000);
|
||||
return expiresAtDate <= bufferTime;
|
||||
} catch (error) {
|
||||
logger.error('Error checking URL expiration:', error);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
export async function getNewS3URL(currentURL: string): Promise<string | undefined> {
|
||||
try {
|
||||
const s3Key = extractKeyFromS3Url(currentURL);
|
||||
if (!s3Key) {
|
||||
return;
|
||||
}
|
||||
|
||||
const keyParts = s3Key.split('/');
|
||||
if (keyParts.length < 3) {
|
||||
return;
|
||||
}
|
||||
|
||||
const basePath = keyParts[0];
|
||||
const userId = keyParts[1];
|
||||
const fileName = keyParts.slice(2).join('/');
|
||||
|
||||
return getS3URL({ userId, fileName, basePath });
|
||||
} catch (error) {
|
||||
logger.error('Error getting new S3 URL:', error);
|
||||
}
|
||||
}
|
||||
|
||||
export async function refreshS3FileUrls(
|
||||
files: TFile[] | null | undefined,
|
||||
batchUpdateFiles: BatchUpdateFn,
|
||||
bufferSeconds = 3600,
|
||||
): Promise<TFile[]> {
|
||||
if (!files || !Array.isArray(files) || files.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const filesToUpdate: Array<{ file_id: string; filepath: string }> = [];
|
||||
const updatedFiles = [...files];
|
||||
|
||||
for (let i = 0; i < updatedFiles.length; i++) {
|
||||
const file = updatedFiles[i];
|
||||
if (!file?.file_id) {
|
||||
continue;
|
||||
}
|
||||
if (file.source !== FileSources.s3) {
|
||||
continue;
|
||||
}
|
||||
if (!file.filepath) {
|
||||
continue;
|
||||
}
|
||||
if (!needsRefresh(file.filepath, bufferSeconds)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
const newURL = await getNewS3URL(file.filepath);
|
||||
if (!newURL) {
|
||||
continue;
|
||||
}
|
||||
filesToUpdate.push({
|
||||
file_id: file.file_id,
|
||||
filepath: newURL,
|
||||
});
|
||||
updatedFiles[i] = { ...file, filepath: newURL };
|
||||
} catch (error) {
|
||||
logger.error(`Error refreshing S3 URL for file ${file.file_id}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
if (filesToUpdate.length > 0) {
|
||||
await batchUpdateFiles(filesToUpdate);
|
||||
}
|
||||
|
||||
return updatedFiles;
|
||||
}
|
||||
|
||||
export async function refreshS3Url(fileObj: S3FileRef, bufferSeconds = 3600): Promise<string> {
|
||||
if (!fileObj || fileObj.source !== FileSources.s3 || !fileObj.filepath) {
|
||||
return fileObj?.filepath || '';
|
||||
}
|
||||
|
||||
if (!needsRefresh(fileObj.filepath, bufferSeconds)) {
|
||||
return fileObj.filepath;
|
||||
}
|
||||
|
||||
try {
|
||||
const s3Key = extractKeyFromS3Url(fileObj.filepath);
|
||||
if (!s3Key) {
|
||||
logger.warn(`Unable to extract S3 key from URL: ${fileObj.filepath}`);
|
||||
return fileObj.filepath;
|
||||
}
|
||||
|
||||
const keyParts = s3Key.split('/');
|
||||
if (keyParts.length < 3) {
|
||||
logger.warn(`Invalid S3 key format: ${s3Key}`);
|
||||
return fileObj.filepath;
|
||||
}
|
||||
|
||||
const basePath = keyParts[0];
|
||||
const userId = keyParts[1];
|
||||
const fileName = keyParts.slice(2).join('/');
|
||||
|
||||
const newUrl = await getS3URL({ userId, fileName, basePath });
|
||||
logger.debug(`Refreshed S3 URL for key: ${s3Key}`);
|
||||
return newUrl;
|
||||
} catch (error) {
|
||||
logger.error(`Error refreshing S3 URL: ${(error as Error).message}`);
|
||||
return fileObj.filepath;
|
||||
}
|
||||
}
|
||||
--- new file: packages/api/src/storage/s3/images.ts (141 lines, @@ -0,0 +1,141 @@) ---
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import sharp from 'sharp';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import type { IUser } from '@librechat/data-schemas';
|
||||
import type { TFile } from 'librechat-data-provider';
|
||||
import type { FormatEnum } from 'sharp';
|
||||
import type { UploadImageParams, ImageUploadResult, ProcessAvatarParams } from '~/storage/types';
|
||||
import { saveBufferToS3 } from './crud';
|
||||
import { s3Config } from './s3Config';
|
||||
|
||||
const { DEFAULT_BASE_PATH: defaultBasePath } = s3Config;
|
||||
|
||||
/**
 * Collaborators injected into `S3ImageService`, kept as a plain object so
 * callers can supply app-specific implementations (and tests can mock them).
 */
export interface S3ImageServiceDeps {
  /** Resizes an image buffer for a resolution/endpoint and reports the final dimensions. */
  resizeImageBuffer: (
    buffer: Buffer,
    resolution: string,
    endpoint: string,
  ) => Promise<{ buffer: Buffer; width: number; height: number }>;
  /** Persists a new avatar URL on the user record. */
  updateUser: (userId: string, update: { avatar: string }) => Promise<IUser | null>;
  /** Updates and returns the file record for the given `file_id`. */
  updateFile: (params: { file_id: string }) => Promise<TFile>;
}
|
||||
|
||||
export class S3ImageService {
|
||||
private deps: S3ImageServiceDeps;
|
||||
|
||||
constructor(deps: S3ImageServiceDeps) {
|
||||
this.deps = deps;
|
||||
}
|
||||
|
||||
async uploadImageToS3({
|
||||
req,
|
||||
file,
|
||||
file_id,
|
||||
endpoint,
|
||||
resolution = 'high',
|
||||
basePath = defaultBasePath,
|
||||
}: UploadImageParams): Promise<ImageUploadResult> {
|
||||
const inputFilePath = file.path;
|
||||
try {
|
||||
if (!req.user) {
|
||||
throw new Error('[S3ImageService.uploadImageToS3] User not authenticated');
|
||||
}
|
||||
|
||||
const appConfig = req.config;
|
||||
const inputBuffer = await fs.promises.readFile(inputFilePath);
|
||||
|
||||
const {
|
||||
buffer: resizedBuffer,
|
||||
width,
|
||||
height,
|
||||
} = await this.deps.resizeImageBuffer(inputBuffer, resolution, endpoint);
|
||||
|
||||
const extension = path.extname(inputFilePath);
|
||||
const userId = req.user.id;
|
||||
|
||||
let processedBuffer: Buffer;
|
||||
let fileName = `${file_id}__${path.basename(inputFilePath)}`;
|
||||
const targetExtension = `.${appConfig?.imageOutputType ?? 'webp'}`;
|
||||
|
||||
if (extension.toLowerCase() === targetExtension) {
|
||||
processedBuffer = resizedBuffer;
|
||||
} else {
|
||||
const outputFormat = (appConfig?.imageOutputType ?? 'webp') as keyof FormatEnum;
|
||||
processedBuffer = await sharp(resizedBuffer).toFormat(outputFormat).toBuffer();
|
||||
fileName = fileName.replace(new RegExp(path.extname(fileName) + '$'), targetExtension);
|
||||
if (!path.extname(fileName)) {
|
||||
fileName += targetExtension;
|
||||
}
|
||||
}
|
||||
|
||||
const downloadURL = await saveBufferToS3({
|
||||
userId,
|
||||
buffer: processedBuffer,
|
||||
fileName,
|
||||
basePath,
|
||||
});
|
||||
const bytes = processedBuffer.length;
|
||||
return { filepath: downloadURL, bytes, width, height };
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
'[S3ImageService.uploadImageToS3] Error uploading image to S3:',
|
||||
(error as Error).message,
|
||||
);
|
||||
throw error;
|
||||
} finally {
|
||||
await fs.promises
|
||||
.unlink(inputFilePath)
|
||||
.catch((e: unknown) =>
|
||||
logger.error(
|
||||
'[S3ImageService.uploadImageToS3] Failed to delete temp file:',
|
||||
(e as Error).message,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async prepareImageURL(file: { file_id: string; filepath: string }): Promise<[TFile, string]> {
|
||||
try {
|
||||
return await Promise.all([this.deps.updateFile({ file_id: file.file_id }), file.filepath]);
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
'[S3ImageService.prepareImageURL] Error preparing image URL:',
|
||||
(error as Error).message,
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async processAvatar({
|
||||
buffer,
|
||||
userId,
|
||||
manual,
|
||||
agentId,
|
||||
basePath = defaultBasePath,
|
||||
}: ProcessAvatarParams): Promise<string> {
|
||||
try {
|
||||
const metadata = await sharp(buffer).metadata();
|
||||
const extension = metadata.format ?? 'png';
|
||||
const timestamp = new Date().getTime();
|
||||
|
||||
const fileName = agentId
|
||||
? `agent-${agentId}-avatar-${timestamp}.${extension}`
|
||||
: `avatar-${timestamp}.${extension}`;
|
||||
|
||||
const downloadURL = await saveBufferToS3({ userId, buffer, fileName, basePath });
|
||||
|
||||
if (manual === 'true' && !agentId) {
|
||||
await this.deps.updateUser(userId, { avatar: downloadURL });
|
||||
}
|
||||
|
||||
return downloadURL;
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
'[S3ImageService.processAvatar] Error processing S3 avatar:',
|
||||
(error as Error).message,
|
||||
);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
--- new file: packages/api/src/storage/s3/index.ts (2 lines, @@ -0,0 +1,2 @@) ---
|
|||
/** Barrel exports for the S3 storage module: CRUD helpers and image services. */
export * from './crud';
export * from './images';
|
||||
--- new file: packages/api/src/storage/s3/s3Config.ts (57 lines, @@ -0,0 +1,57 @@) ---
|
|||
import { logger } from '@librechat/data-schemas';
|
||||
import { isEnabled } from '~/utils/common';
|
||||
|
||||
// Cap matching the AWS maximum lifetime for SigV4 presigned URLs.
const MAX_EXPIRY_SECONDS = 7 * 24 * 60 * 60; // 7 days
// Short default keeps leaked URLs low-risk; override via S3_URL_EXPIRY_SECONDS.
const DEFAULT_EXPIRY_SECONDS = 2 * 60; // 2 minutes
// Default key prefix applied when callers do not supply a basePath.
const DEFAULT_BASE_PATH = 'images';
|
||||
|
||||
const parseUrlExpiry = (): number => {
|
||||
if (process.env.S3_URL_EXPIRY_SECONDS === undefined) {
|
||||
return DEFAULT_EXPIRY_SECONDS;
|
||||
}
|
||||
|
||||
const parsed = parseInt(process.env.S3_URL_EXPIRY_SECONDS, 10);
|
||||
if (isNaN(parsed) || parsed <= 0) {
|
||||
logger.warn(
|
||||
`[S3] Invalid S3_URL_EXPIRY_SECONDS value: "${process.env.S3_URL_EXPIRY_SECONDS}". Using ${DEFAULT_EXPIRY_SECONDS}s expiry.`,
|
||||
);
|
||||
return DEFAULT_EXPIRY_SECONDS;
|
||||
}
|
||||
|
||||
return Math.min(parsed, MAX_EXPIRY_SECONDS);
|
||||
};
|
||||
|
||||
const parseRefreshExpiry = (): number | null => {
|
||||
if (!process.env.S3_REFRESH_EXPIRY_MS) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const parsed = parseInt(process.env.S3_REFRESH_EXPIRY_MS, 10);
|
||||
if (isNaN(parsed) || parsed <= 0) {
|
||||
logger.warn(
|
||||
`[S3] Invalid S3_REFRESH_EXPIRY_MS value: "${process.env.S3_REFRESH_EXPIRY_MS}". Using default refresh logic.`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
logger.info(`[S3] Using custom refresh expiry time: ${parsed}ms`);
|
||||
return parsed;
|
||||
};
|
||||
|
||||
// Internal module config — not part of the public @librechat/api surface.
// Values are read from the environment once at module load; runtime env
// changes have no effect.
export const s3Config = {
  /** AWS region for S3 */
  AWS_REGION: process.env.AWS_REGION ?? '',
  /** S3 bucket name */
  AWS_BUCKET_NAME: process.env.AWS_BUCKET_NAME ?? '',
  /** Custom endpoint URL (for MinIO, R2, etc.) */
  AWS_ENDPOINT_URL: process.env.AWS_ENDPOINT_URL,
  /** Use path-style URLs instead of virtual-hosted-style */
  AWS_FORCE_PATH_STYLE: isEnabled(process.env.AWS_FORCE_PATH_STYLE),
  /** Presigned URL expiry in seconds */
  S3_URL_EXPIRY_SECONDS: parseUrlExpiry(),
  /** Custom refresh expiry in milliseconds (null = use default buffer logic) */
  S3_REFRESH_EXPIRY_MS: parseRefreshExpiry(),
  /** Default base path for file storage */
  DEFAULT_BASE_PATH,
};
|
||||
--- new file: packages/api/src/storage/types.ts (60 lines, @@ -0,0 +1,60 @@) ---
|
|||
import type { ServerRequest } from '~/types';
|
||||
|
||||
/** Parameters for writing an in-memory buffer to storage under a user's namespace. */
export interface SaveBufferParams {
  userId: string;
  buffer: Buffer;
  fileName: string;
  /** Storage key prefix; implementations supply a default when omitted. */
  basePath?: string;
}
|
||||
|
||||
/** Parameters for generating a download URL for a stored file. */
export interface GetURLParams {
  userId: string;
  fileName: string;
  basePath?: string;
  /** Overrides the filename presented to the downloader, when supported. */
  customFilename?: string | null;
  /** Content-Type to serve the file with, when supported. */
  contentType?: string | null;
}
|
||||
|
||||
/** Parameters for fetching a remote URL's content and saving it to storage. */
export interface SaveURLParams {
  userId: string;
  /** Source URL to download from. */
  URL: string;
  fileName: string;
  basePath?: string;
}
|
||||
|
||||
/** Parameters for uploading a locally-spooled multipart file to storage. */
export interface UploadFileParams {
  /** Request carrying the authenticated user (and app config). */
  req: ServerRequest;
  /** Multer file whose `path` points at the local temp file. */
  file: Express.Multer.File;
  /** Unique identifier prefixed onto the stored object name. */
  file_id: string;
  basePath?: string;
}
|
||||
|
||||
/** Image-upload parameters: file upload plus resize targeting. */
export interface UploadImageParams extends UploadFileParams {
  /** Endpoint identifier used to pick resize settings. */
  endpoint: string;
  /** Resize quality tier; implementations default this (e.g. 'high'). */
  resolution?: string;
}
|
||||
|
||||
/** Result of a file upload: where it lives and how large it is. */
export interface UploadResult {
  /** Download URL (or storage path) of the uploaded object. */
  filepath: string;
  /** Size of the stored object in bytes. */
  bytes: number;
}
|
||||
|
||||
/** Image upload result: adds the final pixel dimensions. */
export interface ImageUploadResult extends UploadResult {
  width: number;
  height: number;
}
|
||||
|
||||
/** Parameters for processing and storing an avatar image. */
export interface ProcessAvatarParams {
  /** Raw image bytes of the avatar. */
  buffer: Buffer;
  userId: string;
  /** String flag; 'true' persists the avatar on the user record. */
  manual: string;
  /** When set, the avatar belongs to an agent rather than the user. */
  agentId?: string;
  basePath?: string;
}
|
||||
|
||||
/** Minimal file reference needed to decide whether an S3 URL can be refreshed. */
export interface S3FileRef {
  filepath: string;
  /** Storage backend identifier (compared against FileSources.s3). */
  source: string;
}
|
||||
|
||||
export type BatchUpdateFn = (files: Array<{ file_id: string; filepath: string }>) => Promise<void>;
|
||||
Loading…
Add table
Add a link
Reference in a new issue