mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-03-15 12:16:33 +01:00
* fix: add agent permission check to image upload route
* refactor: remove unused SystemRoles import and format test file for clarity
* fix: address review findings for image upload agent permission check
* refactor: move agent upload auth logic to TypeScript in packages/api
Extract pure authorization logic from agentPermCheck.js into
checkAgentUploadAuth() in packages/api/src/files/agentUploadAuth.ts.
The function returns a structured result ({ allowed, status, error })
instead of writing HTTP responses directly, eliminating the dual
responsibility and confusing sentinel return value. The JS wrapper
in /api is now a thin adapter that translates the result to HTTP.
* test: rewrite image upload permission tests as integration tests
Replace mock-heavy images-agent-perm.spec.js with integration tests
using MongoMemoryServer, real models, and real PermissionService.
Follows the established pattern in files.agents.test.js. Moves test
to sibling location (images.agents.test.js) matching backend convention.
Adds temp file cleanup assertions on 403/404 responses and covers
message_file exemption paths (boolean true, string "true", false).
* fix: widen AgentUploadAuthDeps types to accept ObjectId from Mongoose
The injected getAgent returns Mongoose documents where _id and author
are Types.ObjectId at runtime, not string. Widen the DI interface to
accept string | Types.ObjectId for _id, author, and resourceId so the
contract accurately reflects real callers.
* chore: move agent upload auth into files/agents/ subdirectory
* refactor: delete agentPermCheck.js wrapper, move verifyAgentUploadPermission to packages/api
The /api-only dependencies (getAgent, checkPermission) are now passed
as object-field params from the route call sites. Both images.js and
files.js import verifyAgentUploadPermission from @librechat/api and
inject the deps directly, eliminating the intermediate JS wrapper.
* style: fix import type ordering in agent upload auth
* fix: prevent token TTL race in MCPTokenStorage.storeTokens
When expires_in is provided, use it directly instead of round-tripping
through Date arithmetic. The previous code computed accessTokenExpiry
as a Date, then after an async encryptV2 call, recomputed expiresIn by
subtracting Date.now(). On loaded CI runners the elapsed time caused
Math.floor to truncate to 0, triggering the 1-year fallback and making
the token appear permanently valid — so refresh never fired.
432 lines
13 KiB
JavaScript
432 lines
13 KiB
JavaScript
const fs = require('fs').promises;
|
|
const express = require('express');
|
|
const { EnvVar } = require('@librechat/agents');
|
|
const { logger } = require('@librechat/data-schemas');
|
|
const { verifyAgentUploadPermission } = require('@librechat/api');
|
|
const {
|
|
Time,
|
|
isUUID,
|
|
CacheKeys,
|
|
FileSources,
|
|
ResourceType,
|
|
EModelEndpoint,
|
|
PermissionBits,
|
|
checkOpenAIStorage,
|
|
isAssistantsEndpoint,
|
|
} = require('librechat-data-provider');
|
|
const {
|
|
filterFile,
|
|
processFileUpload,
|
|
processDeleteRequest,
|
|
processAgentFileUpload,
|
|
} = require('~/server/services/Files/process');
|
|
const { fileAccess } = require('~/server/middleware/accessResources/fileAccess');
|
|
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
|
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
|
|
const { checkPermission } = require('~/server/services/PermissionService');
|
|
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
|
const { refreshS3FileUrls } = require('~/server/services/Files/S3/crud');
|
|
const { hasAccessToFilesViaAgent } = require('~/server/services/Files');
|
|
const { getFiles, batchUpdateFiles } = require('~/models');
|
|
const { cleanFileName } = require('~/server/utils/files');
|
|
const { getAssistant } = require('~/models/Assistant');
|
|
const { getAgent } = require('~/models/Agent');
|
|
const { getLogStores } = require('~/cache');
|
|
const { Readable } = require('stream');
|
|
|
|
const router = express.Router();
|
|
|
|
/**
 * List every file owned by the requesting user.
 * When the S3 file strategy is active, presigned URLs are opportunistically
 * refreshed at most once per user per thirty-minute window (cache-gated).
 */
router.get('/', async (req, res) => {
  try {
    const userId = req.user.id;
    const files = await getFiles({ user: userId });

    const usesS3 = req.config.fileStrategy === FileSources.s3;
    if (usesS3) {
      try {
        const cache = getLogStores(CacheKeys.S3_EXPIRY_INTERVAL);
        const recentlyRefreshed = await cache.get(userId);
        if (!recentlyRefreshed) {
          await refreshS3FileUrls(files, batchUpdateFiles);
          await cache.set(userId, true, Time.THIRTY_MINUTES);
        }
      } catch (err) {
        // Refresh is best-effort: stale URLs beat a failed listing.
        logger.warn('[/files] Error refreshing S3 file URLs:', err);
      }
    }

    res.status(200).send(files);
  } catch (err) {
    logger.error('[/files] Error getting files:', err);
    res.status(400).json({ message: 'Error in request', error: err.message });
  }
});
|
|
|
|
/**
 * Get files specific to an agent
 * @route GET /files/agent/:agent_id
 * @param {string} agent_id - The agent ID to get files for
 * @returns {Promise<TFile[]>} Array of files attached to the agent
 */
router.get('/agent/:agent_id', async (req, res) => {
  try {
    const { agent_id } = req.params;
    const userId = req.user.id;

    if (!agent_id) {
      return res.status(400).json({ error: 'Agent ID is required' });
    }

    const agent = await getAgent({ id: agent_id });
    if (!agent) {
      // Unknown agent: respond with an empty list rather than an error.
      return res.status(200).json([]);
    }

    // Non-authors must hold EDIT permission on the agent to see its files;
    // lacking it also yields an empty list (no information leak via 403).
    const isAuthor = agent.author.toString() === userId;
    if (!isAuthor) {
      const hasEditPermission = await checkPermission({
        userId,
        role: req.user.role,
        resourceType: ResourceType.AGENT,
        resourceId: agent._id,
        requiredPermission: PermissionBits.EDIT,
      });
      if (!hasEditPermission) {
        return res.status(200).json([]);
      }
    }

    // Collect every file id referenced across the agent's tool resources.
    const agentFileIds = Object.values(agent.tool_resources ?? {}).flatMap((resource) =>
      Array.isArray(resource?.file_ids) ? resource.file_ids : [],
    );

    if (agentFileIds.length === 0) {
      return res.status(200).json([]);
    }

    // Exclude the heavy `text` field from the projection.
    const files = await getFiles({ file_id: { $in: agentFileIds } }, null, { text: 0 });

    res.status(200).json(files);
  } catch (err) {
    logger.error('[/files/agent/:agent_id] Error fetching agent files:', err);
    res.status(500).json({ error: 'Failed to fetch agent files' });
  }
});
|
|
|
|
/** Return the server's file upload configuration. */
router.get('/config', async (req, res) => {
  try {
    const { fileConfig } = req.config;
    res.status(200).json(fileConfig);
  } catch (err) {
    logger.error('[/files] Error getting fileConfig', err);
    res.status(400).json({ message: 'Error in request', error: err.message });
  }
});
|
|
|
|
/**
 * Delete files, honoring ownership and agent-mediated access.
 *
 * Request body:
 * - `files`: array of descriptors; only entries with both `file_id` and
 *   `filepath`, whose id is an OpenAI-style (`file-`/`assistant-`) id or a
 *   valid UUID, are considered.
 * - `agent_id` (optional): non-owned files may still be deletable when the
 *   user has delete access to them via this agent.
 * - `agent_id`/`assistant_id` + `tool_resource` (optional): unlink-only flows
 *   used when the files no longer exist in the database.
 *
 * Responses: 204 nothing valid to delete, 403 when any file is unauthorized,
 * 200 on success, 400 on unexpected errors.
 */
router.delete('/', async (req, res) => {
  try {
    const { files: _files } = req.body;

    /** @type {MongoFile[]} */
    const files = _files.filter((file) => {
      if (!file.file_id) {
        return false;
      }
      if (!file.filepath) {
        return false;
      }

      // OpenAI-style ids pass as-is; anything else must be a UUID.
      if (/^(file|assistant)-/.test(file.file_id)) {
        return true;
      }

      return isUUID.safeParse(file.file_id).success;
    });

    if (files.length === 0) {
      // Note: Express strips the body on 204 responses; the message is advisory.
      res.status(204).json({ message: 'Nothing provided to delete' });
      return;
    }

    const fileIds = files.map((file) => file.file_id);
    const dbFiles = await getFiles({ file_id: { $in: fileIds } });

    // Partition DB records into files owned by the requester vs. others.
    const ownedFiles = [];
    const nonOwnedFiles = [];

    for (const file of dbFiles) {
      if (file.user.toString() === req.user.id.toString()) {
        ownedFiles.push(file);
      } else {
        nonOwnedFiles.push(file);
      }
    }

    if (nonOwnedFiles.length === 0) {
      // Fast path: every matched file belongs to the requester.
      // NOTE(review): when `dbFiles` is empty this branch also short-circuits
      // the agent/assistant unlink branches below (which require
      // dbFiles.length === 0) — confirm that is intended.
      await processDeleteRequest({ req, files: ownedFiles });
      logger.debug(
        `[/files] Files deleted successfully: ${ownedFiles
          .filter((f) => f.file_id)
          .map((f) => f.file_id)
          .join(', ')}`,
      );
      res.status(200).json({ message: 'Files deleted successfully' });
      return;
    }

    let authorizedFiles = [...ownedFiles];
    let unauthorizedFiles = [];

    if (req.body.agent_id && nonOwnedFiles.length > 0) {
      // Non-owned files may still be deletable when shared through an agent.
      const nonOwnedFileIds = nonOwnedFiles.map((f) => f.file_id);
      const accessMap = await hasAccessToFilesViaAgent({
        userId: req.user.id,
        role: req.user.role,
        fileIds: nonOwnedFileIds,
        agentId: req.body.agent_id,
        isDelete: true,
      });

      for (const file of nonOwnedFiles) {
        if (accessMap.get(file.file_id)) {
          authorizedFiles.push(file);
        } else {
          unauthorizedFiles.push(file);
        }
      }
    } else {
      unauthorizedFiles = nonOwnedFiles;
    }

    if (unauthorizedFiles.length > 0) {
      return res.status(403).json({
        message: 'You can only delete files you have access to',
        unauthorizedFiles: unauthorizedFiles.map((f) => f.file_id),
      });
    }

    /* Handle agent unlinking even if no valid files to delete */
    if (req.body.agent_id && req.body.tool_resource && dbFiles.length === 0) {
      const agent = await getAgent({
        id: req.body.agent_id,
      });

      // Fix: guard against the agent having been deleted (getAgent → null),
      // which previously threw a TypeError and surfaced as a generic 400.
      const toolResourceFiles = agent?.tool_resources?.[req.body.tool_resource]?.file_ids ?? [];
      const agentFiles = files.filter((f) => toolResourceFiles.includes(f.file_id));

      await processDeleteRequest({ req, files: agentFiles });
      res.status(200).json({ message: 'File associations removed successfully from agent' });
      return;
    }

    /* Handle assistant unlinking even if no valid files to delete */
    if (req.body.assistant_id && req.body.tool_resource && dbFiles.length === 0) {
      const assistant = await getAssistant({
        id: req.body.assistant_id,
      });

      // Fix: same null-guard as the agent branch above.
      const toolResourceFiles = assistant?.tool_resources?.[req.body.tool_resource]?.file_ids ?? [];
      const assistantFiles = files.filter((f) => toolResourceFiles.includes(f.file_id));

      await processDeleteRequest({ req, files: assistantFiles });
      res.status(200).json({ message: 'File associations removed successfully from assistant' });
      return;
    } else if (
      req.body.assistant_id &&
      req.body.files?.[0]?.filepath === EModelEndpoint.azureAssistants
    ) {
      // Azure assistant files are remote; delete by the provided descriptors.
      await processDeleteRequest({ req, files: req.body.files });
      return res
        .status(200)
        .json({ message: 'File associations removed successfully from Azure Assistant' });
    }

    await processDeleteRequest({ req, files: authorizedFiles });

    logger.debug(
      `[/files] Files deleted successfully: ${authorizedFiles
        .filter((f) => f.file_id)
        .map((f) => f.file_id)
        .join(', ')}`,
    );
    res.status(200).json({ message: 'Files deleted successfully' });
  } catch (error) {
    logger.error('[/files] Error deleting files:', error);
    res.status(400).json({ message: 'Error in request', error: error.message });
  }
});
|
|
|
|
/**
 * Checks whether a string is a valid 21-character nanoid-style identifier
 * (letters, digits, underscore, or hyphen only).
 * @param {string} str - The candidate identifier.
 * @returns {boolean} True when `str` matches the expected ID format.
 */
function isValidID(str) {
  const idPattern = /^[A-Za-z0-9_-]{21}$/;
  return idPattern.test(str);
}
|
|
|
|
/**
 * Download a code-interpreter output file.
 * Both path params must be 21-character nanoid-style ids; the stream is
 * proxied from the code execution service using the user's CODE_API_KEY.
 * @route GET /files/code/download/:session_id/:fileId
 */
router.get('/code/download/:session_id/:fileId', async (req, res) => {
  try {
    const { session_id, fileId } = req.params;
    // Fix: include the requesting user's id — the original prefix ended with
    // the literal "by user " and never appended anything, truncating the log.
    const logPrefix = `Session ID: ${session_id} | File ID: ${fileId} | Code output download requested by user ${req.user.id}`;
    logger.debug(logPrefix);

    if (!session_id || !fileId) {
      return res.status(400).send('Bad request');
    }

    if (!isValidID(session_id) || !isValidID(fileId)) {
      logger.debug(`${logPrefix} invalid session_id or fileId`);
      return res.status(400).send('Bad request');
    }

    const { getDownloadStream } = getStrategyFunctions(FileSources.execute_code);
    if (!getDownloadStream) {
      logger.warn(
        `${logPrefix} has no stream method implemented for ${FileSources.execute_code} source`,
      );
      return res.status(501).send('Not Implemented');
    }

    // Resolve the user's code-execution API key from stored credentials.
    const result = await loadAuthValues({ userId: req.user.id, authFields: [EnvVar.CODE_API_KEY] });

    /** @type {AxiosResponse<ReadableStream> | undefined} */
    const response = await getDownloadStream(
      `${session_id}/${fileId}`,
      result[EnvVar.CODE_API_KEY],
    );
    // Mirror the upstream headers and proxy the stream straight through.
    res.set(response.headers);
    response.data.pipe(res);
  } catch (error) {
    logger.error('Error downloading file:', error);
    res.status(500).send('Error downloading file');
  }
});
|
|
|
|
/**
 * Download a user file.
 * The `fileAccess` middleware runs first and attaches the validated file
 * record to `req.fileAccess`, so no further authorization happens here.
 * @route GET /files/download/:userId/:file_id
 */
router.get('/download/:userId/:file_id', fileAccess, async (req, res) => {
  try {
    const { userId, file_id } = req.params;
    logger.debug(`File download requested by user ${userId}: ${file_id}`);

    // Access already validated by fileAccess middleware
    const file = req.fileAccess.file;

    // OpenAI-stored files need the originating model to build a client below.
    if (checkOpenAIStorage(file.source) && !file.model) {
      logger.warn(`File download requested by user ${userId} has no associated model: ${file_id}`);
      return res.status(400).send('The model used when creating this file is not available');
    }

    const { getDownloadStream } = getStrategyFunctions(file.source);
    if (!getDownloadStream) {
      logger.warn(
        `File download requested by user ${userId} has no stream method implemented: ${file.source}`,
      );
      return res.status(501).send('Not Implemented');
    }

    // Headers are set lazily, only once a download stream has been obtained.
    const setHeaders = () => {
      const cleanedFilename = cleanFileName(file.filename);
      res.setHeader('Content-Disposition', `attachment; filename="${cleanedFilename}"`);
      res.setHeader('Content-Type', 'application/octet-stream');
      res.setHeader('X-File-Metadata', JSON.stringify(file));
    };

    if (checkOpenAIStorage(file.source)) {
      // NOTE(review): presumably getOpenAIClient reads the model from
      // req.body, which is why the body is overwritten here — confirm.
      req.body = { model: file.model };
      const endpointMap = {
        [FileSources.openai]: EModelEndpoint.assistants,
        [FileSources.azure]: EModelEndpoint.azureAssistants,
      };
      const { openai } = await getOpenAIClient({
        req,
        res,
        overrideEndpoint: endpointMap[file.source],
      });
      logger.debug(`Downloading file ${file_id} from OpenAI`);
      const passThrough = await getDownloadStream(file_id, openai);
      setHeaders();
      logger.debug(`File ${file_id} downloaded from OpenAI`);

      // Handle both Node.js and Web streams
      // (a `getReader` function marks a WHATWG ReadableStream, which must be
      // adapted via Readable.fromWeb before piping into the response).
      const stream =
        passThrough.body && typeof passThrough.body.getReader === 'function'
          ? Readable.fromWeb(passThrough.body)
          : passThrough.body;

      stream.pipe(res);
    } else {
      const fileStream = await getDownloadStream(req, file.filepath);

      // Log stream failures; headers may already be sent, so no error response.
      fileStream.on('error', (streamError) => {
        logger.error('[DOWNLOAD ROUTE] Stream error:', streamError);
      });

      setHeaders();
      fileStream.pipe(res);
    }
  } catch (error) {
    logger.error('[DOWNLOAD ROUTE] Error downloading file:', error);
    res.status(500).send('Error downloading file');
  }
});
|
|
|
|
/**
 * Upload a file.
 * Assistants-endpoint uploads go to `processFileUpload`; agent uploads go to
 * `processAgentFileUpload` after a permission check. The multer temp file is
 * always removed, either on the error path or by the `finally` cleanup.
 */
router.post('/', async (req, res) => {
  const metadata = req.body;
  let needsCleanup = true;

  // Best-effort removal of the multer temp file; reports success so callers
  // can decide whether the finally-block still has work to do.
  const removeTempFile = async (failureLog) => {
    try {
      await fs.unlink(req.file.path);
      return true;
    } catch (unlinkError) {
      logger.error(failureLog, unlinkError);
      return false;
    }
  };

  try {
    filterFile({ req });

    // Preserve the client-generated id and substitute the server-assigned one.
    metadata.temp_file_id = metadata.file_id;
    metadata.file_id = req.file_id;

    if (isAssistantsEndpoint(metadata.endpoint)) {
      return await processFileUpload({ req, res, metadata });
    }

    // On denial, verifyAgentUploadPermission has already written the HTTP
    // response (it receives `res`), so we only need to stop here.
    const denied = await verifyAgentUploadPermission({
      req,
      res,
      metadata,
      getAgent,
      checkPermission,
    });
    if (denied) {
      return;
    }

    return await processAgentFileUpload({ req, res, metadata });
  } catch (error) {
    logger.error('[/files] Error processing file:', error);

    let message = 'Error processing file';
    const detail = error.message ?? '';
    if (detail.includes('file_ids')) {
      message = `${message}: ${detail}`;
    }
    if (
      detail.includes('Invalid file format') ||
      detail.includes('No OCR result') ||
      detail.includes('exceeds token limit')
    ) {
      message = detail;
    }

    // Only skip the finally-block cleanup if the unlink actually succeeded.
    if (await removeTempFile('[/files] Error deleting file:')) {
      needsCleanup = false;
    }
    res.status(500).json({ message });
  } finally {
    if (needsCleanup) {
      await removeTempFile('[/files] Error deleting file after file processing:');
    } else {
      logger.debug('[/files] File processing completed without cleanup');
    }
  }
});
|
|
|
|
module.exports = router;
|