Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-09-22 06:00:56 +02:00)

Merge branch 'main' into Enhancement-web-search

Commit 8eb8ef2e40: 40 changed files with 2382 additions and 174 deletions
api/cache/cacheConfig.js (25 changes, vendored)

@@ -1,4 +1,5 @@
const fs = require('fs');
const { logger } = require('@librechat/data-schemas');
const { math, isEnabled } = require('@librechat/api');
const { CacheKeys } = require('librechat-data-provider');

@@ -34,13 +35,35 @@ if (FORCED_IN_MEMORY_CACHE_NAMESPACES.length > 0) {
  }
}

/** Helper function to safely read Redis CA certificate from file
 * @returns {string|null} The contents of the CA certificate file, or null if not set or on error
 */
const getRedisCA = () => {
  const caPath = process.env.REDIS_CA;
  if (!caPath) {
    return null;
  }

  try {
    if (fs.existsSync(caPath)) {
      return fs.readFileSync(caPath, 'utf8');
    } else {
      logger.warn(`Redis CA certificate file not found: ${caPath}`);
      return null;
    }
  } catch (error) {
    logger.error(`Failed to read Redis CA certificate file '${caPath}':`, error);
    return null;
  }
};

const cacheConfig = {
  FORCED_IN_MEMORY_CACHE_NAMESPACES,
  USE_REDIS,
  REDIS_URI: process.env.REDIS_URI,
  REDIS_USERNAME: process.env.REDIS_USERNAME,
  REDIS_PASSWORD: process.env.REDIS_PASSWORD,
  REDIS_CA: process.env.REDIS_CA ? fs.readFileSync(process.env.REDIS_CA, 'utf8') : null,
  REDIS_CA: getRedisCA(),
  REDIS_KEY_PREFIX: process.env[REDIS_KEY_PREFIX_VAR] || REDIS_KEY_PREFIX || '',
  REDIS_MAX_LISTENERS: math(process.env.REDIS_MAX_LISTENERS, 40),
  REDIS_PING_INTERVAL: math(process.env.REDIS_PING_INTERVAL, 0),
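Not part of this commit, but as a minimal sketch of where the loaded CA string ends up: an ioredis-style client can pass it straight into its TLS options. The option names below are illustrative and may not match LibreChat's actual client setup.

// Sketch only (assumptions: ioredis client, cacheConfig exported as shown above).
import Redis from 'ioredis';
import { cacheConfig } from './cacheConfig';

const redis = new Redis(cacheConfig.REDIS_URI ?? 'redis://localhost:6379', {
  username: cacheConfig.REDIS_USERNAME,
  password: cacheConfig.REDIS_PASSWORD,
  // Only enable TLS when getRedisCA() actually returned certificate contents
  ...(cacheConfig.REDIS_CA ? { tls: { ca: cacheConfig.REDIS_CA } } : {}),
});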
@@ -49,6 +49,14 @@ const createAgent = async (agentData) => {
 */
const getAgent = async (searchParameter) => await Agent.findOne(searchParameter).lean();

/**
 * Get multiple agent documents based on the provided search parameters.
 *
 * @param {Object} searchParameter - The search parameters to find agents.
 * @returns {Promise<Agent[]>} Array of agent documents as plain objects.
 */
const getAgents = async (searchParameter) => await Agent.find(searchParameter).lean();

/**
 * Load an agent based on the provided ID
 *

@@ -835,6 +843,7 @@ const countPromotedAgents = async () => {

module.exports = {
  getAgent,
  getAgents,
  loadAgent,
  createAgent,
  updateAgent,
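As an aside (not in the diff), the new getAgents helper takes the same Mongo-style filter as getAgent but returns every matching agent, which the file-access middleware below depends on. A hypothetical call:

// Hypothetical usage sketch; the module itself is CommonJS, the import is illustrative.
import { getAgents } from '~/models/Agent';

async function findAgentsReferencingFile(fileId: string) {
  // Returns an array of lean agent documents (possibly empty), never a single object
  return getAgents({ 'tool_resources.context.file_ids': fileId });
}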
@@ -42,7 +42,7 @@ const getToolFilesByIds = async (fileIds, toolResourceSet) => {
    $or: [],
  };

  if (toolResourceSet.has(EToolResources.ocr)) {
  if (toolResourceSet.has(EToolResources.context)) {
    filter.$or.push({ text: { $exists: true, $ne: null }, context: FileContext.agents });
  }
  if (toolResourceSet.has(EToolResources.file_search)) {
@@ -158,7 +158,7 @@ describe('duplicateAgent', () => {
    });
  });

  it('should handle tool_resources.ocr correctly', async () => {
  it('should convert `tool_resources.ocr` to `tool_resources.context`', async () => {
    const mockAgent = {
      id: 'agent_123',
      name: 'Test Agent',

@@ -178,7 +178,7 @@ describe('duplicateAgent', () => {
    expect(createAgent).toHaveBeenCalledWith(
      expect.objectContaining({
        tool_resources: {
          ocr: { enabled: true, config: 'test' },
          context: { enabled: true, config: 'test' },
        },
      }),
    );
@@ -2,7 +2,12 @@ const { z } = require('zod');
const fs = require('fs').promises;
const { nanoid } = require('nanoid');
const { logger } = require('@librechat/data-schemas');
const { agentCreateSchema, agentUpdateSchema } = require('@librechat/api');
const {
  agentCreateSchema,
  agentUpdateSchema,
  mergeAgentOcrConversion,
  convertOcrToContextInPlace,
} = require('@librechat/api');
const {
  Tools,
  Constants,
@@ -198,19 +203,32 @@ const getAgentHandler = async (req, res, expandProperties = false) => {
 * @param {object} req.params - Request params
 * @param {string} req.params.id - Agent identifier.
 * @param {AgentUpdateParams} req.body - The Agent update parameters.
 * @returns {Agent} 200 - success response - application/json
 * @returns {Promise<Agent>} 200 - success response - application/json
 */
const updateAgentHandler = async (req, res) => {
  try {
    const id = req.params.id;
    const validatedData = agentUpdateSchema.parse(req.body);
    const { _id, ...updateData } = removeNullishValues(validatedData);

    // Convert OCR to context in incoming updateData
    convertOcrToContextInPlace(updateData);

    const existingAgent = await getAgent({ id });

    if (!existingAgent) {
      return res.status(404).json({ error: 'Agent not found' });
    }

    // Convert legacy OCR tool resource to context format in existing agent
    const ocrConversion = mergeAgentOcrConversion(existingAgent, updateData);
    if (ocrConversion.tool_resources) {
      updateData.tool_resources = ocrConversion.tool_resources;
    }
    if (ocrConversion.tools) {
      updateData.tools = ocrConversion.tools;
    }

    let updatedAgent =
      Object.keys(updateData).length > 0
        ? await updateAgent({ id }, updateData, {
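To make the intent of the two new helpers concrete, the following is a hedged sketch of the conversion they appear to perform, inferred from the tests and call sites in this commit; the real implementations live in @librechat/api and may differ.

// Illustrative approximation of convertOcrToContextInPlace, not the actual implementation.
type ToolResources = Record<string, { file_ids?: string[] } | undefined>;

function convertOcrToContextSketch(updateData: { tools?: string[]; tool_resources?: ToolResources }) {
  const ocr = updateData.tool_resources?.ocr;
  if (ocr) {
    const context = updateData.tool_resources?.context ?? {};
    updateData.tool_resources = {
      ...updateData.tool_resources,
      // Fold legacy `ocr` file ids into `context`, keeping any existing context files
      context: { file_ids: [...(context.file_ids ?? []), ...(ocr.file_ids ?? [])] },
    };
    delete updateData.tool_resources.ocr;
  }
  if (updateData.tools?.includes('ocr')) {
    // Rename the legacy `ocr` capability to `context`
    updateData.tools = updateData.tools.map((t) => (t === 'ocr' ? 'context' : t));
  }
}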
@@ -255,7 +273,7 @@ const updateAgentHandler = async (req, res) => {
 * @param {object} req - Express Request
 * @param {object} req.params - Request params
 * @param {string} req.params.id - Agent identifier.
 * @returns {Agent} 201 - success response - application/json
 * @returns {Promise<Agent>} 201 - success response - application/json
 */
const duplicateAgentHandler = async (req, res) => {
  const { id } = req.params;
@@ -288,9 +306,19 @@ const duplicateAgentHandler = async (req, res) => {
    hour12: false,
  })})`;

  if (_tool_resources?.[EToolResources.context]) {
    cloneData.tool_resources = {
      [EToolResources.context]: _tool_resources[EToolResources.context],
    };
  }

  if (_tool_resources?.[EToolResources.ocr]) {
    cloneData.tool_resources = {
      [EToolResources.ocr]: _tool_resources[EToolResources.ocr],
      /** Legacy conversion from `ocr` to `context` */
      [EToolResources.context]: {
        ...(_tool_resources[EToolResources.context] ?? {}),
        ..._tool_resources[EToolResources.ocr],
      },
    };
  }
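As a worked example with hypothetical values, duplicating a legacy agent whose stored resources are only `ocr` falls into the second branch above and produces a clone that keeps `ocr` and gains an equivalent `context` entry:

// Hypothetical input: the agent being duplicated still uses the legacy key
const _tool_resources = { ocr: { file_ids: ['f1'] } };

// Resulting cloneData.tool_resources from the branch above
const cloned = {
  ocr: { file_ids: ['f1'] },
  context: { file_ids: ['f1'] }, // legacy `ocr` merged into `context`
};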
@@ -382,7 +410,7 @@ const duplicateAgentHandler = async (req, res) => {
 * @param {object} req - Express Request
 * @param {object} req.params - Request params
 * @param {string} req.params.id - Agent identifier.
 * @returns {Agent} 200 - success response - application/json
 * @returns {Promise<Agent>} 200 - success response - application/json
 */
const deleteAgentHandler = async (req, res) => {
  try {

@@ -484,7 +512,7 @@ const getListAgentsHandler = async (req, res) => {
 * @param {Express.Multer.File} req.file - The avatar image file.
 * @param {object} req.body - Request body
 * @param {string} [req.body.avatar] - Optional avatar for the agent's avatar.
 * @returns {Object} 200 - success response - application/json
 * @returns {Promise<void>} 200 - success response - application/json
 */
const uploadAgentAvatarHandler = async (req, res) => {
  try {
@@ -512,6 +512,7 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
    mockReq.params.id = existingAgentId;
    mockReq.body = {
      tool_resources: {
        /** Legacy conversion from `ocr` to `context` */
        ocr: {
          file_ids: ['ocr1', 'ocr2'],
        },

@@ -531,7 +532,8 @@ describe('Agent Controllers - Mass Assignment Protection', () => {

    const updatedAgent = mockRes.json.mock.calls[0][0];
    expect(updatedAgent.tool_resources).toBeDefined();
    expect(updatedAgent.tool_resources.ocr).toBeDefined();
    expect(updatedAgent.tool_resources.ocr).toBeUndefined();
    expect(updatedAgent.tool_resources.context).toBeDefined();
    expect(updatedAgent.tool_resources.execute_code).toBeDefined();
    expect(updatedAgent.tool_resources.invalid_tool).toBeUndefined();
  });
@@ -1,7 +1,7 @@
const { logger } = require('@librechat/data-schemas');
const { PermissionBits, hasPermissions, ResourceType } = require('librechat-data-provider');
const { getEffectivePermissions } = require('~/server/services/PermissionService');
const { getAgent } = require('~/models/Agent');
const { getAgents } = require('~/models/Agent');
const { getFiles } = require('~/models/File');

/**

@@ -10,11 +10,12 @@ const { getFiles } = require('~/models/File');
 */
const checkAgentBasedFileAccess = async ({ userId, role, fileId }) => {
  try {
    // Find agents that have this file in their tool_resources
    const agentsWithFile = await getAgent({
    /** Agents that have this file in their tool_resources */
    const agentsWithFile = await getAgents({
      $or: [
        { 'tool_resources.file_search.file_ids': fileId },
        { 'tool_resources.execute_code.file_ids': fileId },
        { 'tool_resources.file_search.file_ids': fileId },
        { 'tool_resources.context.file_ids': fileId },
        { 'tool_resources.ocr.file_ids': fileId },
      ],
    });
@@ -24,7 +25,7 @@ const checkAgentBasedFileAccess = async ({ userId, role, fileId }) => {
    }

    // Check if user has access to any of these agents
    for (const agent of Array.isArray(agentsWithFile) ? agentsWithFile : [agentsWithFile]) {
    for (const agent of agentsWithFile) {
      // Check if user is the agent author
      if (agent.author && agent.author.toString() === userId) {
        logger.debug(`[fileAccess] User is author of agent ${agent.id}`);

@@ -83,7 +84,6 @@ const fileAccess = async (req, res, next) => {
      });
    }

    // Get the file
    const [file] = await getFiles({ file_id: fileId });
    if (!file) {
      return res.status(404).json({
@@ -92,20 +92,18 @@ const fileAccess = async (req, res, next) => {
      });
    }

    // Check if user owns the file
    if (file.user && file.user.toString() === userId) {
      req.fileAccess = { file };
      return next();
    }

    // Check agent-based access (file inherits agent permissions)
    /** Agent-based access (file inherits agent permissions) */
    const hasAgentAccess = await checkAgentBasedFileAccess({ userId, role: userRole, fileId });
    if (hasAgentAccess) {
      req.fileAccess = { file };
      return next();
    }

    // No access
    logger.warn(`[fileAccess] User ${userId} denied access to file ${fileId}`);
    return res.status(403).json({
      error: 'Forbidden',
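For orientation (not part of this diff, and the route path is hypothetical), fileAccess is meant to run before file-serving handlers so the ownership and agent-based checks above happen first:

// Hypothetical mounting example; the real route definitions live elsewhere in the API.
import express from 'express';
import { fileAccess } from '~/server/middleware/accessResources/fileAccess';

const router = express.Router();

router.get('/files/:file_id/download', fileAccess, (req: any, res) => {
  // The middleware attached the resolved file after the access check passed
  res.json(req.fileAccess.file);
});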
api/server/middleware/accessResources/fileAccess.spec.js (new file, 483 lines)

@@ -0,0 +1,483 @@
|
|||
const mongoose = require('mongoose');
|
||||
const { ResourceType, PrincipalType, PrincipalModel } = require('librechat-data-provider');
|
||||
const { MongoMemoryServer } = require('mongodb-memory-server');
|
||||
const { fileAccess } = require('./fileAccess');
|
||||
const { User, Role, AclEntry } = require('~/db/models');
|
||||
const { createAgent } = require('~/models/Agent');
|
||||
const { createFile } = require('~/models/File');
|
||||
|
||||
describe('fileAccess middleware', () => {
|
||||
let mongoServer;
|
||||
let req, res, next;
|
||||
let testUser, otherUser, thirdUser;
|
||||
|
||||
beforeAll(async () => {
|
||||
mongoServer = await MongoMemoryServer.create();
|
||||
const mongoUri = mongoServer.getUri();
|
||||
await mongoose.connect(mongoUri);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await mongoose.disconnect();
|
||||
await mongoServer.stop();
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
await mongoose.connection.dropDatabase();
|
||||
|
||||
// Create test role
|
||||
await Role.create({
|
||||
name: 'test-role',
|
||||
permissions: {
|
||||
AGENTS: {
|
||||
USE: true,
|
||||
CREATE: true,
|
||||
SHARED_GLOBAL: false,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Create test users
|
||||
testUser = await User.create({
|
||||
email: 'test@example.com',
|
||||
name: 'Test User',
|
||||
username: 'testuser',
|
||||
role: 'test-role',
|
||||
});
|
||||
|
||||
otherUser = await User.create({
|
||||
email: 'other@example.com',
|
||||
name: 'Other User',
|
||||
username: 'otheruser',
|
||||
role: 'test-role',
|
||||
});
|
||||
|
||||
thirdUser = await User.create({
|
||||
email: 'third@example.com',
|
||||
name: 'Third User',
|
||||
username: 'thirduser',
|
||||
role: 'test-role',
|
||||
});
|
||||
|
||||
// Setup request/response objects
|
||||
req = {
|
||||
user: { id: testUser._id.toString(), role: testUser.role },
|
||||
params: {},
|
||||
};
|
||||
res = {
|
||||
status: jest.fn().mockReturnThis(),
|
||||
json: jest.fn(),
|
||||
};
|
||||
next = jest.fn();
|
||||
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('basic file access', () => {
|
||||
test('should allow access when user owns the file', async () => {
|
||||
// Create a file owned by testUser
|
||||
await createFile({
|
||||
user: testUser._id.toString(),
|
||||
file_id: 'file_owned_by_user',
|
||||
filepath: '/test/file.txt',
|
||||
filename: 'file.txt',
|
||||
type: 'text/plain',
|
||||
size: 100,
|
||||
});
|
||||
|
||||
req.params.file_id = 'file_owned_by_user';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).toHaveBeenCalled();
|
||||
expect(req.fileAccess).toBeDefined();
|
||||
expect(req.fileAccess.file).toBeDefined();
|
||||
expect(res.status).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should deny access when user does not own the file and no agent access', async () => {
|
||||
// Create a file owned by otherUser
|
||||
await createFile({
|
||||
user: otherUser._id.toString(),
|
||||
file_id: 'file_owned_by_other',
|
||||
filepath: '/test/file.txt',
|
||||
filename: 'file.txt',
|
||||
type: 'text/plain',
|
||||
size: 100,
|
||||
});
|
||||
|
||||
req.params.file_id = 'file_owned_by_other';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
expect(res.json).toHaveBeenCalledWith({
|
||||
error: 'Forbidden',
|
||||
message: 'Insufficient permissions to access this file',
|
||||
});
|
||||
});
|
||||
|
||||
test('should return 404 when file does not exist', async () => {
|
||||
req.params.file_id = 'non_existent_file';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(res.status).toHaveBeenCalledWith(404);
|
||||
expect(res.json).toHaveBeenCalledWith({
|
||||
error: 'Not Found',
|
||||
message: 'File not found',
|
||||
});
|
||||
});
|
||||
|
||||
test('should return 400 when file_id is missing', async () => {
|
||||
// Don't set file_id in params
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(res.status).toHaveBeenCalledWith(400);
|
||||
expect(res.json).toHaveBeenCalledWith({
|
||||
error: 'Bad Request',
|
||||
message: 'file_id is required',
|
||||
});
|
||||
});
|
||||
|
||||
test('should return 401 when user is not authenticated', async () => {
|
||||
req.user = null;
|
||||
req.params.file_id = 'some_file';
|
||||
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(res.status).toHaveBeenCalledWith(401);
|
||||
expect(res.json).toHaveBeenCalledWith({
|
||||
error: 'Unauthorized',
|
||||
message: 'Authentication required',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('agent-based file access', () => {
|
||||
beforeEach(async () => {
|
||||
// Create a file owned by otherUser (not testUser)
|
||||
await createFile({
|
||||
user: otherUser._id.toString(),
|
||||
file_id: 'shared_file_via_agent',
|
||||
filepath: '/test/shared.txt',
|
||||
filename: 'shared.txt',
|
||||
type: 'text/plain',
|
||||
size: 100,
|
||||
});
|
||||
});
|
||||
|
||||
test('should allow access when user is author of agent with file', async () => {
|
||||
// Create agent owned by testUser with the file
|
||||
await createAgent({
|
||||
id: `agent_${Date.now()}`,
|
||||
name: 'Test Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: testUser._id,
|
||||
tool_resources: {
|
||||
file_search: {
|
||||
file_ids: ['shared_file_via_agent'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
req.params.file_id = 'shared_file_via_agent';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).toHaveBeenCalled();
|
||||
expect(req.fileAccess).toBeDefined();
|
||||
expect(req.fileAccess.file).toBeDefined();
|
||||
});
|
||||
|
||||
test('should allow access when user has VIEW permission on agent with file', async () => {
|
||||
// Create agent owned by otherUser
|
||||
const agent = await createAgent({
|
||||
id: `agent_${Date.now()}`,
|
||||
name: 'Shared Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: otherUser._id,
|
||||
tool_resources: {
|
||||
execute_code: {
|
||||
file_ids: ['shared_file_via_agent'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Grant VIEW permission to testUser
|
||||
await AclEntry.create({
|
||||
principalType: PrincipalType.USER,
|
||||
principalId: testUser._id,
|
||||
principalModel: PrincipalModel.USER,
|
||||
resourceType: ResourceType.AGENT,
|
||||
resourceId: agent._id,
|
||||
permBits: 1, // VIEW permission
|
||||
grantedBy: otherUser._id,
|
||||
});
|
||||
|
||||
req.params.file_id = 'shared_file_via_agent';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).toHaveBeenCalled();
|
||||
expect(req.fileAccess).toBeDefined();
|
||||
});
|
||||
|
||||
test('should check file in ocr tool_resources', async () => {
|
||||
await createAgent({
|
||||
id: `agent_ocr_${Date.now()}`,
|
||||
name: 'OCR Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: testUser._id,
|
||||
tool_resources: {
|
||||
ocr: {
|
||||
file_ids: ['shared_file_via_agent'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
req.params.file_id = 'shared_file_via_agent';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).toHaveBeenCalled();
|
||||
expect(req.fileAccess).toBeDefined();
|
||||
});
|
||||
|
||||
test('should deny access when user has no permission on agent with file', async () => {
|
||||
// Create agent owned by otherUser without granting permission to testUser
|
||||
const agent = await createAgent({
|
||||
id: `agent_${Date.now()}`,
|
||||
name: 'Private Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: otherUser._id,
|
||||
tool_resources: {
|
||||
file_search: {
|
||||
file_ids: ['shared_file_via_agent'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Create ACL entry for otherUser only (owner)
|
||||
await AclEntry.create({
|
||||
principalType: PrincipalType.USER,
|
||||
principalId: otherUser._id,
|
||||
principalModel: PrincipalModel.USER,
|
||||
resourceType: ResourceType.AGENT,
|
||||
resourceId: agent._id,
|
||||
permBits: 15, // All permissions
|
||||
grantedBy: otherUser._id,
|
||||
});
|
||||
|
||||
req.params.file_id = 'shared_file_via_agent';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
});
|
||||
});
|
||||
|
||||
describe('multiple agents with same file', () => {
|
||||
/**
|
||||
* This test suite verifies that when multiple agents have the same file,
|
||||
* all agents are checked for permissions, not just the first one found.
|
||||
* This ensures users can access files through any agent they have permission for.
|
||||
*/
|
||||
|
||||
test('should check ALL agents with file, not just first one', async () => {
|
||||
// Create a file owned by someone else
|
||||
await createFile({
|
||||
user: otherUser._id.toString(),
|
||||
file_id: 'multi_agent_file',
|
||||
filepath: '/test/multi.txt',
|
||||
filename: 'multi.txt',
|
||||
type: 'text/plain',
|
||||
size: 100,
|
||||
});
|
||||
|
||||
// Create first agent (owned by otherUser, no access for testUser)
|
||||
const agent1 = await createAgent({
|
||||
id: 'agent_no_access',
|
||||
name: 'No Access Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: otherUser._id,
|
||||
tool_resources: {
|
||||
file_search: {
|
||||
file_ids: ['multi_agent_file'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Create ACL for agent1 - only otherUser has access
|
||||
await AclEntry.create({
|
||||
principalType: PrincipalType.USER,
|
||||
principalId: otherUser._id,
|
||||
principalModel: PrincipalModel.USER,
|
||||
resourceType: ResourceType.AGENT,
|
||||
resourceId: agent1._id,
|
||||
permBits: 15,
|
||||
grantedBy: otherUser._id,
|
||||
});
|
||||
|
||||
// Create second agent (owned by thirdUser, but testUser has VIEW access)
|
||||
const agent2 = await createAgent({
|
||||
id: 'agent_with_access',
|
||||
name: 'Accessible Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: thirdUser._id,
|
||||
tool_resources: {
|
||||
file_search: {
|
||||
file_ids: ['multi_agent_file'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Grant testUser VIEW access to agent2
|
||||
await AclEntry.create({
|
||||
principalType: PrincipalType.USER,
|
||||
principalId: testUser._id,
|
||||
principalModel: PrincipalModel.USER,
|
||||
resourceType: ResourceType.AGENT,
|
||||
resourceId: agent2._id,
|
||||
permBits: 1, // VIEW permission
|
||||
grantedBy: thirdUser._id,
|
||||
});
|
||||
|
||||
req.params.file_id = 'multi_agent_file';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
/**
|
||||
* Should succeed because testUser has access to agent2,
|
||||
* even though they don't have access to agent1.
|
||||
* The fix ensures all agents are checked, not just the first one.
|
||||
*/
|
||||
expect(next).toHaveBeenCalled();
|
||||
expect(req.fileAccess).toBeDefined();
|
||||
expect(res.status).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should find file in any agent tool_resources type', async () => {
|
||||
// Create a file
|
||||
await createFile({
|
||||
user: otherUser._id.toString(),
|
||||
file_id: 'multi_tool_file',
|
||||
filepath: '/test/tool.txt',
|
||||
filename: 'tool.txt',
|
||||
type: 'text/plain',
|
||||
size: 100,
|
||||
});
|
||||
|
||||
// Agent 1: file in file_search (no access for testUser)
|
||||
await createAgent({
|
||||
id: 'agent_file_search',
|
||||
name: 'File Search Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: otherUser._id,
|
||||
tool_resources: {
|
||||
file_search: {
|
||||
file_ids: ['multi_tool_file'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Agent 2: same file in execute_code (testUser has access)
|
||||
await createAgent({
|
||||
id: 'agent_execute_code',
|
||||
name: 'Execute Code Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: thirdUser._id,
|
||||
tool_resources: {
|
||||
execute_code: {
|
||||
file_ids: ['multi_tool_file'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Agent 3: same file in ocr (testUser also has access)
|
||||
await createAgent({
|
||||
id: 'agent_ocr',
|
||||
name: 'OCR Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: testUser._id, // testUser owns this one
|
||||
tool_resources: {
|
||||
ocr: {
|
||||
file_ids: ['multi_tool_file'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
req.params.file_id = 'multi_tool_file';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
/**
|
||||
* Should succeed because testUser owns agent3,
|
||||
* even if other agents with the file are found first.
|
||||
*/
|
||||
expect(next).toHaveBeenCalled();
|
||||
expect(req.fileAccess).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('edge cases', () => {
|
||||
test('should handle agent with empty tool_resources', async () => {
|
||||
await createFile({
|
||||
user: otherUser._id.toString(),
|
||||
file_id: 'orphan_file',
|
||||
filepath: '/test/orphan.txt',
|
||||
filename: 'orphan.txt',
|
||||
type: 'text/plain',
|
||||
size: 100,
|
||||
});
|
||||
|
||||
// Create agent with no files in tool_resources
|
||||
await createAgent({
|
||||
id: `agent_empty_${Date.now()}`,
|
||||
name: 'Empty Resources Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: testUser._id,
|
||||
tool_resources: {},
|
||||
});
|
||||
|
||||
req.params.file_id = 'orphan_file';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
});
|
||||
|
||||
test('should handle agent with null tool_resources', async () => {
|
||||
await createFile({
|
||||
user: otherUser._id.toString(),
|
||||
file_id: 'another_orphan_file',
|
||||
filepath: '/test/orphan2.txt',
|
||||
filename: 'orphan2.txt',
|
||||
type: 'text/plain',
|
||||
size: 100,
|
||||
});
|
||||
|
||||
// Create agent with null tool_resources
|
||||
await createAgent({
|
||||
id: `agent_null_${Date.now()}`,
|
||||
name: 'Null Resources Agent',
|
||||
provider: 'openai',
|
||||
model: 'gpt-4',
|
||||
author: testUser._id,
|
||||
tool_resources: null,
|
||||
});
|
||||
|
||||
req.params.file_id = 'another_orphan_file';
|
||||
await fileAccess(req, res, next);
|
||||
|
||||
expect(next).not.toHaveBeenCalled();
|
||||
expect(res.status).toHaveBeenCalledWith(403);
|
||||
});
|
||||
});
|
||||
});
|
|
@@ -552,7 +552,7 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
      throw new Error('File search is not enabled for Agents');
    }
    // Note: File search processing continues to dual storage logic below
  } else if (tool_resource === EToolResources.ocr) {
  } else if (tool_resource === EToolResources.context) {
    const { file_id, temp_file_id = null } = metadata;

    /**
@@ -353,7 +353,12 @@ async function processRequiredActions(client, requiredActions) {
async function loadAgentTools({ req, res, agent, signal, tool_resources, openAIApiKey }) {
  if (!agent.tools || agent.tools.length === 0) {
    return {};
  } else if (agent.tools && agent.tools.length === 1 && agent.tools[0] === AgentCapabilities.ocr) {
  } else if (
    agent.tools &&
    agent.tools.length === 1 &&
    /** Legacy handling for `ocr` as may still exist in existing Agents */
    (agent.tools[0] === AgentCapabilities.context || agent.tools[0] === AgentCapabilities.ocr)
  ) {
    return {};
  }
client/src/Providers/DragDropContext.tsx (new file, 32 lines)

@@ -0,0 +1,32 @@
import React, { createContext, useContext, useMemo } from 'react';
import { useChatContext } from './ChatContext';

interface DragDropContextValue {
  conversationId: string | null | undefined;
  agentId: string | null | undefined;
}

const DragDropContext = createContext<DragDropContextValue | undefined>(undefined);

export function DragDropProvider({ children }: { children: React.ReactNode }) {
  const { conversation } = useChatContext();

  /** Context value only created when conversation fields change */
  const contextValue = useMemo<DragDropContextValue>(
    () => ({
      conversationId: conversation?.conversationId,
      agentId: conversation?.agent_id,
    }),
    [conversation?.conversationId, conversation?.agent_id],
  );

  return <DragDropContext.Provider value={contextValue}>{children}</DragDropContext.Provider>;
}

export function useDragDropContext() {
  const context = useContext(DragDropContext);
  if (!context) {
    throw new Error('useDragDropContext must be used within DragDropProvider');
  }
  return context;
}
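A small consumption sketch (the consumer component is hypothetical): anything rendered inside DragDropProvider can read the current conversation and agent ids through useDragDropContext, and only re-renders when those two fields change. In the app, the provider itself must sit inside the chat providers, since it reads useChatContext.

// Hypothetical consumer, for illustration only.
import React from 'react';
import { DragDropProvider, useDragDropContext } from '~/Providers';

function DropTargetLabel() {
  const { conversationId, agentId } = useDragDropContext();
  return <span>{`Dropping into ${conversationId ?? 'new convo'} (agent: ${agentId ?? 'none'})`}</span>;
}

export function Example() {
  return (
    <DragDropProvider>
      <DropTargetLabel />
    </DragDropProvider>
  );
}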
@@ -23,6 +23,7 @@ export * from './SetConvoContext';
export * from './SearchContext';
export * from './BadgeRowContext';
export * from './SidePanelContext';
export * from './DragDropContext';
export * from './MCPPanelContext';
export * from './ArtifactsContext';
export * from './PromptGroupsContext';
@@ -11,9 +11,9 @@ import {
  AgentListResponse,
} from 'librechat-data-provider';
import type t from 'librechat-data-provider';
import { useLocalize, useDefaultConvo } from '~/hooks';
import { useChatContext } from '~/Providers';
import { renderAgentAvatar } from '~/utils';
import { useLocalize } from '~/hooks';

interface SupportContact {
  name?: string;
|
|||
*/
|
||||
const AgentDetail: React.FC<AgentDetailProps> = ({ agent, isOpen, onClose }) => {
|
||||
const localize = useLocalize();
|
||||
// const navigate = useNavigate();
|
||||
const { conversation, newConversation } = useChatContext();
|
||||
const queryClient = useQueryClient();
|
||||
const { showToast } = useToastContext();
|
||||
const dialogRef = useRef<HTMLDivElement>(null);
|
||||
const queryClient = useQueryClient();
|
||||
const getDefaultConversation = useDefaultConvo();
|
||||
const { conversation, newConversation } = useChatContext();
|
||||
|
||||
/**
|
||||
* Navigate to chat with the selected agent
|
||||
|
@ -62,13 +62,22 @@ const AgentDetail: React.FC<AgentDetailProps> = ({ agent, isOpen, onClose }) =>
|
|||
);
|
||||
queryClient.invalidateQueries([QueryKeys.messages]);
|
||||
|
||||
/** Template with agent configuration */
|
||||
const template = {
|
||||
conversationId: Constants.NEW_CONVO as string,
|
||||
endpoint: EModelEndpoint.agents,
|
||||
agent_id: agent.id,
|
||||
title: localize('com_agents_chat_with', { name: agent.name || localize('com_ui_agent') }),
|
||||
};
|
||||
|
||||
const currentConvo = getDefaultConversation({
|
||||
conversation: { ...(conversation ?? {}), ...template },
|
||||
preset: template,
|
||||
});
|
||||
|
||||
newConversation({
|
||||
template: {
|
||||
conversationId: Constants.NEW_CONVO as string,
|
||||
endpoint: EModelEndpoint.agents,
|
||||
agent_id: agent.id,
|
||||
title: `Chat with ${agent.name || 'Agent'}`,
|
||||
},
|
||||
template: currentConvo,
|
||||
preset: template,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
|
|
@@ -20,6 +20,7 @@ jest.mock('react-router-dom', () => ({
jest.mock('~/hooks', () => ({
  useMediaQuery: jest.fn(() => false), // Mock as desktop by default
  useLocalize: jest.fn(),
  useDefaultConvo: jest.fn(),
}));

jest.mock('@librechat/client', () => ({

@@ -47,7 +48,12 @@ const mockWriteText = jest.fn();

const mockNavigate = jest.fn();
const mockShowToast = jest.fn();
const mockLocalize = jest.fn((key: string) => key);
const mockLocalize = jest.fn((key: string, values?: Record<string, any>) => {
  if (key === 'com_agents_chat_with' && values?.name) {
    return `Chat with ${values.name}`;
  }
  return key;
});

const mockAgent: t.Agent = {
  id: 'test-agent-id',

@@ -106,8 +112,12 @@ describe('AgentDetail', () => {
    (useNavigate as jest.Mock).mockReturnValue(mockNavigate);
    const { useToastContext } = require('@librechat/client');
    (useToastContext as jest.Mock).mockReturnValue({ showToast: mockShowToast });
    const { useLocalize } = require('~/hooks');
    const { useLocalize, useDefaultConvo } = require('~/hooks');
    (useLocalize as jest.Mock).mockReturnValue(mockLocalize);
    (useDefaultConvo as jest.Mock).mockReturnValue(() => ({
      conversationId: Constants.NEW_CONVO,
      endpoint: EModelEndpoint.agents,
    }));

    // Mock useChatContext
    const { useChatContext } = require('~/Providers');

@@ -227,6 +237,10 @@ describe('AgentDetail', () => {
        template: {
          conversationId: Constants.NEW_CONVO,
          endpoint: EModelEndpoint.agents,
        },
        preset: {
          conversationId: Constants.NEW_CONVO,
          endpoint: EModelEndpoint.agents,
          agent_id: 'test-agent-id',
          title: 'Chat with Test Agent',
        },
@@ -1,6 +1,6 @@
import React, { useRef, useState, useMemo } from 'react';
import * as Ariakit from '@ariakit/react';
import { useSetRecoilState } from 'recoil';
import { useRecoilState } from 'recoil';
import { FileSearch, ImageUpIcon, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
import { EToolResources, EModelEndpoint, defaultAgentCapabilities } from 'librechat-data-provider';
import {

@@ -42,7 +42,9 @@ const AttachFileMenu = ({
  const isUploadDisabled = disabled ?? false;
  const inputRef = useRef<HTMLInputElement>(null);
  const [isPopoverActive, setIsPopoverActive] = useState(false);
  const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(conversationId));
  const [ephemeralAgent, setEphemeralAgent] = useRecoilState(
    ephemeralAgentByConvoId(conversationId),
  );
  const [toolResource, setToolResource] = useState<EToolResources | undefined>();
  const { handleFileChange } = useFileHandling({
    overrideEndpoint: EModelEndpoint.agents,
|
|||
* */
|
||||
const capabilities = useAgentCapabilities(agentsConfig?.capabilities ?? defaultAgentCapabilities);
|
||||
|
||||
const { fileSearchAllowedByAgent, codeAllowedByAgent } = useAgentToolPermissions(agentId);
|
||||
const { fileSearchAllowedByAgent, codeAllowedByAgent } = useAgentToolPermissions(
|
||||
agentId,
|
||||
ephemeralAgent,
|
||||
);
|
||||
|
||||
const handleUploadClick = (isImage?: boolean) => {
|
||||
if (!inputRef.current) {
|
||||
|
@ -89,11 +94,11 @@ const AttachFileMenu = ({
|
|||
},
|
||||
];
|
||||
|
||||
if (capabilities.ocrEnabled) {
|
||||
if (capabilities.contextEnabled) {
|
||||
items.push({
|
||||
label: localize('com_ui_upload_ocr_text'),
|
||||
onClick: () => {
|
||||
setToolResource(EToolResources.ocr);
|
||||
setToolResource(EToolResources.context);
|
||||
onAction();
|
||||
},
|
||||
icon: <FileType2Icon className="icon-md" />,
|
||||
|
|
|
@@ -1,14 +1,16 @@
import React, { useMemo } from 'react';
import { useRecoilValue } from 'recoil';
import { OGDialog, OGDialogTemplate } from '@librechat/client';
import { ImageUpIcon, FileSearch, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
import { EToolResources, defaultAgentCapabilities } from 'librechat-data-provider';
import { ImageUpIcon, FileSearch, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
import {
  useAgentToolPermissions,
  useAgentCapabilities,
  useGetAgentsConfig,
  useLocalize,
} from '~/hooks';
import { useChatContext } from '~/Providers';
import { ephemeralAgentByConvoId } from '~/store';
import { useDragDropContext } from '~/Providers';

interface DragDropModalProps {
  onOptionSelect: (option: EToolResources | undefined) => void;

@@ -32,9 +34,11 @@ const DragDropModal = ({ onOptionSelect, setShowModal, files, isVisible }: DragD
   * Use definition for agents endpoint for ephemeral agents
   * */
  const capabilities = useAgentCapabilities(agentsConfig?.capabilities ?? defaultAgentCapabilities);
  const { conversation } = useChatContext();
  const { conversationId, agentId } = useDragDropContext();
  const ephemeralAgent = useRecoilValue(ephemeralAgentByConvoId(conversationId ?? ''));
  const { fileSearchAllowedByAgent, codeAllowedByAgent } = useAgentToolPermissions(
    conversation?.agent_id,
    agentId,
    ephemeralAgent,
  );

  const options = useMemo(() => {
@@ -60,10 +64,10 @@ const DragDropModal = ({ onOptionSelect, setShowModal, files, isVisible }: DragD
        icon: <TerminalSquareIcon className="icon-md" />,
      });
    }
    if (capabilities.ocrEnabled) {
    if (capabilities.contextEnabled) {
      _options.push({
        label: localize('com_ui_upload_ocr_text'),
        value: EToolResources.ocr,
        value: EToolResources.context,
        icon: <FileType2Icon className="icon-md" />,
      });
    }
@@ -1,6 +1,7 @@
import { useDragHelpers } from '~/hooks';
import DragDropOverlay from '~/components/Chat/Input/Files/DragDropOverlay';
import DragDropModal from '~/components/Chat/Input/Files/DragDropModal';
import { DragDropProvider } from '~/Providers';
import { cn } from '~/utils';

interface DragDropWrapperProps {

@@ -19,12 +20,14 @@ export default function DragDropWrapper({ children, className }: DragDropWrapper
      {children}
      {/** Always render overlay to avoid mount/unmount overhead */}
      <DragDropOverlay isActive={isActive} />
      <DragDropModal
        files={draggedFiles}
        isVisible={showModal}
        setShowModal={setShowModal}
        onOptionSelect={handleOptionSelect}
      />
      <DragDropProvider>
        <DragDropModal
          files={draggedFiles}
          isVisible={showModal}
          setShowModal={setShowModal}
          onOptionSelect={handleOptionSelect}
        />
      </DragDropProvider>
    </div>
  );
}
@@ -79,9 +79,9 @@ export default function AgentConfig({ createMutation }: Pick<AgentPanelProps, 'c
  }, [fileMap, agentFiles]);

  const {
    ocrEnabled,
    codeEnabled,
    toolsEnabled,
    contextEnabled,
    actionsEnabled,
    artifactsEnabled,
    webSearchEnabled,

@@ -291,7 +291,7 @@ export default function AgentConfig({ createMutation }: Pick<AgentPanelProps, 'c
      {(codeEnabled ||
        fileSearchEnabled ||
        artifactsEnabled ||
        ocrEnabled ||
        contextEnabled ||
        webSearchEnabled) && (
        <div className="mb-4 flex w-full flex-col items-start gap-3">
          <label className="text-token-text-primary block font-medium">

@@ -301,8 +301,8 @@ export default function AgentConfig({ createMutation }: Pick<AgentPanelProps, 'c
          {codeEnabled && <CodeForm agent_id={agent_id} files={code_files} />}
          {/* Web Search */}
          {webSearchEnabled && <SearchForm />}
          {/* File Context (OCR) */}
          {ocrEnabled && <FileContext agent_id={agent_id} files={context_files} />}
          {/* File Context */}
          {contextEnabled && <FileContext agent_id={agent_id} files={context_files} />}
          {/* Artifacts */}
          {artifactsEnabled && <Artifacts />}
          {/* File Search */}
@@ -47,7 +47,7 @@ export default function FileContext({

  const { handleFileChange } = useFileHandling({
    overrideEndpoint: EModelEndpoint.agents,
    additionalMetadata: { agent_id, tool_resource: EToolResources.ocr },
    additionalMetadata: { agent_id, tool_resource: EToolResources.context },
    fileSetter: setFiles,
  });
  const { handleSharePointFiles, isProcessing, downloadProgress } = useSharePointFileHandling({

@@ -113,7 +113,7 @@ export default function FileContext({
        <HoverCardTrigger asChild>
          <span className="flex items-center gap-2">
            <label className="text-token-text-primary block font-medium">
              {localize('com_agents_file_context')}
              {localize('com_agents_file_context_label')}
            </label>
            <CircleHelpIcon className="h-4 w-4 text-text-tertiary" />
          </span>

@@ -122,7 +122,7 @@ export default function FileContext({
        <HoverCardContent side={ESide.Top} className="w-80">
          <div className="space-y-2">
            <p className="text-sm text-text-secondary">
              {localize('com_agents_file_context_info')}
              {localize('com_agents_file_context_description')}
            </p>
          </div>
        </HoverCardContent>

@@ -130,13 +130,13 @@ export default function FileContext({
        </div>
      </HoverCard>
      <div className="flex flex-col gap-3">
        {/* File Context (OCR) Files */}
        {/* File Context Files */}
        <FileRow
          files={files}
          setFiles={setFiles}
          setFilesLoading={setFilesLoading}
          agent_id={agent_id}
          tool_resource={EToolResources.ocr}
          tool_resource={EToolResources.context}
          Wrapper={({ children }) => <div className="flex flex-wrap gap-2">{children}</div>}
        />
        <div>
@@ -1,5 +1,6 @@
import { renderHook } from '@testing-library/react';
import { Tools, Constants } from 'librechat-data-provider';
import { Tools, Constants, EToolResources } from 'librechat-data-provider';
import type { TEphemeralAgent } from 'librechat-data-provider';
import useAgentToolPermissions from '../useAgentToolPermissions';

// Mock dependencies
@ -15,57 +16,165 @@ jest.mock('~/Providers', () => ({
|
|||
import { useGetAgentByIdQuery } from '~/data-provider';
|
||||
import { useAgentsMapContext } from '~/Providers';
|
||||
|
||||
type HookProps = {
|
||||
agentId?: string | null;
|
||||
ephemeralAgent?: TEphemeralAgent | null;
|
||||
};
|
||||
|
||||
describe('useAgentToolPermissions', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('Ephemeral Agent Scenarios', () => {
|
||||
it('should return true for all tools when agentId is null', () => {
|
||||
describe('Ephemeral Agent Scenarios (without ephemeralAgent parameter)', () => {
|
||||
it('should return false for all tools when agentId is null and no ephemeralAgent provided', () => {
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(null));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return true for all tools when agentId is undefined', () => {
|
||||
it('should return false for all tools when agentId is undefined and no ephemeralAgent provided', () => {
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(undefined));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return true for all tools when agentId is empty string', () => {
|
||||
it('should return false for all tools when agentId is empty string and no ephemeralAgent provided', () => {
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(''));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return false for all tools when agentId is EPHEMERAL_AGENT_ID and no ephemeralAgent provided', () => {
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(Constants.EPHEMERAL_AGENT_ID));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Ephemeral Agent with Tool Settings', () => {
|
||||
it('should return true for file_search when ephemeralAgent has file_search enabled', () => {
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.file_search]: true,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(null, ephemeralAgent));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return true for execute_code when ephemeralAgent has execute_code enabled', () => {
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.execute_code]: true,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(undefined, ephemeralAgent));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return true for both tools when ephemeralAgent has both enabled', () => {
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.file_search]: true,
|
||||
[EToolResources.execute_code]: true,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions('', ephemeralAgent));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return true for all tools when agentId is EPHEMERAL_AGENT_ID', () => {
|
||||
it('should return false for tools when ephemeralAgent has them explicitly disabled', () => {
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useAgentToolPermissions(Constants.EPHEMERAL_AGENT_ID)
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.file_search]: false,
|
||||
[EToolResources.execute_code]: false,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() =>
|
||||
useAgentToolPermissions(Constants.EPHEMERAL_AGENT_ID, ephemeralAgent),
|
||||
);
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle ephemeralAgent with ocr property without affecting other tools', () => {
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.file_search]: true,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(null, ephemeralAgent));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not affect regular agents when ephemeralAgent is provided', () => {
|
||||
const agentId = 'regular-agent';
|
||||
const mockAgent = {
|
||||
id: agentId,
|
||||
tools: [Tools.file_search],
|
||||
};
|
||||
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({
|
||||
[agentId]: mockAgent,
|
||||
});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.execute_code]: true,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(agentId, ephemeralAgent));
|
||||
|
||||
// Should use regular agent's tools, not ephemeralAgent
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toEqual([Tools.file_search]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Regular Agent with Tools', () => {
|
||||
|
@ -300,7 +409,7 @@ describe('useAgentToolPermissions', () => {
|
|||
expect(firstResult.codeAllowedByAgent).toBe(secondResult.codeAllowedByAgent);
|
||||
// Tools array reference should be the same since it comes from useMemo
|
||||
expect(firstResult.tools).toBe(secondResult.tools);
|
||||
|
||||
|
||||
// Verify the actual values are correct
|
||||
expect(secondResult.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(secondResult.codeAllowedByAgent).toBe(false);
|
||||
|
@ -318,10 +427,9 @@ describe('useAgentToolPermissions', () => {
|
|||
(useAgentsMapContext as jest.Mock).mockReturnValue(mockAgents);
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const { result, rerender } = renderHook(
|
||||
({ agentId }) => useAgentToolPermissions(agentId),
|
||||
{ initialProps: { agentId: agentId1 } }
|
||||
);
|
||||
const { result, rerender } = renderHook(({ agentId }) => useAgentToolPermissions(agentId), {
|
||||
initialProps: { agentId: agentId1 },
|
||||
});
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
|
@ -345,24 +453,34 @@ describe('useAgentToolPermissions', () => {
|
|||
});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({ data: undefined });
|
||||
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.file_search]: true,
|
||||
[EToolResources.execute_code]: true,
|
||||
};
|
||||
|
||||
const { result, rerender } = renderHook(
|
||||
({ agentId }) => useAgentToolPermissions(agentId),
|
||||
{ initialProps: { agentId: null } }
|
||||
({ agentId, ephemeralAgent }) => useAgentToolPermissions(agentId, ephemeralAgent),
|
||||
{ initialProps: { agentId: null, ephemeralAgent } as HookProps },
|
||||
);
|
||||
|
||||
// Start with ephemeral agent (null)
|
||||
// Start with ephemeral agent (null) with tools enabled
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
|
||||
// Switch to regular agent
|
||||
rerender({ agentId: regularAgentId });
|
||||
rerender({ agentId: regularAgentId, ephemeralAgent });
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
|
||||
// Switch back to ephemeral
|
||||
rerender({ agentId: '' });
|
||||
rerender({ agentId: '', ephemeralAgent });
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
|
||||
// Switch to ephemeral without tools
|
||||
rerender({ agentId: null, ephemeralAgent: undefined });
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -403,9 +521,9 @@ describe('useAgentToolPermissions', () => {
|
|||
|
||||
it('should handle query loading state', () => {
|
||||
const agentId = 'loading-agent';
|
||||
|
||||
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({
|
||||
data: undefined,
|
||||
isLoading: true,
|
||||
error: null,
|
||||
|
@ -421,9 +539,9 @@ describe('useAgentToolPermissions', () => {
|
|||
|
||||
it('should handle query error state', () => {
|
||||
const agentId = 'error-agent';
|
||||
|
||||
|
||||
(useAgentsMapContext as jest.Mock).mockReturnValue({});
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({
|
||||
(useGetAgentByIdQuery as jest.Mock).mockReturnValue({
|
||||
data: undefined,
|
||||
isLoading: false,
|
||||
error: new Error('Failed to fetch agent'),
|
|
@ -1,5 +1,5 @@
|
|||
import { renderHook } from '@testing-library/react';
|
||||
import { Tools } from 'librechat-data-provider';
|
||||
import { Tools, EToolResources } from 'librechat-data-provider';
|
||||
import useAgentToolPermissions from '../useAgentToolPermissions';
|
||||
|
||||
// Mock the dependencies
|
||||
|
@ -20,36 +20,36 @@ describe('useAgentToolPermissions', () => {
|
|||
});
|
||||
|
||||
describe('when no agentId is provided', () => {
|
||||
it('should allow all tools for ephemeral agents', () => {
|
||||
it('should disallow all tools for ephemeral agents when no ephemeralAgent settings provided', () => {
|
||||
mockUseAgentsMapContext.mockReturnValue({});
|
||||
mockUseGetAgentByIdQuery.mockReturnValue({ data: undefined });
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(null));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should allow all tools when agentId is undefined', () => {
|
||||
it('should disallow all tools when agentId is undefined and no ephemeralAgent settings', () => {
|
||||
mockUseAgentsMapContext.mockReturnValue({});
|
||||
mockUseGetAgentByIdQuery.mockReturnValue({ data: undefined });
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(undefined));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should allow all tools when agentId is empty string', () => {
|
||||
it('should disallow all tools when agentId is empty string and no ephemeralAgent settings', () => {
|
||||
mockUseAgentsMapContext.mockReturnValue({});
|
||||
mockUseGetAgentByIdQuery.mockReturnValue({ data: undefined });
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(''));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
@ -177,4 +177,74 @@ describe('useAgentToolPermissions', () => {
|
|||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when ephemeralAgent settings are provided', () => {
|
||||
it('should allow file_search when ephemeralAgent has file_search enabled', () => {
|
||||
mockUseAgentsMapContext.mockReturnValue({});
|
||||
mockUseGetAgentByIdQuery.mockReturnValue({ data: undefined });
|
||||
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.file_search]: true,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(null, ephemeralAgent));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should allow execute_code when ephemeralAgent has execute_code enabled', () => {
|
||||
mockUseAgentsMapContext.mockReturnValue({});
|
||||
mockUseGetAgentByIdQuery.mockReturnValue({ data: undefined });
|
||||
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.execute_code]: true,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(undefined, ephemeralAgent));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(false);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should allow both tools when ephemeralAgent has both enabled', () => {
|
||||
mockUseAgentsMapContext.mockReturnValue({});
|
||||
mockUseGetAgentByIdQuery.mockReturnValue({ data: undefined });
|
||||
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.file_search]: true,
|
||||
[EToolResources.execute_code]: true,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions('', ephemeralAgent));
|
||||
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(true);
|
||||
expect(result.current.tools).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not affect regular agents when ephemeralAgent is provided', () => {
|
||||
const agentId = 'regular-agent';
|
||||
const agent = {
|
||||
id: agentId,
|
||||
tools: [Tools.file_search],
|
||||
};
|
||||
|
||||
mockUseAgentsMapContext.mockReturnValue({ [agentId]: agent });
|
||||
mockUseGetAgentByIdQuery.mockReturnValue({ data: undefined });
|
||||
|
||||
const ephemeralAgent = {
|
||||
[EToolResources.execute_code]: true,
|
||||
};
|
||||
|
||||
const { result } = renderHook(() => useAgentToolPermissions(agentId, ephemeralAgent));
|
||||
|
||||
// Should use regular agent's tools, not ephemeralAgent
|
||||
expect(result.current.fileSearchAllowedByAgent).toBe(true);
|
||||
expect(result.current.codeAllowedByAgent).toBe(false);
|
||||
expect(result.current.tools).toEqual([Tools.file_search]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -6,6 +6,7 @@ interface AgentCapabilitiesResult {
  actionsEnabled: boolean;
  artifactsEnabled: boolean;
  ocrEnabled: boolean;
  contextEnabled: boolean;
  fileSearchEnabled: boolean;
  webSearchEnabled: boolean;
  codeEnabled: boolean;
@ -34,6 +35,11 @@ export default function useAgentCapabilities(
    [capabilities],
  );

  const contextEnabled = useMemo(
    () => capabilities?.includes(AgentCapabilities.context) ?? false,
    [capabilities],
  );

  const fileSearchEnabled = useMemo(
    () => capabilities?.includes(AgentCapabilities.file_search) ?? false,
    [capabilities],
@ -54,6 +60,7 @@ export default function useAgentCapabilities(
    codeEnabled,
    toolsEnabled,
    actionsEnabled,
    contextEnabled,
    artifactsEnabled,
    webSearchEnabled,
    fileSearchEnabled,

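The new `contextEnabled` flag sits alongside the existing capability checks, so UI that previously keyed off `ocrEnabled` can switch to the `context` capability. A minimal sketch of a consumer, assuming the hook is called with the agents endpoint's capabilities array as the hunks above suggest; the wrapper hook, its loose `string[]` typing, and the import path are illustrative, not part of this commit:

```ts
import useAgentCapabilities from '~/hooks/Agents/useAgentCapabilities'; // assumed import path

/** Hypothetical wrapper: decide whether to surface the "File Context" upload option. */
export function useShowFileContextUpload(capabilities?: string[]) {
  const { contextEnabled, ocrEnabled } = useAgentCapabilities(capabilities);
  // `ocrEnabled` is only consulted for configs that still list the legacy capability.
  return contextEnabled || ocrEnabled;
}
```
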
@ -1,5 +1,6 @@
import { useMemo } from 'react';
import { Tools, Constants } from 'librechat-data-provider';
import { Tools, Constants, EToolResources } from 'librechat-data-provider';
import type { TEphemeralAgent } from 'librechat-data-provider';
import { useGetAgentByIdQuery } from '~/data-provider';
import { useAgentsMapContext } from '~/Providers';

@ -16,11 +17,13 @@ function isEphemeralAgent(agentId: string | null | undefined): boolean {
/**
 * Hook to determine whether specific tools are allowed for a given agent.
 *
 * @param agentId - The ID of the agent. If null/undefined/empty, returns true for all tools (ephemeral agent behavior)
 * @param agentId - The ID of the agent. If null/undefined/empty, checks ephemeralAgent settings
 * @param ephemeralAgent - Optional ephemeral agent settings for tool permissions
 * @returns Object with boolean flags for file_search and execute_code permissions, plus the tools array
 */
export default function useAgentToolPermissions(
  agentId: string | null | undefined,
  ephemeralAgent?: TEphemeralAgent | null,
): AgentToolPermissionsResult {
  const agentsMap = useAgentsMapContext();

@ -37,22 +40,26 @@ export default function useAgentToolPermissions(
  );

  const fileSearchAllowedByAgent = useMemo(() => {
    // Allow for ephemeral agents
    if (isEphemeralAgent(agentId)) return true;
    // Check ephemeral agent settings
    if (isEphemeralAgent(agentId)) {
      return ephemeralAgent?.[EToolResources.file_search] ?? false;
    }
    // If agentId exists but agent not found, disallow
    if (!selectedAgent) return false;
    // Check if the agent has the file_search tool
    return tools?.includes(Tools.file_search) ?? false;
  }, [agentId, selectedAgent, tools]);
  }, [agentId, selectedAgent, tools, ephemeralAgent]);

  const codeAllowedByAgent = useMemo(() => {
    // Allow for ephemeral agents
    if (isEphemeralAgent(agentId)) return true;
    // Check ephemeral agent settings
    if (isEphemeralAgent(agentId)) {
      return ephemeralAgent?.[EToolResources.execute_code] ?? false;
    }
    // If agentId exists but agent not found, disallow
    if (!selectedAgent) return false;
    // Check if the agent has the execute_code tool
    return tools?.includes(Tools.execute_code) ?? false;
  }, [agentId, selectedAgent, tools]);
  }, [agentId, selectedAgent, tools, ephemeralAgent]);

  return {
    fileSearchAllowedByAgent,

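With the added `ephemeralAgent` parameter, tool permissions for the ephemeral (no agent ID) case become opt-in rather than always allowed. A rough sketch of a call site; the wrapper hook and import path are illustrative assumptions, while the `EToolResources` keys match the checks above:

```ts
import { EToolResources } from 'librechat-data-provider';
import type { TEphemeralAgent } from 'librechat-data-provider';
import useAgentToolPermissions from '~/hooks/Agents/useAgentToolPermissions'; // assumed import path

/** Hypothetical consumer: map the hook's results onto chat-input badge visibility. */
export function useToolBadges(agentId: string | null, ephemeralAgent?: TEphemeralAgent | null) {
  const { fileSearchAllowedByAgent, codeAllowedByAgent } = useAgentToolPermissions(
    agentId,
    ephemeralAgent,
  );
  return {
    showFileSearchBadge: fileSearchAllowedByAgent,
    showCodeBadge: codeAllowedByAgent,
  };
}

// For an ephemeral agent (agentId is null), the flags now follow the user's toggles:
// useToolBadges(null, { [EToolResources.execute_code]: true })
//   -> { showFileSearchBadge: false, showCodeBadge: true }
```
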
@ -71,7 +71,7 @@ export default function useDragHelpers() {
  const capabilities = agentsConfig?.capabilities ?? defaultAgentCapabilities;
  const fileSearchEnabled = capabilities.includes(AgentCapabilities.file_search) === true;
  const codeEnabled = capabilities.includes(AgentCapabilities.execute_code) === true;
  const ocrEnabled = capabilities.includes(AgentCapabilities.ocr) === true;
  const contextEnabled = capabilities.includes(AgentCapabilities.context) === true;

  /** Get agent permissions at drop time */
  const agentId = conversationRef.current?.agent_id;
@ -99,7 +99,7 @@ export default function useDragHelpers() {
      allImages ||
      (fileSearchEnabled && fileSearchAllowedByAgent) ||
      (codeEnabled && codeAllowedByAgent) ||
      ocrEnabled;
      contextEnabled;

    if (!shouldShowModal) {
      // Fallback: directly handle files without showing modal

@ -9,6 +9,7 @@
  "com_agents_all_category": "All",
  "com_agents_all_description": "Browse all shared agents across all categories",
  "com_agents_by_librechat": "by LibreChat",
  "com_agents_chat_with": "Chat with {{name}}",
  "com_agents_category_aftersales": "After Sales",
  "com_agents_category_aftersales_description": "Agents specialized in post-sale support, maintenance, and customer service",
  "com_agents_category_empty": "No agents found in the {{category}} category",
@ -59,9 +60,9 @@
  "com_agents_error_timeout_suggestion": "Please check your internet connection and try again.",
  "com_agents_error_timeout_title": "Connection Timeout",
  "com_agents_error_title": "Something went wrong",
  "com_agents_file_context": "File Context (OCR)",
  "com_agents_file_context_label": "File Context",
  "com_agents_file_context_disabled": "Agent must be created before uploading files for File Context.",
  "com_agents_file_context_info": "Files uploaded as \"Context\" are processed using OCR to extract text, which is then added to the Agent's instructions. Ideal for documents, images with text, or PDFs where you need the full text content of a file",
  "com_agents_file_context_description": "Files uploaded as \"Context\" are parsed as text to supplement the Agent's instructions. If OCR is available, or if configured for the uploaded filetype, the process is used to extract text. Ideal for documents, images with text, or PDFs where you need the full text content of a file",
  "com_agents_file_search_disabled": "Agent must be created before uploading files for File Search.",
  "com_agents_file_search_info": "When enabled, the agent will be informed of the exact filenames listed below, allowing it to retrieve relevant context from these files.",
  "com_agents_grid_announcement": "Showing {{count}} agents in {{category}} category",

@ -654,7 +654,7 @@
|
|||
"com_ui_agent_chain_info": "Ļauj izveidot aģentu secību ķēdes. Katrs aģents var piekļūt iepriekšējo ķēdē esošo aģentu izvades datiem. Balstīts uz \"Aģentu sajaukuma\" arhitektūru, kurā aģenti izmanto iepriekšējos izvades datus kā palīginformāciju.",
|
||||
"com_ui_agent_chain_max": "Jūs esat sasniedzis maksimālo skaitu {{0}} aģentu.",
|
||||
"com_ui_agent_delete_error": "Dzēšot aģentu, radās kļūda.",
|
||||
"com_ui_agent_deleted": "Aģents veiksmīgi izdzēsts",
|
||||
"com_ui_agent_deleted": "Aģents veiksmīgi dzēsts",
|
||||
"com_ui_agent_duplicate_error": "Dublējot aģentu, radās kļūda.",
|
||||
"com_ui_agent_duplicated": "Aģents veiksmīgi dublēts",
|
||||
"com_ui_agent_name_is_required": "Obligāti jānorāda aģenta nosaukums",
|
||||
|
@ -695,7 +695,7 @@
|
|||
"com_ui_ascending": "Augošā",
|
||||
"com_ui_assistant": "Asistents",
|
||||
"com_ui_assistant_delete_error": "Dzēšot asistentu, radās kļūda.",
|
||||
"com_ui_assistant_deleted": "Asistents ir veiksmīgi izdzēsts.",
|
||||
"com_ui_assistant_deleted": "Asistents ir veiksmīgi dzēsts.",
|
||||
"com_ui_assistants": "Asistenti",
|
||||
"com_ui_assistants_output": "Asistentu izvade",
|
||||
"com_ui_at_least_one_owner_required": "Nepieciešams vismaz viens īpašnieks",
|
||||
|
@ -738,7 +738,7 @@
|
|||
"com_ui_bookmarks_create_success": "Grāmatzīme veiksmīgi izveidota",
|
||||
"com_ui_bookmarks_delete": "Dzēst grāmatzīmi",
|
||||
"com_ui_bookmarks_delete_error": "Dzēšot grāmatzīmi, radās kļūda.",
|
||||
"com_ui_bookmarks_delete_success": "Grāmatzīme veiksmīgi izdzēsta",
|
||||
"com_ui_bookmarks_delete_success": "Grāmatzīme veiksmīgi dzēsta",
|
||||
"com_ui_bookmarks_description": "Apraksts",
|
||||
"com_ui_bookmarks_edit": "Rediģēt grāmatzīmi",
|
||||
"com_ui_bookmarks_filter": "Filtrēt grāmatzīmes...",
|
||||
|
@ -825,7 +825,7 @@
|
|||
"com_ui_delete_mcp": "Dzēst MCP",
|
||||
"com_ui_delete_mcp_confirm": "Vai tiešām vēlaties dzēst šo MCP serveri?",
|
||||
"com_ui_delete_mcp_error": "Neizdevās izdzēst MCP serveri.",
|
||||
"com_ui_delete_mcp_success": "MCP serveris veiksmīgi izdzēsts",
|
||||
"com_ui_delete_mcp_success": "MCP serveris veiksmīgi dzēsts",
|
||||
"com_ui_delete_memory": "Dzēst atmiņu",
|
||||
"com_ui_delete_not_allowed": "Dzēšanas darbība nav atļauta",
|
||||
"com_ui_delete_prompt": "Vai dzēst uzvedni?",
|
||||
|
@ -847,7 +847,7 @@
|
|||
"com_ui_download_artifact": "Lejupielādēt artefaktu",
|
||||
"com_ui_download_backup": "Lejupielādēt rezerves kodus",
|
||||
"com_ui_download_backup_tooltip": "Pirms turpināt, lejupielādējiet rezerves kodus. Tie būs nepieciešami, lai atgūtu piekļuvi, ja pazaudēsiet autentifikatora ierīci.",
|
||||
"com_ui_download_error": "Kļūda, lejupielādējot failu. Iespējams, fails ir izdzēsts.",
|
||||
"com_ui_download_error": "Kļūda, lejupielādējot failu. Iespējams, fails ir dzēsts.",
|
||||
"com_ui_drag_drop": "Ievietojiet šeit jebkuru failu, lai pievienotu to sarunai",
|
||||
"com_ui_dropdown_variables": "Nolaižamās izvēlnes mainīgie:",
|
||||
"com_ui_dropdown_variables_info": "Izveidojiet pielāgotas nolaižamās izvēlnes savām uzvednēm:{{variable_name:option1|option2|option3}}` (mainīgā_nosakums:opcija1|opcija2|opcija3)",
|
||||
|
@ -1162,8 +1162,8 @@
|
|||
"com_ui_share_link_to_chat": "Kopīgot saiti sarunai",
|
||||
"com_ui_share_update_message": "Jūsu vārds, pielāgotie norādījumi un visas ziņas, ko pievienojat pēc kopīgošanas, paliek privātas.",
|
||||
"com_ui_share_var": "Kopīgot {{0}}",
|
||||
"com_ui_shared_link_bulk_delete_success": "Koplietotās saites ir veiksmīgi izdzēstas.",
|
||||
"com_ui_shared_link_delete_success": "Koplietotā saite ir veiksmīgi izdzēsta.",
|
||||
"com_ui_shared_link_bulk_delete_success": "Koplietotās saites ir veiksmīgi dzēstas.",
|
||||
"com_ui_shared_link_delete_success": "Koplietotā saite ir veiksmīgi dzēsta.",
|
||||
"com_ui_shared_link_not_found": "Kopīgotā saite nav atrasta",
|
||||
"com_ui_shared_prompts": "Koplietotas uzvednes",
|
||||
"com_ui_shop": "Iepirkšanās",
|
||||
|
|
|
@ -253,7 +253,7 @@ export const validateFiles = ({
  }

  let mimeTypesToCheck = supportedMimeTypes;
  if (toolResource === EToolResources.ocr) {
  if (toolResource === EToolResources.context) {
    mimeTypesToCheck = [
      ...(fileConfig?.text?.supportedMimeTypes || []),
      ...(fileConfig?.ocr?.supportedMimeTypes || []),

@ -62,14 +62,19 @@ export const processAgentOption = ({
  fileMap?: Record<string, TFile | undefined>;
}): TAgentOption => {
  const isGlobal = _agent?.isPublic ?? false;

  const context_files = _agent?.tool_resources?.context?.file_ids ?? [];
  if (_agent?.tool_resources?.ocr?.file_ids) {
    /** Backwards-compatibility */
    context_files.push(..._agent.tool_resources.ocr.file_ids);
  }

  const agent: TAgentOption = {
    ...(_agent ?? ({} as Agent)),
    label: _agent?.name ?? '',
    value: _agent?.id ?? '',
    icon: isGlobal ? <EarthIcon className="icon-md text-green-400" /> : null,
    context_files: _agent?.tool_resources?.ocr?.file_ids
      ? ([] as Array<[string, ExtendedFile]>)
      : undefined,
    context_files: context_files.length > 0 ? ([] as Array<[string, ExtendedFile]>) : undefined,
    knowledge_files: _agent?.tool_resources?.file_search?.file_ids
      ? ([] as Array<[string, ExtendedFile]>)
      : undefined,
@ -130,12 +135,12 @@ export const processAgentOption = ({
    }
  };

  if (agent.context_files && _agent?.tool_resources?.ocr?.file_ids) {
    _agent.tool_resources.ocr.file_ids.forEach((file_id) =>
  if (agent.context_files && context_files.length > 0) {
    context_files.forEach((file_id) =>
      handleFile({
        file_id,
        list: agent.context_files,
        tool_resource: EToolResources.ocr,
        tool_resource: EToolResources.context,
      }),
    );
  }

@ -1,6 +1,7 @@
export * from './config';
export * from './memory';
export * from './migration';
export * from './legacy';
export * from './resources';
export * from './run';
export * from './validation';

697
packages/api/src/agents/legacy.test.ts
Normal file
697
packages/api/src/agents/legacy.test.ts
Normal file
|
@ -0,0 +1,697 @@
|
|||
import { EToolResources } from 'librechat-data-provider';
|
||||
import { convertOcrToContextInPlace, mergeAgentOcrConversion } from './legacy';
|
||||
import type { AgentToolResources, TFile } from 'librechat-data-provider';
|
||||
|
||||
describe('OCR to Context Conversion for updateAgentHandler', () => {
|
||||
describe('convertOcrToContextInPlace', () => {
|
||||
it('should do nothing when no OCR resource exists', () => {
|
||||
const data = {
|
||||
tool_resources: {
|
||||
[EToolResources.execute_code]: {
|
||||
file_ids: ['file1'],
|
||||
},
|
||||
},
|
||||
tools: ['execute_code'],
|
||||
};
|
||||
|
||||
const originalCopy = JSON.parse(JSON.stringify(data));
|
||||
convertOcrToContextInPlace(data);
|
||||
|
||||
expect(data).toEqual(originalCopy);
|
||||
});
|
||||
|
||||
it('should convert OCR to context when context does not exist', () => {
|
||||
const data = {
|
||||
tool_resources: {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1', 'ocr2'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'ocr1',
|
||||
filename: 'doc.pdf',
|
||||
filepath: '/doc.pdf',
|
||||
type: 'application/pdf',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
} as AgentToolResources,
|
||||
};
|
||||
|
||||
convertOcrToContextInPlace(data);
|
||||
|
||||
expect(data.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(data.tool_resources?.[EToolResources.context]).toEqual({
|
||||
file_ids: ['ocr1', 'ocr2'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'ocr1',
|
||||
filename: 'doc.pdf',
|
||||
filepath: '/doc.pdf',
|
||||
type: 'application/pdf',
|
||||
user: 'user1',
|
||||
object: 'file',
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('should merge OCR into existing context', () => {
|
||||
const data = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['context1'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'context1',
|
||||
filename: 'existing.txt',
|
||||
filepath: '/existing.txt',
|
||||
type: 'text/plain',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1', 'ocr2'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'ocr1',
|
||||
filename: 'scan.pdf',
|
||||
filepath: '/scan.pdf',
|
||||
type: 'application/pdf',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
convertOcrToContextInPlace(data);
|
||||
|
||||
expect(data.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(data.tool_resources?.[EToolResources.context]?.file_ids).toEqual([
|
||||
'context1',
|
||||
'ocr1',
|
||||
'ocr2',
|
||||
]);
|
||||
expect(data.tool_resources?.[EToolResources.context]?.files).toHaveLength(2);
|
||||
expect(data.tool_resources?.[EToolResources.context]?.files?.map((f) => f.file_id)).toEqual([
|
||||
'context1',
|
||||
'ocr1',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should deduplicate file_ids when merging', () => {
|
||||
const data = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['file1', 'file2'],
|
||||
},
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['file2', 'file3'],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
convertOcrToContextInPlace(data);
|
||||
|
||||
expect(data.tool_resources?.[EToolResources.context]?.file_ids).toEqual([
|
||||
'file1',
|
||||
'file2',
|
||||
'file3',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should deduplicate files by file_id when merging', () => {
|
||||
const sharedFile: TFile = {
|
||||
file_id: 'shared',
|
||||
filename: 'shared.txt',
|
||||
filepath: '/shared.txt',
|
||||
type: 'text/plain',
|
||||
user: 'user1',
|
||||
object: 'file',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
};
|
||||
|
||||
const data = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
files: [sharedFile],
|
||||
},
|
||||
[EToolResources.ocr]: {
|
||||
files: [
|
||||
sharedFile,
|
||||
{
|
||||
file_id: 'unique',
|
||||
filename: 'unique.pdf',
|
||||
filepath: '/unique.pdf',
|
||||
type: 'application/pdf',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
convertOcrToContextInPlace(data);
|
||||
|
||||
expect(data.tool_resources?.[EToolResources.context]?.files).toHaveLength(2);
|
||||
expect(
|
||||
data.tool_resources?.[EToolResources.context]?.files?.map((f) => f.file_id).sort(),
|
||||
).toEqual(['shared', 'unique']);
|
||||
});
|
||||
|
||||
it('should replace OCR with context in tools array', () => {
|
||||
const data = {
|
||||
tools: ['execute_code', 'ocr', 'file_search'],
|
||||
};
|
||||
|
||||
convertOcrToContextInPlace(data);
|
||||
|
||||
expect(data.tools).toEqual(['execute_code', 'context', 'file_search']);
|
||||
});
|
||||
|
||||
it('should remove duplicates when context already exists in tools', () => {
|
||||
const data = {
|
||||
tools: ['context', 'ocr', 'execute_code'],
|
||||
};
|
||||
|
||||
convertOcrToContextInPlace(data);
|
||||
|
||||
expect(data.tools).toEqual(['context', 'execute_code']);
|
||||
});
|
||||
|
||||
it('should handle both tool_resources and tools conversion', () => {
|
||||
const data = {
|
||||
tool_resources: {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1'],
|
||||
},
|
||||
} as AgentToolResources,
|
||||
tools: ['ocr', 'execute_code'],
|
||||
};
|
||||
|
||||
convertOcrToContextInPlace(data);
|
||||
|
||||
expect(data.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(data.tool_resources?.[EToolResources.context]).toEqual({
|
||||
file_ids: ['ocr1'],
|
||||
});
|
||||
expect(data.tools).toEqual(['context', 'execute_code']);
|
||||
});
|
||||
|
||||
it('should preserve other tool resources during OCR conversion', () => {
|
||||
const data = {
|
||||
tool_resources: {
|
||||
[EToolResources.execute_code]: {
|
||||
file_ids: ['exec1', 'exec2'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'exec1',
|
||||
filename: 'script.py',
|
||||
filepath: '/script.py',
|
||||
type: 'text/x-python',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 512,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
[EToolResources.file_search]: {
|
||||
file_ids: ['search1'],
|
||||
vector_store_ids: ['vector1', 'vector2'],
|
||||
},
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1'],
|
||||
},
|
||||
} as AgentToolResources,
|
||||
tools: ['execute_code', 'file_search', 'ocr'],
|
||||
};
|
||||
|
||||
const originalExecuteCode = JSON.parse(JSON.stringify(data.tool_resources.execute_code));
|
||||
const originalFileSearch = JSON.parse(JSON.stringify(data.tool_resources.file_search));
|
||||
|
||||
convertOcrToContextInPlace(data);
|
||||
|
||||
// OCR should be converted to context
|
||||
expect(data.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(data.tool_resources?.[EToolResources.context]).toEqual({
|
||||
file_ids: ['ocr1'],
|
||||
});
|
||||
|
||||
// Other resources should remain unchanged
|
||||
expect(data.tool_resources?.[EToolResources.execute_code]).toEqual(originalExecuteCode);
|
||||
expect(data.tool_resources?.[EToolResources.file_search]).toEqual(originalFileSearch);
|
||||
|
||||
// Tools array should have ocr replaced with context
|
||||
expect(data.tools).toEqual(['execute_code', 'file_search', 'context']);
|
||||
});
|
||||
|
||||
it('should preserve image_edit resource during OCR conversion', () => {
|
||||
const data = {
|
||||
tool_resources: {
|
||||
[EToolResources.image_edit]: {
|
||||
file_ids: ['image1'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'image1',
|
||||
filename: 'photo.png',
|
||||
filepath: '/photo.png',
|
||||
type: 'image/png',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 2048,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
width: 800,
|
||||
height: 600,
|
||||
},
|
||||
],
|
||||
},
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1'],
|
||||
},
|
||||
} as AgentToolResources,
|
||||
};
|
||||
|
||||
const originalImageEdit = JSON.parse(JSON.stringify(data.tool_resources.image_edit));
|
||||
|
||||
convertOcrToContextInPlace(data);
|
||||
|
||||
expect(data.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(data.tool_resources?.[EToolResources.context]).toEqual({
|
||||
file_ids: ['ocr1'],
|
||||
});
|
||||
expect(data.tool_resources?.[EToolResources.image_edit]).toEqual(originalImageEdit);
|
||||
});
|
||||
});
|
||||
|
||||
describe('mergeAgentOcrConversion', () => {
|
||||
it('should return empty object when existing agent has no OCR', () => {
|
||||
const existingAgent = {
|
||||
tool_resources: {
|
||||
[EToolResources.execute_code]: {
|
||||
file_ids: ['file1'],
|
||||
},
|
||||
},
|
||||
tools: ['execute_code'],
|
||||
};
|
||||
|
||||
const updateData = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['context1'],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const result = mergeAgentOcrConversion(existingAgent, updateData);
|
||||
|
||||
expect(result).toEqual({});
|
||||
});
|
||||
|
||||
it('should convert existing OCR to context when no context exists', () => {
|
||||
const existingAgent = {
|
||||
tool_resources: {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1', 'ocr2'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'ocr1',
|
||||
filename: 'doc.pdf',
|
||||
filepath: '/doc.pdf',
|
||||
type: 'application/pdf',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
tools: ['ocr', 'execute_code'],
|
||||
};
|
||||
|
||||
const updateData = {};
|
||||
|
||||
const result = mergeAgentOcrConversion(existingAgent, updateData);
|
||||
|
||||
expect(result.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(result.tool_resources?.[EToolResources.context]).toEqual({
|
||||
file_ids: ['ocr1', 'ocr2'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'ocr1',
|
||||
filename: 'doc.pdf',
|
||||
filepath: '/doc.pdf',
|
||||
type: 'application/pdf',
|
||||
user: 'user1',
|
||||
object: 'file',
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
});
|
||||
expect(result.tools).toEqual(['context', 'execute_code']);
|
||||
});
|
||||
|
||||
it('should merge existing OCR with existing context', () => {
|
||||
const existingAgent = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['context1'],
|
||||
},
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1'],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const updateData = {};
|
||||
|
||||
const result = mergeAgentOcrConversion(existingAgent, updateData);
|
||||
|
||||
expect(result.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(result.tool_resources?.[EToolResources.context]?.file_ids).toEqual([
|
||||
'context1',
|
||||
'ocr1',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should merge converted context with updateData context', () => {
|
||||
const existingAgent = {
|
||||
tool_resources: {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1'],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const updateData = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['update-context1'],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const result = mergeAgentOcrConversion(existingAgent, updateData);
|
||||
|
||||
expect(result.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(result.tool_resources?.[EToolResources.context]?.file_ids?.sort()).toEqual([
|
||||
'ocr1',
|
||||
'update-context1',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle complex merge with files and file_ids', () => {
|
||||
const existingAgent = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['context1'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'context1',
|
||||
filename: 'existing.txt',
|
||||
filepath: '/existing.txt',
|
||||
type: 'text/plain',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1', 'ocr2'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'ocr1',
|
||||
filename: 'scan.pdf',
|
||||
filepath: '/scan.pdf',
|
||||
type: 'application/pdf',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
tools: ['context', 'ocr'],
|
||||
};
|
||||
|
||||
const updateData = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['update1'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'update1',
|
||||
filename: 'update.txt',
|
||||
filepath: '/update.txt',
|
||||
type: 'text/plain',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 512,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const result = mergeAgentOcrConversion(existingAgent, updateData);
|
||||
|
||||
expect(result.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(result.tool_resources?.[EToolResources.context]?.file_ids?.sort()).toEqual([
|
||||
'context1',
|
||||
'ocr1',
|
||||
'ocr2',
|
||||
'update1',
|
||||
]);
|
||||
expect(result.tool_resources?.[EToolResources.context]?.files).toHaveLength(3);
|
||||
expect(result.tools).toEqual(['context']);
|
||||
});
|
||||
|
||||
it('should not mutate original objects', () => {
|
||||
const existingAgent = {
|
||||
tool_resources: {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1'],
|
||||
},
|
||||
},
|
||||
tools: ['ocr'],
|
||||
};
|
||||
|
||||
const updateData = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['context1'],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const existingCopy = JSON.parse(JSON.stringify(existingAgent));
|
||||
const updateCopy = JSON.parse(JSON.stringify(updateData));
|
||||
|
||||
mergeAgentOcrConversion(existingAgent, updateData);
|
||||
|
||||
expect(existingAgent).toEqual(existingCopy);
|
||||
expect(updateData).toEqual(updateCopy);
|
||||
});
|
||||
|
||||
it('should preserve other tool resources in existing agent during merge', () => {
|
||||
const existingAgent = {
|
||||
tool_resources: {
|
||||
[EToolResources.execute_code]: {
|
||||
file_ids: ['exec1', 'exec2'],
|
||||
files: [
|
||||
{
|
||||
file_id: 'exec1',
|
||||
filename: 'script.py',
|
||||
filepath: '/script.py',
|
||||
type: 'text/x-python',
|
||||
user: 'user1',
|
||||
object: 'file' as const,
|
||||
bytes: 512,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
[EToolResources.file_search]: {
|
||||
file_ids: ['search1'],
|
||||
vector_store_ids: ['vector1', 'vector2'],
|
||||
},
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1'],
|
||||
},
|
||||
},
|
||||
tools: ['execute_code', 'file_search', 'ocr'],
|
||||
};
|
||||
|
||||
const updateData = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['new-context1'],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const originalExecuteCode = JSON.parse(
|
||||
JSON.stringify(existingAgent.tool_resources.execute_code),
|
||||
);
|
||||
const originalFileSearch = JSON.parse(
|
||||
JSON.stringify(existingAgent.tool_resources.file_search),
|
||||
);
|
||||
|
||||
const result = mergeAgentOcrConversion(existingAgent, updateData);
|
||||
|
||||
// OCR should be converted to context and merged with updateData context
|
||||
expect(result.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(result.tool_resources?.[EToolResources.context]?.file_ids?.sort()).toEqual([
|
||||
'new-context1',
|
||||
'ocr1',
|
||||
]);
|
||||
|
||||
// Other resources should be preserved
|
||||
expect(result.tool_resources?.[EToolResources.execute_code]).toEqual(originalExecuteCode);
|
||||
expect(result.tool_resources?.[EToolResources.file_search]).toEqual(originalFileSearch);
|
||||
|
||||
// Tools should have ocr replaced with context
|
||||
expect(result.tools).toEqual(['execute_code', 'file_search', 'context']);
|
||||
});
|
||||
|
||||
it('should not affect updateData tool resources that are not context', () => {
|
||||
const existingAgent = {
|
||||
tool_resources: {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1'],
|
||||
},
|
||||
},
|
||||
tools: ['ocr'],
|
||||
};
|
||||
|
||||
const updateData = {
|
||||
tool_resources: {
|
||||
[EToolResources.execute_code]: {
|
||||
file_ids: ['update-exec1'],
|
||||
},
|
||||
[EToolResources.file_search]: {
|
||||
file_ids: ['update-search1'],
|
||||
vector_store_ids: ['update-vector1'],
|
||||
},
|
||||
},
|
||||
tools: ['execute_code', 'file_search'],
|
||||
};
|
||||
|
||||
const originalUpdateData = JSON.parse(JSON.stringify(updateData));
|
||||
|
||||
const result = mergeAgentOcrConversion(existingAgent, updateData);
|
||||
|
||||
// OCR should be converted to context
|
||||
expect(result.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(result.tool_resources?.[EToolResources.context]).toEqual({
|
||||
file_ids: ['ocr1'],
|
||||
});
|
||||
|
||||
// UpdateData's other resources should not be affected
|
||||
expect(updateData.tool_resources?.[EToolResources.execute_code]).toEqual(
|
||||
originalUpdateData.tool_resources.execute_code,
|
||||
);
|
||||
expect(updateData.tool_resources?.[EToolResources.file_search]).toEqual(
|
||||
originalUpdateData.tool_resources.file_search,
|
||||
);
|
||||
|
||||
// Result should only have the converted OCR resources and tools
|
||||
expect(result.tools).toEqual(['context']);
|
||||
});
|
||||
|
||||
it('should handle all tool resources together', () => {
|
||||
const existingAgent = {
|
||||
tool_resources: {
|
||||
[EToolResources.execute_code]: {
|
||||
file_ids: ['exec1'],
|
||||
},
|
||||
[EToolResources.file_search]: {
|
||||
file_ids: ['search1'],
|
||||
vector_store_ids: ['vector1'],
|
||||
},
|
||||
[EToolResources.image_edit]: {
|
||||
file_ids: ['image1'],
|
||||
},
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['existing-context1'],
|
||||
},
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr1', 'ocr2'],
|
||||
},
|
||||
},
|
||||
tools: ['execute_code', 'file_search', 'image_edit', 'context', 'ocr'],
|
||||
};
|
||||
|
||||
const updateData = {
|
||||
tool_resources: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['update-context1'],
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const result = mergeAgentOcrConversion(existingAgent, updateData);
|
||||
|
||||
// OCR should be merged with existing context and update context
|
||||
expect(result.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(result.tool_resources?.[EToolResources.context]?.file_ids?.sort()).toEqual([
|
||||
'existing-context1',
|
||||
'ocr1',
|
||||
'ocr2',
|
||||
'update-context1',
|
||||
]);
|
||||
|
||||
// All other resources should be preserved
|
||||
expect(result.tool_resources?.[EToolResources.execute_code]).toEqual({
|
||||
file_ids: ['exec1'],
|
||||
});
|
||||
expect(result.tool_resources?.[EToolResources.file_search]).toEqual({
|
||||
file_ids: ['search1'],
|
||||
vector_store_ids: ['vector1'],
|
||||
});
|
||||
expect(result.tool_resources?.[EToolResources.image_edit]).toEqual({
|
||||
file_ids: ['image1'],
|
||||
});
|
||||
|
||||
// Tools should have ocr replaced with context (no duplicates)
|
||||
expect(result.tools).toEqual(['execute_code', 'file_search', 'image_edit', 'context']);
|
||||
});
|
||||
});
|
||||
});
|
141
packages/api/src/agents/legacy.ts
Normal file
141
packages/api/src/agents/legacy.ts
Normal file
|
@ -0,0 +1,141 @@
|
|||
import { EToolResources } from 'librechat-data-provider';
|
||||
import type { AgentToolResources, TFile } from 'librechat-data-provider';
|
||||
|
||||
/**
|
||||
* Converts OCR tool resource to context tool resource in place.
|
||||
* This modifies the input object directly (used for updateData in the handler).
|
||||
*
|
||||
* @param data - Object containing tool_resources and/or tools to convert
|
||||
* @returns void - modifies the input object directly
|
||||
*/
|
||||
export function convertOcrToContextInPlace(data: {
|
||||
tool_resources?: AgentToolResources;
|
||||
tools?: string[];
|
||||
}): void {
|
||||
// Convert OCR to context in tool_resources
|
||||
if (data.tool_resources?.ocr) {
|
||||
if (!data.tool_resources.context) {
|
||||
data.tool_resources.context = data.tool_resources.ocr;
|
||||
} else {
|
||||
// Merge OCR into existing context
|
||||
if (data.tool_resources.ocr?.file_ids?.length) {
|
||||
const existingFileIds = data.tool_resources.context.file_ids || [];
|
||||
const ocrFileIds = data.tool_resources.ocr.file_ids || [];
|
||||
data.tool_resources.context.file_ids = [...new Set([...existingFileIds, ...ocrFileIds])];
|
||||
}
|
||||
if (data.tool_resources.ocr?.files?.length) {
|
||||
const existingFiles = data.tool_resources.context.files || [];
|
||||
const ocrFiles = data.tool_resources.ocr.files || [];
|
||||
const filesMap = new Map<string, TFile>();
|
||||
[...existingFiles, ...ocrFiles].forEach((file) => {
|
||||
if (file?.file_id) {
|
||||
filesMap.set(file.file_id, file);
|
||||
}
|
||||
});
|
||||
data.tool_resources.context.files = Array.from(filesMap.values());
|
||||
}
|
||||
}
|
||||
delete data.tool_resources.ocr;
|
||||
}
|
||||
|
||||
// Convert OCR to context in tools array
|
||||
if (data.tools?.includes(EToolResources.ocr)) {
|
||||
data.tools = data.tools.map((tool) =>
|
||||
tool === EToolResources.ocr ? EToolResources.context : tool,
|
||||
);
|
||||
data.tools = [...new Set(data.tools)];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merges tool resources from existing agent with incoming update data,
|
||||
* converting OCR to context and handling deduplication.
|
||||
* Used when existing agent has OCR that needs to be converted and merged with updateData.
|
||||
*
|
||||
* @param existingAgent - The existing agent data
|
||||
* @param updateData - The incoming update data
|
||||
* @returns Object with merged tool_resources and tools
|
||||
*/
|
||||
export function mergeAgentOcrConversion(
|
||||
existingAgent: { tool_resources?: AgentToolResources; tools?: string[] },
|
||||
updateData: { tool_resources?: AgentToolResources; tools?: string[] },
|
||||
): { tool_resources?: AgentToolResources; tools?: string[] } {
|
||||
if (!existingAgent.tool_resources?.ocr) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const result: { tool_resources?: AgentToolResources; tools?: string[] } = {};
|
||||
|
||||
// Convert existing agent's OCR to context
|
||||
result.tool_resources = { ...existingAgent.tool_resources };
|
||||
|
||||
if (!result.tool_resources.context) {
|
||||
// Simple case: no context exists, just move ocr to context
|
||||
result.tool_resources.context = result.tool_resources.ocr;
|
||||
} else {
|
||||
// Merge case: context already exists, merge both file_ids and files arrays
|
||||
|
||||
// Merge file_ids if they exist
|
||||
if (result.tool_resources.ocr?.file_ids?.length) {
|
||||
const existingFileIds = result.tool_resources.context.file_ids || [];
|
||||
const ocrFileIds = result.tool_resources.ocr.file_ids || [];
|
||||
result.tool_resources.context.file_ids = [...new Set([...existingFileIds, ...ocrFileIds])];
|
||||
}
|
||||
|
||||
// Merge files array if it exists (already fetched files)
|
||||
if (result.tool_resources.ocr?.files?.length) {
|
||||
const existingFiles = result.tool_resources.context.files || [];
|
||||
const ocrFiles = result.tool_resources.ocr?.files || [];
|
||||
// Merge and deduplicate by file_id
|
||||
const filesMap = new Map<string, TFile>();
|
||||
[...existingFiles, ...ocrFiles].forEach((file) => {
|
||||
if (file?.file_id) {
|
||||
filesMap.set(file.file_id, file);
|
||||
}
|
||||
});
|
||||
result.tool_resources.context.files = Array.from(filesMap.values());
|
||||
}
|
||||
}
|
||||
|
||||
// Remove the deprecated ocr resource
|
||||
delete result.tool_resources.ocr;
|
||||
|
||||
// Update tools array: replace 'ocr' with 'context'
|
||||
if (existingAgent.tools?.includes(EToolResources.ocr)) {
|
||||
result.tools = existingAgent.tools.map((tool) =>
|
||||
tool === EToolResources.ocr ? EToolResources.context : tool,
|
||||
);
|
||||
// Remove duplicates if context already existed
|
||||
result.tools = [...new Set(result.tools)];
|
||||
}
|
||||
|
||||
// Merge with any context that might already be in updateData (from incoming OCR conversion)
|
||||
if (updateData.tool_resources?.context && result.tool_resources.context) {
|
||||
// Merge the contexts
|
||||
const mergedContext = { ...result.tool_resources.context };
|
||||
|
||||
// Merge file_ids
|
||||
if (updateData.tool_resources.context.file_ids?.length) {
|
||||
const existingIds = mergedContext.file_ids || [];
|
||||
const newIds = updateData.tool_resources.context.file_ids || [];
|
||||
mergedContext.file_ids = [...new Set([...existingIds, ...newIds])];
|
||||
}
|
||||
|
||||
// Merge files
|
||||
if (updateData.tool_resources.context.files?.length) {
|
||||
const existingFiles = mergedContext.files || [];
|
||||
const newFiles = updateData.tool_resources.context.files || [];
|
||||
const filesMap = new Map<string, TFile>();
|
||||
[...existingFiles, ...newFiles].forEach((file) => {
|
||||
if (file?.file_id) {
|
||||
filesMap.set(file.file_id, file);
|
||||
}
|
||||
});
|
||||
mergedContext.files = Array.from(filesMap.values());
|
||||
}
|
||||
|
||||
result.tool_resources.context = mergedContext;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
|
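Taken together, the two helpers above cover both sides of the migration: `convertOcrToContextInPlace` normalizes an incoming update payload, while `mergeAgentOcrConversion` folds a stored agent's legacy `ocr` resource into `context` without mutating its inputs. A condensed sketch of how an update handler might chain them; the wrapper function is an assumption about the calling code, not part of this file:

```ts
import type { AgentToolResources } from 'librechat-data-provider';
import { convertOcrToContextInPlace, mergeAgentOcrConversion } from './legacy';

type AgentUpdate = { tool_resources?: AgentToolResources; tools?: string[] };

/** Hypothetical update flow: normalize the payload, then merge any stored legacy OCR data. */
export function prepareAgentUpdate(
  existingAgent: { tool_resources?: AgentToolResources; tools?: string[] },
  updateData: AgentUpdate,
): AgentUpdate {
  // 1) Any `ocr` resource or tool in the incoming payload becomes `context` (in place).
  convertOcrToContextInPlace(updateData);

  // 2) If the stored agent still carries `ocr`, convert it and merge it with the payload's context.
  const converted = mergeAgentOcrConversion(existingAgent, updateData);

  // 3) Apply the converted fields last so the merged context/tools win over the raw payload.
  return { ...updateData, ...converted };
}
```
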
@ -31,7 +31,7 @@ describe('primeResources', () => {
|
|||
mockAppConfig = {
|
||||
endpoints: {
|
||||
[EModelEndpoint.agents]: {
|
||||
capabilities: [AgentCapabilities.ocr],
|
||||
capabilities: [AgentCapabilities.context],
|
||||
} as TAgentsEndpoint,
|
||||
},
|
||||
} as AppConfig;
|
||||
|
@ -43,8 +43,8 @@ describe('primeResources', () => {
|
|||
requestFileSet = new Set(['file1', 'file2', 'file3']);
|
||||
});
|
||||
|
||||
describe('when OCR is enabled and tool_resources has OCR file_ids', () => {
|
||||
it('should fetch OCR files and include them in attachments', async () => {
|
||||
describe('when `context` capability is enabled and tool_resources has "context" file_ids', () => {
|
||||
it('should fetch context files and include them in attachments', async () => {
|
||||
const mockOcrFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
|
@ -62,7 +62,7 @@ describe('primeResources', () => {
|
|||
mockGetFiles.mockResolvedValue(mockOcrFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['ocr-file-1'],
|
||||
},
|
||||
};
|
||||
|
@ -83,16 +83,18 @@ describe('primeResources', () => {
|
|||
{ userId: undefined, agentId: undefined },
|
||||
);
|
||||
expect(result.attachments).toEqual(mockOcrFiles);
|
||||
expect(result.tool_resources).toEqual(tool_resources);
|
||||
// Context field is deleted after files are fetched and re-categorized
|
||||
// Since the file is not embedded and has no special properties, it won't be categorized
|
||||
expect(result.tool_resources).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('when OCR is disabled', () => {
|
||||
it('should not fetch OCR files even if tool_resources has OCR file_ids', async () => {
|
||||
describe('when `context` capability is disabled', () => {
|
||||
it('should not fetch context files even if tool_resources has context file_ids', async () => {
|
||||
(mockAppConfig.endpoints![EModelEndpoint.agents] as TAgentsEndpoint).capabilities = [];
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['ocr-file-1'],
|
||||
},
|
||||
};
|
||||
|
@ -371,8 +373,60 @@ describe('primeResources', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('when both OCR and attachments are provided', () => {
|
||||
it('should include both OCR files and attachment files', async () => {
|
||||
describe('when both "context" files and "attachments" are provided', () => {
|
||||
it('should include both context files and attachment files', async () => {
|
||||
const mockOcrFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'ocr-file-1',
|
||||
filename: 'document.pdf',
|
||||
filepath: '/uploads/document.pdf',
|
||||
object: 'file',
|
||||
type: 'application/pdf',
|
||||
bytes: 1024,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
const mockAttachmentFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'file1',
|
||||
filename: 'attachment.txt',
|
||||
filepath: '/uploads/attachment.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
mockGetFiles.mockResolvedValue(mockOcrFiles);
|
||||
const attachments = Promise.resolve(mockAttachmentFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['ocr-file-1'],
|
||||
},
|
||||
};
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
appConfig: mockAppConfig,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments,
|
||||
tool_resources,
|
||||
});
|
||||
|
||||
expect(result.attachments).toHaveLength(2);
|
||||
expect(result.attachments?.[0]?.file_id).toBe('ocr-file-1');
|
||||
expect(result.attachments?.[1]?.file_id).toBe('file1');
|
||||
});
|
||||
|
||||
it('should include both context (as `ocr` resource) files and attachment files', async () => {
|
||||
const mockOcrFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
|
@ -424,7 +478,7 @@ describe('primeResources', () => {
|
|||
expect(result.attachments?.[1]?.file_id).toBe('file1');
|
||||
});
|
||||
|
||||
it('should prevent duplicate files when same file exists in OCR and attachments', async () => {
|
||||
it('should prevent duplicate files when same file exists in context tool_resource and attachments', async () => {
|
||||
const sharedFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'shared-file-id',
|
||||
|
@ -457,7 +511,7 @@ describe('primeResources', () => {
|
|||
const attachments = Promise.resolve(mockAttachmentFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['shared-file-id'],
|
||||
},
|
||||
};
|
||||
|
@ -500,7 +554,7 @@ describe('primeResources', () => {
|
|||
const attachments = Promise.resolve(mockAttachmentFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['shared-file-id'],
|
||||
},
|
||||
};
|
||||
|
@ -569,7 +623,7 @@ describe('primeResources', () => {
|
|||
const attachments = Promise.resolve(mockAttachmentFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['file-1', 'file-2'],
|
||||
},
|
||||
};
|
||||
|
@ -583,7 +637,7 @@ describe('primeResources', () => {
|
|||
tool_resources,
|
||||
});
|
||||
|
||||
// Should have 3 files total (2 from OCR + 1 unique from attachments)
|
||||
// Should have 3 files total (2 from context files + 1 unique from attachments)
|
||||
expect(result.attachments).toHaveLength(3);
|
||||
|
||||
// Each file should appear only once
|
||||
|
@ -628,7 +682,7 @@ describe('primeResources', () => {
|
|||
const attachments = Promise.resolve(mockAttachmentFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['normal-file'],
|
||||
},
|
||||
};
|
||||
|
@ -801,7 +855,7 @@ describe('primeResources', () => {
|
|||
);
|
||||
});
|
||||
|
||||
it('should handle complex scenario with OCR, existing tool_resources, and attachments', async () => {
|
||||
it('should handle complex scenario with context files, existing tool_resources, and attachments', async () => {
|
||||
const ocrFile: TFile = {
|
||||
user: 'user1',
|
||||
file_id: 'ocr-file',
|
||||
|
@ -843,11 +897,11 @@ describe('primeResources', () => {
|
|||
width: 600,
|
||||
};
|
||||
|
||||
mockGetFiles.mockResolvedValue([ocrFile, existingFile]); // OCR returns both files
|
||||
mockGetFiles.mockResolvedValue([ocrFile, existingFile]); // context returns both files
|
||||
const attachments = Promise.resolve([existingFile, ocrFile, newFile]); // Attachments has duplicates
|
||||
|
||||
const existingToolResources = {
|
||||
[EToolResources.ocr]: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['ocr-file', 'existing-file'],
|
||||
},
|
||||
[EToolResources.execute_code]: {
|
||||
|
@ -899,11 +953,11 @@ describe('primeResources', () => {
|
|||
const attachments = Promise.resolve(mockFiles);
|
||||
const error = new Error('Test error');
|
||||
|
||||
// Mock getFiles to throw an error when called for OCR
|
||||
// Mock getFiles to throw an error when called for context
|
||||
mockGetFiles.mockRejectedValue(error);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['ocr-file-1'],
|
||||
},
|
||||
};
|
||||
|
@ -949,6 +1003,245 @@ describe('primeResources', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('tool_resources field deletion behavior', () => {
|
||||
it('should not mutate the original tool_resources object', async () => {
|
||||
const originalToolResources = {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['context-file-1'],
|
||||
files: [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'context-file-1',
|
||||
filename: 'original.txt',
|
||||
filepath: '/uploads/original.txt',
|
||||
object: 'file' as const,
|
||||
type: 'text/plain',
|
||||
bytes: 256,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
},
|
||||
],
|
||||
},
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr-file-1'],
|
||||
},
|
||||
};
|
||||
|
||||
// Create a deep copy to compare later
|
||||
const originalCopy = JSON.parse(JSON.stringify(originalToolResources));
|
||||
|
||||
const mockOcrFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'ocr-file-1',
|
||||
filename: 'document.pdf',
|
||||
filepath: '/uploads/document.pdf',
|
||||
object: 'file',
|
||||
type: 'application/pdf',
|
||||
bytes: 1024,
|
||||
embedded: true,
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
mockGetFiles.mockResolvedValue(mockOcrFiles);
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
appConfig: mockAppConfig,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments: undefined,
|
||||
tool_resources: originalToolResources,
|
||||
});
|
||||
|
||||
// Original object should remain unchanged
|
||||
expect(originalToolResources).toEqual(originalCopy);
|
||||
|
||||
// Result should have modifications
|
||||
expect(result.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
expect(result.tool_resources?.[EToolResources.context]).toBeUndefined();
|
||||
expect(result.tool_resources?.[EToolResources.file_search]).toBeDefined();
|
||||
});
|
||||
|
||||
it('should delete ocr field after merging file_ids with context', async () => {
|
||||
const mockOcrFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'ocr-file-1',
|
||||
filename: 'document.pdf',
|
||||
filepath: '/uploads/document.pdf',
|
||||
object: 'file',
|
||||
type: 'application/pdf',
|
||||
bytes: 1024,
|
||||
embedded: true, // Will be categorized as file_search
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
mockGetFiles.mockResolvedValue(mockOcrFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.ocr]: {
|
||||
file_ids: ['ocr-file-1'],
|
||||
},
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['context-file-1'],
|
||||
},
|
||||
};
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
appConfig: mockAppConfig,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments: undefined,
|
||||
tool_resources,
|
||||
});
|
||||
|
||||
// OCR field should be deleted after merging
|
||||
expect(result.tool_resources?.[EToolResources.ocr]).toBeUndefined();
|
||||
// Context field should also be deleted since files were fetched and re-categorized
|
||||
expect(result.tool_resources?.[EToolResources.context]).toBeUndefined();
|
||||
// File should be categorized as file_search based on embedded=true
|
||||
expect(result.tool_resources?.[EToolResources.file_search]?.files).toHaveLength(1);
|
||||
expect(result.tool_resources?.[EToolResources.file_search]?.files?.[0]?.file_id).toBe(
|
||||
'ocr-file-1',
|
||||
);
|
||||
|
||||
// Verify getFiles was called with merged file_ids
|
||||
expect(mockGetFiles).toHaveBeenCalledWith(
|
||||
{ file_id: { $in: ['context-file-1', 'ocr-file-1'] } },
|
||||
{},
|
||||
{},
|
||||
{ userId: undefined, agentId: undefined },
|
||||
);
|
||||
});
|
||||
|
||||
it('should delete context field when fetching and re-categorizing files', async () => {
|
||||
const mockContextFiles: TFile[] = [
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'context-file-1',
|
||||
filename: 'script.py',
|
||||
filepath: '/uploads/script.py',
|
||||
object: 'file',
|
||||
type: 'text/x-python',
|
||||
bytes: 512,
|
||||
embedded: false,
|
||||
usage: 0,
|
||||
metadata: {
|
||||
fileIdentifier: 'python-script',
|
||||
},
|
||||
},
|
||||
{
|
||||
user: 'user1',
|
||||
file_id: 'context-file-2',
|
||||
filename: 'data.txt',
|
||||
filepath: '/uploads/data.txt',
|
||||
object: 'file',
|
||||
type: 'text/plain',
|
||||
bytes: 256,
|
||||
embedded: true,
|
||||
usage: 0,
|
||||
},
|
||||
];
|
||||
|
||||
mockGetFiles.mockResolvedValue(mockContextFiles);
|
||||
|
||||
const tool_resources = {
|
||||
[EToolResources.context]: {
|
||||
file_ids: ['context-file-1', 'context-file-2'],
|
||||
},
|
||||
};
|
||||
|
||||
const result = await primeResources({
|
||||
req: mockReq,
|
||||
appConfig: mockAppConfig,
|
||||
getFiles: mockGetFiles,
|
||||
requestFileSet,
|
||||
attachments: undefined,
|
||||
tool_resources,
|
||||
});
|
||||
|
||||
      // Context field should be deleted after fetching files
      expect(result.tool_resources?.[EToolResources.context]).toBeUndefined();

      // Files should be re-categorized based on their properties
      expect(result.tool_resources?.[EToolResources.execute_code]?.files).toHaveLength(1);
      expect(result.tool_resources?.[EToolResources.execute_code]?.files?.[0]?.file_id).toBe(
        'context-file-1',
      );

      expect(result.tool_resources?.[EToolResources.file_search]?.files).toHaveLength(1);
      expect(result.tool_resources?.[EToolResources.file_search]?.files?.[0]?.file_id).toBe(
        'context-file-2',
      );
    });

    it('should preserve context field when context capability is disabled', async () => {
      // Disable context capability
      (mockAppConfig.endpoints![EModelEndpoint.agents] as TAgentsEndpoint).capabilities = [];

      const tool_resources = {
        [EToolResources.context]: {
          file_ids: ['context-file-1'],
        },
      };

      const result = await primeResources({
        req: mockReq,
        appConfig: mockAppConfig,
        getFiles: mockGetFiles,
        requestFileSet,
        attachments: undefined,
        tool_resources,
      });

      // Context field should be preserved when capability is disabled
      expect(result.tool_resources?.[EToolResources.context]).toEqual({
        file_ids: ['context-file-1'],
      });

      // getFiles should not have been called
      expect(mockGetFiles).not.toHaveBeenCalled();
    });

    it('should still delete ocr field even when context capability is disabled', async () => {
      // Disable context capability
      (mockAppConfig.endpoints![EModelEndpoint.agents] as TAgentsEndpoint).capabilities = [];

      const tool_resources = {
        [EToolResources.ocr]: {
          file_ids: ['ocr-file-1'],
        },
        [EToolResources.context]: {
          file_ids: ['context-file-1'],
        },
      };

      const result = await primeResources({
        req: mockReq,
        appConfig: mockAppConfig,
        getFiles: mockGetFiles,
        requestFileSet,
        attachments: undefined,
        tool_resources,
      });

      // OCR field should still be deleted (merged into context)
      expect(result.tool_resources?.[EToolResources.ocr]).toBeUndefined();

      // Context field should contain merged file_ids but not be processed
      expect(result.tool_resources?.[EToolResources.context]).toEqual({
        file_ids: ['context-file-1', 'ocr-file-1'],
      });

      // getFiles should not have been called since context is disabled
      expect(mockGetFiles).not.toHaveBeenCalled();
    });
  });

  describe('edge cases', () => {
    it('should handle missing appConfig agents endpoint gracefully', async () => {
      const reqWithoutLocals = {} as ServerRequest & { user?: IUser };

@ -961,14 +1254,14 @@ describe('primeResources', () => {
        requestFileSet,
        attachments: undefined,
        tool_resources: {
          [EToolResources.ocr]: {
          [EToolResources.context]: {
            file_ids: ['ocr-file-1'],
          },
        },
      });

      expect(mockGetFiles).not.toHaveBeenCalled();
      // When appConfig agents endpoint is missing, OCR is disabled
      // When appConfig agents endpoint is missing, context is disabled
      // and no attachments are provided, the function returns undefined
      expect(result.attachments).toBeUndefined();
    });
@ -183,18 +183,32 @@ export const primeResources = async ({
  const processedResourceFiles = new Set<string>();
  /**
   * The agent's tool resources object that will be updated with categorized files
   * Initialized from input parameter or empty object if not provided
   * Create a shallow copy first to avoid mutating the original
   */
  const tool_resources = _tool_resources ?? {};
  const tool_resources: AgentToolResources = { ...(_tool_resources ?? {}) };

  // Track existing files in tool_resources to prevent duplicates within resources
  // Deep copy each resource to avoid mutating nested objects/arrays
  for (const [resourceType, resource] of Object.entries(tool_resources)) {
    if (resource?.files && Array.isArray(resource.files)) {
    if (!resource) {
      continue;
    }

    // Deep copy the resource to avoid mutations
    tool_resources[resourceType as keyof AgentToolResources] = {
      ...resource,
      // Deep copy arrays to prevent mutations
      ...(resource.files && { files: [...resource.files] }),
      ...(resource.file_ids && { file_ids: [...resource.file_ids] }),
      ...(resource.vector_store_ids && { vector_store_ids: [...resource.vector_store_ids] }),
    } as AgentBaseResource;

    // Now track existing files
    if (resource.files && Array.isArray(resource.files)) {
      for (const file of resource.files) {
        if (file?.file_id) {
          processedResourceFiles.add(`${resourceType}:${file.file_id}`);
          // Files from non-OCR resources should not be added to attachments from _attachments
          if (resourceType !== EToolResources.ocr) {
          // Files from non-context resources should not be added to attachments from _attachments
          if (resourceType !== EToolResources.context && resourceType !== EToolResources.ocr) {
            attachmentFileIds.add(file.file_id);
          }
        }

@ -202,14 +216,22 @@ export const primeResources = async ({
    }
  }

  const isOCREnabled = (
  const isContextEnabled = (
    appConfig?.endpoints?.[EModelEndpoint.agents]?.capabilities ?? []
  ).includes(AgentCapabilities.ocr);
  ).includes(AgentCapabilities.context);

  if (tool_resources[EToolResources.ocr]?.file_ids && isOCREnabled) {
  const fileIds = tool_resources[EToolResources.context]?.file_ids ?? [];
  const ocrFileIds = tool_resources[EToolResources.ocr]?.file_ids;
  if (ocrFileIds != null) {
    fileIds.push(...ocrFileIds);
    delete tool_resources[EToolResources.ocr];
  }

  if (fileIds.length > 0 && isContextEnabled) {
    delete tool_resources[EToolResources.context];
    const context = await getFiles(
      {
        file_id: { $in: tool_resources.ocr.file_ids },
        file_id: { $in: fileIds },
      },
      {},
      {},
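The hunk above folds the deprecated `ocr` resource into `context` before any file lookup happens. A minimal standalone sketch of that merge step, using simplified local types rather than the project's `AgentToolResources`, and covering only the merge (not the capability gate or the `getFiles` call):

// Simplified sketch of the ocr -> context merge; ResourceEntry/ToolResources
// are local stand-ins, not the project's own definitions.
type ResourceEntry = { file_ids?: string[] };
type ToolResources = { context?: ResourceEntry; ocr?: ResourceEntry };

function mergeOcrIntoContext(input: ToolResources): ToolResources {
  // Shallow-copy so the caller's object is never mutated.
  const resources: ToolResources = { ...input };
  const fileIds = [...(resources.context?.file_ids ?? [])];
  const ocrFileIds = resources.ocr?.file_ids;
  if (ocrFileIds != null) {
    // Deprecated `ocr` ids are appended after the existing context ids,
    // then the old key is dropped.
    fileIds.push(...ocrFileIds);
    delete resources.ocr;
  }
  if (fileIds.length > 0) {
    resources.context = { file_ids: fileIds };
  }
  return resources;
}

// Example: mirrors the test expectation ['context-file-1', 'ocr-file-1'].
console.log(
  mergeOcrIntoContext({
    context: { file_ids: ['context-file-1'] },
    ocr: { file_ids: ['ocr-file-1'] },
  }),
);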
@ -26,6 +26,8 @@ export const agentToolResourcesSchema = z
    image_edit: agentBaseResourceSchema.optional(),
    execute_code: agentBaseResourceSchema.optional(),
    file_search: agentFileResourceSchema.optional(),
    context: agentBaseResourceSchema.optional(),
    /** @deprecated Use context instead */
    ocr: agentBaseResourceSchema.optional(),
  })
  .optional();
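With `context` added next to the deprecated `ocr` key, both spellings validate during the transition. A rough illustration of that shape; the inline schema below is a stand-in for `agentBaseResourceSchema`, not the real definition:

import { z } from 'zod';

// Stand-in for agentBaseResourceSchema; the shipped schema may carry more fields.
const baseResource = z.object({ file_ids: z.array(z.string()).optional() });

const toolResources = z
  .object({
    context: baseResource.optional(),
    /** @deprecated Use context instead */
    ocr: baseResource.optional(),
  })
  .optional();

// Both the new and the legacy key parse successfully during the migration window.
toolResources.parse({ context: { file_ids: ['file-1'] } });
toolResources.parse({ ocr: { file_ids: ['file-1'] } });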
@ -1311,6 +1311,142 @@ describe('updateInterfacePermissions - permissions', () => {
    });
  });

  it('should re-enable memory permissions when memory.disabled changes from true to false', async () => {
    // Mock existing memory permissions that are disabled
    mockGetRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.MEMORIES]: {
          [Permissions.USE]: false,
          [Permissions.CREATE]: false,
          [Permissions.READ]: false,
          [Permissions.UPDATE]: false,
          [Permissions.OPT_OUT]: false,
        },
        // Other existing permissions
        [PermissionTypes.PROMPTS]: { [Permissions.USE]: true },
        [PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
      },
    });

    const config = {
      interface: {
        // Not explicitly configuring memories in interface
        prompts: true,
        bookmarks: true,
      },
      memory: {
        disabled: false, // Memory is explicitly enabled (changed from true to false)
        agent: {
          id: 'test-agent-id',
        },
        personalize: true,
      } as unknown as TCustomConfig['memory'],
    };
    const configDefaults = {
      interface: {
        memories: true,
        prompts: true,
        bookmarks: true,
      },
    } as TConfigDefaults;
    const interfaceConfig = await loadDefaultInterface({ config, configDefaults });
    const appConfig = { config, interfaceConfig } as unknown as AppConfig;

    await updateInterfacePermissions({
      appConfig,
      getRoleByName: mockGetRoleByName,
      updateAccessPermissions: mockUpdateAccessPermissions,
    });

    // Check USER role call
    const userCall = mockUpdateAccessPermissions.mock.calls.find(
      (call) => call[0] === SystemRoles.USER,
    );
    // Memory permissions should be re-enabled
    expect(userCall[1][PermissionTypes.MEMORIES]).toEqual({
      [Permissions.USE]: true,
      [Permissions.CREATE]:
        roleDefaults[SystemRoles.USER].permissions[PermissionTypes.MEMORIES]?.[Permissions.CREATE],
      [Permissions.READ]:
        roleDefaults[SystemRoles.USER].permissions[PermissionTypes.MEMORIES]?.[Permissions.READ],
      [Permissions.UPDATE]:
        roleDefaults[SystemRoles.USER].permissions[PermissionTypes.MEMORIES]?.[Permissions.UPDATE],
      [Permissions.OPT_OUT]: true, // Should be true when personalize is enabled
    });

    // Check ADMIN role call
    const adminCall = mockUpdateAccessPermissions.mock.calls.find(
      (call) => call[0] === SystemRoles.ADMIN,
    );
    expect(adminCall[1][PermissionTypes.MEMORIES]).toEqual({
      [Permissions.USE]: true,
      [Permissions.CREATE]:
        roleDefaults[SystemRoles.ADMIN].permissions[PermissionTypes.MEMORIES]?.[Permissions.CREATE],
      [Permissions.READ]:
        roleDefaults[SystemRoles.ADMIN].permissions[PermissionTypes.MEMORIES]?.[Permissions.READ],
      [Permissions.UPDATE]:
        roleDefaults[SystemRoles.ADMIN].permissions[PermissionTypes.MEMORIES]?.[Permissions.UPDATE],
      [Permissions.OPT_OUT]: true, // Should be true when personalize is enabled
    });

    // Verify the existing role data was passed to updateAccessPermissions
    expect(userCall[2]).toMatchObject({
      permissions: expect.objectContaining({
        [PermissionTypes.MEMORIES]: expect.any(Object),
      }),
    });
  });

  it('should re-enable memory permissions when valid memory config exists without disabled field', async () => {
    // Mock existing memory permissions that are disabled
    mockGetRoleByName.mockResolvedValue({
      permissions: {
        [PermissionTypes.MEMORIES]: {
          [Permissions.USE]: false,
          [Permissions.CREATE]: false,
          [Permissions.READ]: false,
          [Permissions.UPDATE]: false,
          [Permissions.OPT_OUT]: false,
        },
      },
    });

    const config = {
      memory: {
        // No disabled field, but valid config
        agent: {
          id: 'test-agent-id',
          provider: 'openai',
        },
        personalize: false,
      } as unknown as TCustomConfig['memory'],
    };
    const configDefaults = { interface: {} } as TConfigDefaults;
    const interfaceConfig = await loadDefaultInterface({ config, configDefaults });
    const appConfig = { config, interfaceConfig } as unknown as AppConfig;

    await updateInterfacePermissions({
      appConfig,
      getRoleByName: mockGetRoleByName,
      updateAccessPermissions: mockUpdateAccessPermissions,
    });

    // Check USER role call - memory should be re-enabled
    const userCall = mockUpdateAccessPermissions.mock.calls.find(
      (call) => call[0] === SystemRoles.USER,
    );
    expect(userCall[1][PermissionTypes.MEMORIES]).toEqual({
      [Permissions.USE]: true,
      [Permissions.CREATE]:
        roleDefaults[SystemRoles.USER].permissions[PermissionTypes.MEMORIES]?.[Permissions.CREATE],
      [Permissions.READ]:
        roleDefaults[SystemRoles.USER].permissions[PermissionTypes.MEMORIES]?.[Permissions.READ],
      [Permissions.UPDATE]:
        roleDefaults[SystemRoles.USER].permissions[PermissionTypes.MEMORIES]?.[Permissions.UPDATE],
      [Permissions.OPT_OUT]: undefined, // Should be undefined when personalize is false
    });
  });

  it('should override existing memory permissions when memory.disabled is true', async () => {
    // Mock existing memory permissions that are enabled
    mockGetRoleByName.mockResolvedValue({
@ -69,8 +69,12 @@ export async function updateInterfacePermissions({
  const interfaceConfig = appConfig?.config?.interface;
  const memoryConfig = appConfig?.config?.memory;
  const memoryEnabled = isMemoryEnabled(memoryConfig);
  /** Check if memory is explicitly disabled */
  const isMemoryExplicitlyDisabled = memoryConfig && !memoryEnabled;
  /** Check if memory is explicitly disabled (memory.disabled === true) */
  const isMemoryExplicitlyDisabled = memoryConfig?.disabled === true;
  /** Check if memory should be enabled (explicitly enabled or valid config) */
  const shouldEnableMemory =
    memoryConfig?.disabled === false ||
    (memoryConfig && memoryEnabled && memoryConfig.disabled === undefined);
  /** Check if personalization is enabled (defaults to true if memory is configured and enabled) */
  const isPersonalizationEnabled =
    memoryConfig && memoryEnabled && memoryConfig.personalize !== false;

@ -111,19 +115,24 @@ export async function updateInterfacePermissions({
      const permTypeExists = existingPermissions?.[permType];
      const isExplicitlyConfigured =
        interfaceConfig && hasExplicitConfig(interfaceConfig, permType);
      const isMemoryDisabled =
        permType === PermissionTypes.MEMORIES && isMemoryExplicitlyDisabled === true;
      const isMemoryDisabled = permType === PermissionTypes.MEMORIES && isMemoryExplicitlyDisabled;
      const isMemoryReenabling =
        permType === PermissionTypes.MEMORIES &&
        shouldEnableMemory &&
        existingPermissions?.[PermissionTypes.MEMORIES]?.[Permissions.USE] === false;

      // Only update if: doesn't exist OR explicitly configured
      if (!permTypeExists || isExplicitlyConfigured || isMemoryDisabled) {
      // Only update if: doesn't exist OR explicitly configured OR memory state change
      if (!permTypeExists || isExplicitlyConfigured || isMemoryDisabled || isMemoryReenabling) {
        permissionsToUpdate[permType] = permissions;
        if (!permTypeExists) {
          logger.debug(`Role '${roleName}': Setting up default permissions for '${permType}'`);
        } else if (isExplicitlyConfigured) {
          logger.debug(`Role '${roleName}': Applying explicit config for '${permType}'`);
        } else if (isMemoryDisabled) {
          logger.debug(`Role '${roleName}': Disabling memories as memory.disabled is true`);
        } else if (isMemoryReenabling) {
          logger.debug(
            `Role '${roleName}': Disabling memories as it is explicitly disabled in config`,
            `Role '${roleName}': Re-enabling memories due to valid memory configuration`,
          );
        }
      } else {

@ -147,13 +156,15 @@ export async function updateInterfacePermissions({
        ),
      },
      [PermissionTypes.MEMORIES]: {
        [Permissions.USE]: isMemoryExplicitlyDisabled
          ? false
          : getPermissionValue(
              loadedInterface.memories,
              defaultPerms[PermissionTypes.MEMORIES]?.[Permissions.USE],
              defaults.memories,
            ),
        [Permissions.USE]: (() => {
          if (isMemoryExplicitlyDisabled) return false;
          if (shouldEnableMemory) return true;
          return getPermissionValue(
            loadedInterface.memories,
            defaultPerms[PermissionTypes.MEMORIES]?.[Permissions.USE],
            defaults.memories,
          );
        })(),
        ...(defaultPerms[PermissionTypes.MEMORIES]?.[Permissions.CREATE] !== undefined && {
          [Permissions.CREATE]: isMemoryExplicitlyDisabled
            ? false

@ -169,7 +180,9 @@ export async function updateInterfacePermissions({
            ? false
            : defaultPerms[PermissionTypes.MEMORIES][Permissions.UPDATE],
        }),
        [Permissions.OPT_OUT]: isPersonalizationEnabled,
        [Permissions.OPT_OUT]: isMemoryExplicitlyDisabled
          ? false
          : isPersonalizationEnabled || undefined,
      },
      [PermissionTypes.MULTI_CONVO]: {
        [Permissions.USE]: getPermissionValue(
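The memory flags above resolve in a fixed order: an explicit `disabled: true` always wins, an explicit `disabled: false` or an otherwise valid config re-enables memories, and anything else falls through to the interface/role defaults. A condensed sketch of that precedence, with simplified local types standing in for `TCustomConfig['memory']` and for the real `isMemoryEnabled` check:

// Condensed decision sketch; MemoryConfig is a simplified stand-in.
interface MemoryConfig {
  disabled?: boolean;
  agent?: { id?: string; provider?: string };
  personalize?: boolean;
}

type MemoryDecision = 'disable' | 'enable' | 'use-defaults';

// hasValidConfig stands in for the project's isMemoryEnabled() result.
function decideMemoryUse(config: MemoryConfig | undefined, hasValidConfig: boolean): MemoryDecision {
  if (config?.disabled === true) {
    return 'disable'; // explicit opt-out always wins
  }
  if (config?.disabled === false || (config && hasValidConfig && config.disabled === undefined)) {
    return 'enable'; // explicit opt-in, or a valid config with no disabled field
  }
  return 'use-defaults'; // fall back to interface/role defaults
}

// Example: a config without `disabled` but with a valid agent re-enables memories.
console.log(decideMemoryUse({ agent: { id: 'test-agent-id', provider: 'openai' } }, true)); // 'enable'
console.log(decideMemoryUse({ disabled: true }, true)); // 'disable'
console.log(decideMemoryUse(undefined, false)); // 'use-defaults'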
@ -8,7 +8,7 @@ import type * as t from '~/mcp/types';
import { ConnectionsRepository } from '~/mcp/ConnectionsRepository';
import { detectOAuthRequirement } from '~/mcp/oauth';
import { sanitizeUrlForLogging } from '~/mcp/utils';
import { processMCPEnv } from '~/utils';
import { processMCPEnv, isEnabled } from '~/utils';
import { CONSTANTS } from '~/mcp/enum';

/**

@ -158,8 +158,13 @@ export class MCPServersRegistry {
  private async fetchServerInstructions(serverName: string): Promise<void> {
    const config = this.parsedConfigs[serverName];
    if (!config.serverInstructions) return;
    if (typeof config.serverInstructions === 'string') return;

    // If it's a string that's not "true", it's a custom instruction
    if (typeof config.serverInstructions === 'string' && !isEnabled(config.serverInstructions)) {
      return;
    }

    // Fetch from server if true (boolean) or "true" (string)
    const conn = await this.connections.get(serverName);
    config.serverInstructions = conn.client.getInstructions();
    if (!config.serverInstructions) {
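The fix hinges on treating the string 'true' (a common artifact of YAML or env parsing) the same as boolean true, while leaving any other string alone as literal custom instructions. A self-contained sketch of that branching; the local truthiness helper below only approximates the project's isEnabled and is an assumption, not its actual implementation:

// Local stand-in for isEnabled: boolean true, or the trimmed,
// case-insensitive string 'true', counts as enabled.
function isTruthyFlag(value: unknown): boolean {
  if (typeof value === 'boolean') return value;
  return typeof value === 'string' && value.trim().toLowerCase() === 'true';
}

// Decide what to do with a serverInstructions value before connecting.
function resolveInstructions(
  value: string | boolean | undefined,
): 'skip' | 'keep-custom' | 'fetch-from-server' {
  if (!value) return 'skip'; // undefined, false, or empty string: nothing to do
  if (typeof value === 'string' && !isTruthyFlag(value)) return 'keep-custom'; // literal instructions
  return 'fetch-from-server'; // true or 'true': ask the MCP server for its instructions
}

console.log(resolveInstructions('true')); // 'fetch-from-server'
console.log(resolveInstructions('Custom instructions here')); // 'keep-custom'
console.log(resolveInstructions(true)); // 'fetch-from-server'
console.log(resolveInstructions(undefined)); // 'skip'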
@ -288,5 +288,74 @@ describe('MCPServersRegistry - Initialize Function', () => {
      // Compare the actual parsedConfigs against the expected fixture
      expect(registry.parsedConfigs).toEqual(expectedParsedConfigs);
    });

    it('should handle serverInstructions as string "true" correctly and fetch from server', async () => {
      // Create test config with serverInstructions as string "true"
      const testConfig: t.MCPServers = {
        test_server_string_true: {
          type: 'stdio',
          args: [],
          command: 'test-command',
          serverInstructions: 'true', // Simulating string "true" from YAML parsing
        },
        test_server_custom_string: {
          type: 'stdio',
          args: [],
          command: 'test-command',
          serverInstructions: 'Custom instructions here',
        },
        test_server_bool_true: {
          type: 'stdio',
          args: [],
          command: 'test-command',
          serverInstructions: true,
        },
      };

      const registry = new MCPServersRegistry(testConfig);

      // Setup mock connection for servers that should fetch
      const mockClient = {
        listTools: jest.fn().mockResolvedValue({ tools: [] }),
        getInstructions: jest.fn().mockReturnValue('Fetched instructions from server'),
        getServerCapabilities: jest.fn().mockReturnValue({ tools: {} }),
      };
      const mockConnection = {
        client: mockClient,
      } as unknown as jest.Mocked<MCPConnection>;

      mockConnectionsRepo.get.mockResolvedValue(mockConnection);
      mockConnectionsRepo.getLoaded.mockResolvedValue(
        new Map([
          ['test_server_string_true', mockConnection],
          ['test_server_bool_true', mockConnection],
        ]),
      );
      mockDetectOAuthRequirement.mockResolvedValue({
        requiresOAuth: false,
        method: 'no-metadata-found',
        metadata: null,
      });

      await registry.initialize();

      // Verify that string "true" was treated as fetch-from-server
      expect(registry.parsedConfigs['test_server_string_true'].serverInstructions).toBe(
        'Fetched instructions from server',
      );

      // Verify that custom string was kept as-is
      expect(registry.parsedConfigs['test_server_custom_string'].serverInstructions).toBe(
        'Custom instructions here',
      );

      // Verify that boolean true also fetched from server
      expect(registry.parsedConfigs['test_server_bool_true'].serverInstructions).toBe(
        'Fetched instructions from server',
      );

      // Verify getInstructions was called for both "true" cases
      expect(mockClient.getInstructions).toHaveBeenCalledTimes(2);
    });
  });
});
@ -180,6 +180,7 @@ export enum AgentCapabilities {
  web_search = 'web_search',
  artifacts = 'artifacts',
  actions = 'actions',
  context = 'context',
  tools = 'tools',
  chain = 'chain',
  ocr = 'ocr',

@ -253,6 +254,7 @@ export const defaultAgentCapabilities = [
  AgentCapabilities.web_search,
  AgentCapabilities.artifacts,
  AgentCapabilities.actions,
  AgentCapabilities.context,
  AgentCapabilities.tools,
  AgentCapabilities.chain,
  AgentCapabilities.ocr,

@ -31,6 +31,7 @@ export enum EToolResources {
  execute_code = 'execute_code',
  file_search = 'file_search',
  image_edit = 'image_edit',
  context = 'context',
  ocr = 'ocr',
}

@ -182,6 +183,8 @@ export interface AgentToolResources {
  [EToolResources.image_edit]?: AgentBaseResource;
  [EToolResources.execute_code]?: ExecuteCodeResource;
  [EToolResources.file_search]?: AgentFileResource;
  [EToolResources.context]?: AgentBaseResource;
  /** @deprecated Use context instead */
  [EToolResources.ocr]?: AgentBaseResource;
}
/**
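With the `context` capability and tool resource registered alongside the deprecated `ocr` entries, downstream code can gate the new resource on the advertised capability list, as the resources change earlier in this commit does. A minimal sketch of that gate, using plain strings in place of the real `AgentCapabilities` enum values:

// Plain-string stand-ins for AgentCapabilities values.
const defaultCapabilities = ['file_search', 'actions', 'context', 'tools', 'ocr'];

// Mirrors the isContextEnabled check: the feature is active only when the
// agents endpoint advertises the `context` capability.
function isCapabilityEnabled(capability: string, capabilities: string[] = []): boolean {
  return capabilities.includes(capability);
}

console.log(isCapabilityEnabled('context', defaultCapabilities)); // true
console.log(isCapabilityEnabled('context', [])); // false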