mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-03-21 23:26:34 +01:00
* fix: use ACL ownership for prompt group cleanup on user deletion deleteUserPrompts previously called getAllPromptGroups with only an author filter, which defaults to searchShared=true and drops the author filter for shared/global project entries. This caused any user deleting their account to strip shared prompt group associations and ACL entries for other users. Replace the author-based query with ACL-based ownership lookup: - Find prompt groups where the user has OWNER permission (DELETE bit) - Only delete groups where the user is the sole owner - Preserve multi-owned groups and their ACL entries for other owners * fix: use ACL ownership for agent cleanup on user deletion deleteUserAgents used the deprecated author field to find and delete agents, then unconditionally removed all ACL entries for those agents. This could destroy ACL entries for agents shared with or co-owned by other users. Replace the author-based query with ACL-based ownership lookup: - Find agents where the user has OWNER permission (DELETE bit) - Only delete agents where the user is the sole owner - Preserve multi-owned agents and their ACL entries for other owners - Also clean up handoff edges referencing deleted agents * fix: add MCP server cleanup on user deletion User deletion had no cleanup for MCP servers, leaving solely-owned servers orphaned in the database with dangling ACL entries for other users. Add deleteUserMcpServers that follows the same ACL ownership pattern as prompt groups and agents: find servers with OWNER permission, check for sole ownership, and only delete those with no other owners. * style: fix prettier formatting in Prompt.spec.js * refactor: extract getSoleOwnedResourceIds to PermissionService The ACL sole-ownership detection algorithm was duplicated across deleteUserPrompts, deleteUserAgents, and deleteUserMcpServers. Centralizes the three-step pattern (find owned entries, find other owners, compute sole-owned set) into a single reusable utility. 
* refactor: use getSoleOwnedResourceIds in all deletion functions - Replace inline ACL queries with the centralized utility - Remove vestigial _req parameter from deleteUserPrompts - Use Promise.all for parallel project removal instead of sequential awaits - Disconnect live MCP sessions and invalidate tool cache before deleting sole-owned MCP server documents - Export deleteUserMcpServers for testability * test: improve deletion test coverage and quality - Move deleteUserPrompts call to beforeAll to eliminate execution-order dependency between tests - Standardize on test() instead of it() for consistency in Prompt.spec.js - Add assertion for deleting user's own ACL entry preservation on multi-owned agents - Add deleteUserMcpServers integration test suite with 6 tests covering sole-owner deletion, multi-owner preservation, session disconnect, cache invalidation, model-not-registered guard, and missing MCPManager - Add PermissionService mock to existing deleteUser.spec.js to fix import chain * fix: add legacy author-based fallback for unmigrated resources Resources created before the ACL system have author set but no AclEntry records. The sole-ownership detection returns empty for these, causing deleteUserPrompts, deleteUserAgents, and deleteUserMcpServers to silently skip them — permanently orphaning data on user deletion. Add a fallback that identifies author-owned resources with zero ACL entries (truly unmigrated) and includes them in the deletion set. This preserves the multi-owner safety of the ACL path while ensuring pre-ACL resources are still cleaned up regardless of migration status. * style: fix prettier formatting across all changed files * test: add resource type coverage guard for user deletion Ensures every ResourceType in the ACL system has a corresponding cleanup handler wired into deleteUserController. When a new ResourceType is added (e.g. WORKFLOW), this test fails immediately, preventing silent data orphaning on user account deletion. 
* style: fix import order in PermissionService destructure * test: add opt-out set and fix test lifecycle in coverage guard Add NO_USER_CLEANUP_NEEDED set for resource types that legitimately require no per-user deletion. Move fs.readFileSync into beforeAll so path errors surface as clean test failures instead of unhandled crashes.
974 lines
31 KiB
JavaScript
974 lines
31 KiB
JavaScript
const mongoose = require('mongoose');
|
|
const crypto = require('node:crypto');
|
|
const { logger } = require('@librechat/data-schemas');
|
|
const { getCustomEndpointConfig } = require('@librechat/api');
|
|
const {
|
|
Tools,
|
|
SystemRoles,
|
|
ResourceType,
|
|
actionDelimiter,
|
|
isAgentsEndpoint,
|
|
isEphemeralAgentId,
|
|
encodeEphemeralAgentId,
|
|
} = require('librechat-data-provider');
|
|
const { mcp_all, mcp_delimiter } = require('librechat-data-provider').Constants;
|
|
const {
|
|
removeAgentFromAllProjects,
|
|
removeAgentIdsFromProject,
|
|
addAgentIdsToProject,
|
|
} = require('./Project');
|
|
const {
|
|
getSoleOwnedResourceIds,
|
|
removeAllPermissions,
|
|
} = require('~/server/services/PermissionService');
|
|
const { getMCPServerTools } = require('~/server/services/Config');
|
|
const { Agent, AclEntry, User } = require('~/db/models');
|
|
const { getActions } = require('./Action');
|
|
|
|
/**
 * Extracts unique MCP server names from a tools array.
 * Tools format: "toolName_mcp_serverName" or "sys__server__sys_mcp_serverName";
 * the server name is always the segment after the last delimiter.
 * @param {string[]} tools - Array of tool identifiers
 * @returns {string[]} Array of unique MCP server names
 */
const extractMCPServerNames = (tools) => {
  if (!Array.isArray(tools)) {
    return [];
  }
  const names = new Set();
  for (const identifier of tools) {
    if (!identifier || !identifier.includes(mcp_delimiter)) {
      continue;
    }
    const segments = identifier.split(mcp_delimiter);
    if (segments.length >= 2) {
      // The server name is the final delimited segment.
      names.add(segments[segments.length - 1]);
    }
  }
  return [...names];
};
|
|
|
|
/**
 * Create an agent with the provided data.
 * Seeds the versions array with an initial snapshot (author excluded),
 * defaults the category to 'general', and derives mcpServerNames from tools.
 * @param {Object} agentData - The agent data to create.
 * @returns {Promise<Agent>} The created agent document as a plain object.
 * @throws {Error} If the agent creation fails.
 */
const createAgent = async (agentData) => {
  // The author is not part of the version snapshot.
  const { author: _omitAuthor, ...firstVersion } = agentData;
  const now = new Date();
  const document = await Agent.create({
    ...agentData,
    versions: [{ ...firstVersion, createdAt: now, updatedAt: now }],
    category: agentData.category || 'general',
    mcpServerNames: extractMCPServerNames(agentData.tools),
  });
  return document.toObject();
};
|
|
|
|
/**
 * Get a single agent document matching the provided search parameters.
 *
 * @param {Object} searchParameter - The search parameters to find the agent.
 * @param {string} searchParameter.id - The ID of the agent to find.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @returns {Promise<Agent|null>} The agent document as a plain object, or null if not found.
 */
const getAgent = async (searchParameter) => {
  return await Agent.findOne(searchParameter).lean();
};
|
|
|
/**
 * Get all agent documents matching the provided search parameters.
 *
 * @param {Object} searchParameter - The search parameters to find agents.
 * @returns {Promise<Agent[]>} Array of agent documents as plain objects.
 */
const getAgents = async (searchParameter) => {
  return await Agent.find(searchParameter).lean();
};
|
|
|
|
/**
 * Builds an in-memory "ephemeral" agent from the request body and optional model spec.
 * Nothing is persisted; the result mimics a stored agent's shape for downstream use.
 *
 * @param {Object} params
 * @param {ServerRequest} params.req - Request carrying config, body.ephemeralAgent, and body.promptPrefix.
 * @param {string} params.spec - Optional model-spec name whose settings are merged in.
 * @param {string} params.endpoint - Provider/endpoint the agent will run against.
 * @param {import('@librechat/agents').ClientOptions} [params.model_parameters] - Model options; `model` is split out, the rest pass through.
 * @returns {Promise<Agent|null>} An ephemeral agent-shaped plain object.
 */
const loadEphemeralAgent = async ({ req, spec, endpoint, model_parameters: _m }) => {
  // Separate the model name from the remaining client options.
  const { model, ...model_parameters } = _m;
  const modelSpecs = req.config?.modelSpecs?.list;
  /** @type {TModelSpec | null} */
  let modelSpec = null;
  if (spec != null && spec !== '') {
    modelSpec = modelSpecs?.find((s) => s.name === spec) || null;
  }
  /** @type {TEphemeralAgent | null} */
  const ephemeralAgent = req.body.ephemeralAgent;
  // Union of MCP servers requested by the client and mandated by the model spec.
  const mcpServers = new Set(ephemeralAgent?.mcp);
  const userId = req.user?.id; // note: userId cannot be undefined at runtime
  if (modelSpec?.mcpServers) {
    for (const mcpServer of modelSpec.mcpServers) {
      mcpServers.add(mcpServer);
    }
  }
  /** @type {string[]} */
  const tools = [];
  // Built-in capabilities: enabled if either the client toggles them or the spec requires them.
  if (ephemeralAgent?.execute_code === true || modelSpec?.executeCode === true) {
    tools.push(Tools.execute_code);
  }
  if (ephemeralAgent?.file_search === true || modelSpec?.fileSearch === true) {
    tools.push(Tools.file_search);
  }
  if (ephemeralAgent?.web_search === true || modelSpec?.webSearch === true) {
    tools.push(Tools.web_search);
  }

  const addedServers = new Set();
  if (mcpServers.size > 0) {
    for (const mcpServer of mcpServers) {
      if (addedServers.has(mcpServer)) {
        continue;
      }
      const serverTools = await getMCPServerTools(userId, mcpServer);
      if (!serverTools) {
        // No tool listing available: register a wildcard entry for the whole server.
        tools.push(`${mcp_all}${mcp_delimiter}${mcpServer}`);
        addedServers.add(mcpServer);
        continue;
      }
      tools.push(...Object.keys(serverTools));
      addedServers.add(mcpServer);
    }
  }

  const instructions = req.body.promptPrefix;

  // Get endpoint config for modelDisplayLabel fallback
  const appConfig = req.config;
  let endpointConfig = appConfig?.endpoints?.[endpoint];
  if (!isAgentsEndpoint(endpoint) && !endpointConfig) {
    try {
      endpointConfig = getCustomEndpointConfig({ endpoint, appConfig });
    } catch (err) {
      // Best effort: a missing custom endpoint config only affects the display label.
      logger.error('[loadEphemeralAgent] Error getting custom endpoint config', err);
    }
  }

  // For ephemeral agents, use modelLabel if provided, then model spec's label,
  // then modelDisplayLabel from endpoint config, otherwise empty string to show model name
  const sender =
    model_parameters?.modelLabel ?? modelSpec?.label ?? endpointConfig?.modelDisplayLabel ?? '';

  // Encode ephemeral agent ID with endpoint, model, and computed sender for display
  const ephemeralId = encodeEphemeralAgentId({ endpoint, model, sender });

  const result = {
    id: ephemeralId,
    instructions,
    provider: endpoint,
    model_parameters,
    model,
    tools,
  };

  if (ephemeralAgent?.artifacts != null && ephemeralAgent.artifacts) {
    result.artifacts = ephemeralAgent.artifacts;
  }
  return result;
};
|
|
|
|
/**
 * Load an agent based on the provided ID. Ephemeral agent IDs are resolved to
 * an in-memory agent; persisted agents are fetched from the database and
 * annotated with their current version number.
 *
 * @param {Object} params
 * @param {ServerRequest} params.req
 * @param {string} params.spec - Optional model spec name (ephemeral path only).
 * @param {string} params.agent_id - The agent identifier to load.
 * @param {string} params.endpoint - Provider/endpoint (ephemeral path only).
 * @param {import('@librechat/agents').ClientOptions} [params.model_parameters]
 * @returns {Promise<Agent|null>} The agent document as a plain object, or null if not found.
 */
const loadAgent = async ({ req, spec, agent_id, endpoint, model_parameters }) => {
  if (!agent_id) {
    return null;
  }
  if (isEphemeralAgentId(agent_id)) {
    return await loadEphemeralAgent({ req, spec, endpoint, model_parameters });
  }

  const agent = await getAgent({ id: agent_id });
  if (agent == null) {
    return null;
  }

  // Version number is derived from the length of the versions history.
  agent.version = agent.versions?.length ?? 0;
  return agent;
};
|
|
|
|
/**
 * Check whether applying an update would reproduce the most recent version,
 * ignoring timestamp, author, and other bookkeeping fields.
 *
 * Comparison rules per field:
 * - Both values falsy → equal.
 * - Arrays are length- and content-compared order-insensitively; 'projectIds'
 *   compares stringified ObjectIds, object arrays compare JSON serializations.
 * - Plain objects compare by JSON serialization (both-empty → equal).
 * - Primitives compare strictly, except `false`/`''` match a previously
 *   undefined value.
 *
 * @param {Object} updateData - The update data to compare ($push/$pull/$addToSet are ignored)
 * @param {Object} currentData - The current agent data
 * @param {Array} versions - The existing versions array
 * @param {string} [actionsHash] - Hash of current action metadata
 * @returns {Object|null} - The matching (latest) version if found, null otherwise
 */
const isDuplicateVersion = (updateData, currentData, versions, actionsHash = null) => {
  if (!versions || versions.length === 0) {
    return null;
  }

  // Bookkeeping fields that never count toward duplicate detection.
  const EXCLUDED_FIELDS = new Set([
    '_id',
    'id',
    'createdAt',
    'updatedAt',
    'author',
    'updatedBy',
    'created_at',
    'updated_at',
    '__v',
    'versions',
    'actionsHash',
  ]);

  const { $push: _$push, $pull: _$pull, $addToSet: _$addToSet, ...directUpdates } = updateData;

  // Nothing to compare: no direct field updates and no actions hash.
  if (Object.keys(directUpdates).length === 0 && !actionsHash) {
    return null;
  }

  const candidate = { ...currentData, ...directUpdates };
  const lastVersion = versions[versions.length - 1];

  // A changed actions hash always means a new version is needed.
  if (actionsHash && lastVersion.actionsHash !== actionsHash) {
    return null;
  }

  // Normalize a value to an array: arrays pass through, nullish becomes
  // empty, scalars are wrapped.
  const asArray = (value) => {
    if (Array.isArray(value)) {
      return value;
    }
    return value == null ? [] : [value];
  };

  // Returns true when the candidate and last version agree on `field`.
  const fieldMatches = (field) => {
    const next = candidate[field];
    const prev = lastVersion[field];

    // Both falsy (undefined/null/0/''/false) count as equal.
    if (!next && !prev) {
      return true;
    }

    if (Array.isArray(next) || Array.isArray(prev)) {
      const nextArr = asArray(next);
      const prevArr = asArray(prev);
      if (nextArr.length !== prevArr.length) {
        return false;
      }
      // projectIds hold MongoDB ObjectIds: compare stringified, order-insensitively.
      if (field === 'projectIds') {
        const a = nextArr.map((id) => id.toString()).sort();
        const b = prevArr.map((id) => id.toString()).sort();
        return a.every((id, i) => id === b[i]);
      }
      // Arrays of objects: compare JSON serializations, order-insensitively.
      if (nextArr.length > 0 && typeof nextArr[0] === 'object' && nextArr[0] !== null) {
        const a = nextArr.map((item) => JSON.stringify(item)).sort();
        const b = prevArr.map((item) => JSON.stringify(item)).sort();
        return a.every((item, i) => item === b[i]);
      }
      // Primitive arrays: sorted, strict elementwise comparison.
      const a = [...nextArr].sort();
      const b = [...prevArr].sort();
      return a.every((item, i) => item === b[i]);
    }

    if (typeof next === 'object' && next !== null) {
      const prevObj = typeof prev === 'object' && prev !== null ? prev : {};
      // Two empty objects are equal regardless of identity.
      if (Object.keys(next).length === 0 && Object.keys(prevObj).length === 0) {
        return true;
      }
      return JSON.stringify(next) === JSON.stringify(prevObj);
    }

    // Primitive values.
    if (next !== prev) {
      // `false` or `''` against a previously-unset field is not a real change.
      if (typeof next === 'boolean' && next === false && prev === undefined) {
        return true;
      }
      if (typeof next === 'string' && next === '' && prev === undefined) {
        return true;
      }
      return false;
    }
    return true;
  };

  const allFields = new Set([...Object.keys(candidate), ...Object.keys(lastVersion)]);
  for (const field of allFields) {
    if (EXCLUDED_FIELDS.has(field)) {
      continue;
    }
    if (!fieldMatches(field)) {
      return null;
    }
  }

  return lastVersion;
};
|
|
|
|
/**
 * Update an agent with new data without overwriting existing
 * properties, or create a new agent if it doesn't exist.
 * When an agent is updated, a copy of the current state will be saved to the versions array.
 *
 * @param {Object} searchParameter - The search parameters to find the agent to update.
 * @param {string} searchParameter.id - The ID of the agent to update.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {Object} updateData - An object containing the properties to update.
 * @param {Object} [options] - Optional configuration object.
 * @param {string} [options.updatingUserId] - The ID of the user performing the update (used for tracking non-author updates).
 * @param {boolean} [options.forceVersion] - Force creation of a new version even if no fields changed.
 * @param {boolean} [options.skipVersioning] - Skip version creation entirely (useful for isolated operations like sharing).
 * @returns {Promise<Agent>} The updated or newly created agent document as a plain object.
 * @throws {Error} If the update would create a duplicate version
 */
const updateAgent = async (searchParameter, updateData, options = {}) => {
  const { updatingUserId = null, forceVersion = false, skipVersioning = false } = options;
  // upsert: false — despite the docstring, a missing agent is not created here;
  // findOneAndUpdate simply returns null.
  const mongoOptions = { new: true, upsert: false };

  const currentAgent = await Agent.findOne(searchParameter);
  if (currentAgent) {
    // Strip bookkeeping fields; the remainder is the version snapshot.
    const {
      __v,
      _id,
      id: __id,
      versions,
      author: _author,
      ...versionData
    } = currentAgent.toObject();
    const { $push, $pull, $addToSet, ...directUpdates } = updateData;

    // Sync mcpServerNames when tools are updated
    if (directUpdates.tools !== undefined) {
      const mcpServerNames = extractMCPServerNames(directUpdates.tools);
      directUpdates.mcpServerNames = mcpServerNames;
      updateData.mcpServerNames = mcpServerNames; // Also update the original updateData
    }

    let actionsHash = null;

    // Generate actions hash if agent has actions
    if (currentAgent.actions && currentAgent.actions.length > 0) {
      // Extract action IDs from the format "domain_action_id"
      const actionIds = currentAgent.actions
        .map((action) => {
          const parts = action.split(actionDelimiter);
          return parts[1]; // Get just the action ID part
        })
        .filter(Boolean);

      if (actionIds.length > 0) {
        try {
          const actions = await getActions(
            {
              action_id: { $in: actionIds },
            },
            true,
          ); // Include sensitive data for hash

          // NOTE(review): generateActionMetadataHash is not among the imports
          // visible at the top of this file — presumably defined elsewhere in
          // this module; confirm it is in scope.
          actionsHash = await generateActionMetadataHash(currentAgent.actions, actions);
        } catch (error) {
          // Hash generation is best-effort; failure only disables duplicate detection.
          logger.error('Error fetching actions for hash generation:', error);
        }
      }
    }

    // A version is created only when something is actually being changed
    // (direct fields or array operators) or when explicitly forced.
    const shouldCreateVersion =
      !skipVersioning &&
      (forceVersion || Object.keys(directUpdates).length > 0 || $push || $pull || $addToSet);

    if (shouldCreateVersion) {
      const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);
      if (duplicateVersion && !forceVersion) {
        // No changes detected, return the current agent without creating a new version
        const agentObj = currentAgent.toObject();
        agentObj.version = versions.length;
        return agentObj;
      }
    }

    // Snapshot of the agent state after this update, stored in the history.
    const versionEntry = {
      ...versionData,
      ...directUpdates,
      updatedAt: new Date(),
    };

    // Include actions hash in version if available
    if (actionsHash) {
      versionEntry.actionsHash = actionsHash;
    }

    // Always store updatedBy field to track who made the change
    if (updatingUserId) {
      versionEntry.updatedBy = new mongoose.Types.ObjectId(updatingUserId);
    }

    if (shouldCreateVersion) {
      // Merge the version push with any caller-supplied $push operations.
      updateData.$push = {
        ...($push || {}),
        versions: versionEntry,
      };
    }
  }

  return Agent.findOneAndUpdate(searchParameter, updateData, mongoOptions).lean();
};
|
|
|
|
/**
 * Modifies an agent with the resource file id.
 * Ensures the target file_ids array exists, then atomically adds both the
 * tool resource (to tools) and the file id (to the resource's file_ids).
 * @param {object} params
 * @param {ServerRequest} params.req
 * @param {string} params.agent_id
 * @param {string} params.tool_resource
 * @param {string} params.file_id
 * @returns {Promise<Agent>} The updated agent.
 * @throws {Error} If the agent cannot be found.
 */
const addAgentResourceFile = async ({ req, agent_id, tool_resource, file_id }) => {
  const searchParameter = { id: agent_id };
  const agent = await getAgent(searchParameter);
  if (!agent) {
    throw new Error('Agent not found for adding resource file');
  }

  const fileIdsPath = `tool_resources.${tool_resource}.file_ids`;

  // Initialize the file_ids array when absent so $addToSet has a valid target.
  await Agent.updateOne(
    { id: agent_id, [fileIdsPath]: { $exists: false } },
    { $set: { [fileIdsPath]: [] } },
  );

  const updatedAgent = await updateAgent(
    searchParameter,
    {
      $addToSet: {
        tools: tool_resource,
        [fileIdsPath]: file_id,
      },
    },
    { updatingUserId: req?.user?.id },
  );

  if (!updatedAgent) {
    throw new Error('Agent not found for adding resource file');
  }
  return updatedAgent;
};
|
|
|
|
/**
 * Removes multiple resource files from an agent using atomic operations.
 * Groups the file ids per tool resource and removes them in a single $pull.
 * Empty file_ids arrays may remain afterwards (no $unset step is performed).
 * @param {object} params
 * @param {string} params.agent_id
 * @param {Array<{tool_resource: string, file_id: string}>} params.files
 * @returns {Promise<Agent>} The updated agent.
 * @throws {Error} If the agent is not found or update fails.
 */
const removeAgentResourceFiles = async ({ agent_id, files }) => {
  const searchParameter = { id: agent_id };

  // Group files to remove by resource so each resource needs only one $pull
  const filesByResource = files.reduce((acc, { tool_resource, file_id }) => {
    if (!acc[tool_resource]) {
      acc[tool_resource] = [];
    }
    acc[tool_resource].push(file_id);
    return acc;
  }, {});

  // Atomically remove file IDs using $pull
  // (removed unused `resourcesToCheck` set — vestige of a dropped $unset step)
  const pullOps = {};
  for (const [resource, fileIds] of Object.entries(filesByResource)) {
    const fileIdsPath = `tool_resources.${resource}.file_ids`;
    pullOps[fileIdsPath] = { $in: fileIds };
  }

  const updatePullData = { $pull: pullOps };
  const agentAfterPull = await Agent.findOneAndUpdate(searchParameter, updatePullData, {
    new: true,
  }).lean();

  if (!agentAfterPull) {
    // Agent might have been deleted concurrently, or never existed.
    // Check if it existed before trying to throw.
    const agentExists = await getAgent(searchParameter);
    if (!agentExists) {
      throw new Error('Agent not found for removing resource files');
    }
    // If it existed but findOneAndUpdate returned null, something else went wrong.
    throw new Error('Failed to update agent during file removal (pull step)');
  }

  // Return the agent state directly after the $pull operation.
  // Skipping the $unset step for now to simplify and test core $pull atomicity.
  // Empty arrays might remain, but the removal itself should be correct.
  return agentAfterPull;
};
|
|
|
|
/**
 * Deletes an agent based on the provided ID, then cleans up its project
 * references, ACL entries (agent and remote-agent), handoff edges pointing at
 * it, and user-favorite references. Edge/favorite cleanup failures are logged
 * but do not abort the deletion.
 *
 * @param {Object} searchParameter - The search parameters to find the agent to delete.
 * @param {string} searchParameter.id - The ID of the agent to delete.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @returns {Promise<Agent|null>} The deleted agent document, or null if none matched.
 */
const deleteAgent = async (searchParameter) => {
  const agent = await Agent.findOneAndDelete(searchParameter);
  if (!agent) {
    return agent;
  }

  await removeAgentFromAllProjects(agent.id);

  // Remove ACL entries for both resource types referencing this agent.
  await Promise.all(
    [ResourceType.AGENT, ResourceType.REMOTE_AGENT].map((resourceType) =>
      removeAllPermissions({ resourceType, resourceId: agent._id }),
    ),
  );

  try {
    // Drop handoff edges in other agents that target the deleted agent.
    await Agent.updateMany({ 'edges.to': agent.id }, { $pull: { edges: { to: agent.id } } });
  } catch (error) {
    logger.error('[deleteAgent] Error removing agent from handoff edges', error);
  }

  try {
    // Drop the deleted agent from every user's favorites list.
    await User.updateMany(
      { 'favorites.agentId': agent.id },
      { $pull: { favorites: { agentId: agent.id } } },
    );
  } catch (error) {
    logger.error('[deleteAgent] Error removing agent from user favorites', error);
  }

  return agent;
};
|
|
|
|
/**
 * Deletes agents solely owned by the user and cleans up their ACLs/project references.
 * Agents with other owners are left intact; the caller is responsible for
 * removing the user's own ACL principal entries separately.
 *
 * Also handles legacy (pre-ACL) agents that only have the author field set,
 * ensuring they are not orphaned if no permission migration has been run.
 * @param {string} userId - The ID of the user whose agents should be deleted.
 * @returns {Promise<void>}
 */
const deleteUserAgents = async (userId) => {
  try {
    const userObjectId = new mongoose.Types.ObjectId(userId);
    // ACL path: agents where this user holds OWNER permission and no other owner exists.
    const soleOwnedObjectIds = await getSoleOwnedResourceIds(userObjectId, [
      ResourceType.AGENT,
      ResourceType.REMOTE_AGENT,
    ]);

    // Legacy path: author-owned agents that were never migrated to ACL entries.
    const authoredAgents = await Agent.find({ author: userObjectId }).select('id _id').lean();

    // An authored agent with any ACL entry at all is "migrated" and is
    // governed solely by the ACL sole-ownership result above.
    const migratedEntries =
      authoredAgents.length > 0
        ? await AclEntry.find({
            resourceType: { $in: [ResourceType.AGENT, ResourceType.REMOTE_AGENT] },
            resourceId: { $in: authoredAgents.map((a) => a._id) },
          })
            .select('resourceId')
            .lean()
        : [];
    const migratedIds = new Set(migratedEntries.map((e) => e.resourceId.toString()));
    const legacyAgents = authoredAgents.filter((a) => !migratedIds.has(a._id.toString()));

    /** resourceId is the MongoDB _id; agent.id is the string identifier for project/edge queries */
    const soleOwnedAgents =
      soleOwnedObjectIds.length > 0
        ? await Agent.find({ _id: { $in: soleOwnedObjectIds } })
            .select('id _id')
            .lean()
        : [];

    const allAgents = [...soleOwnedAgents, ...legacyAgents];

    if (allAgents.length === 0) {
      return;
    }

    const agentIds = allAgents.map((agent) => agent.id);
    const agentObjectIds = allAgents.map((agent) => agent._id);

    // Detach the agents from every project in parallel.
    await Promise.all(agentIds.map((id) => removeAgentFromAllProjects(id)));

    // Safe to delete all ACL entries here: every agent in the set is either
    // solely owned by this user or has no ACL entries at all (legacy).
    await AclEntry.deleteMany({
      resourceType: { $in: [ResourceType.AGENT, ResourceType.REMOTE_AGENT] },
      resourceId: { $in: agentObjectIds },
    });

    try {
      // Remove handoff edges in surviving agents that point at deleted agents.
      await Agent.updateMany(
        { 'edges.to': { $in: agentIds } },
        { $pull: { edges: { to: { $in: agentIds } } } },
      );
    } catch (error) {
      logger.error('[deleteUserAgents] Error removing agents from handoff edges', error);
    }

    try {
      // Remove deleted agents from all users' favorites lists.
      await User.updateMany(
        { 'favorites.agentId': { $in: agentIds } },
        { $pull: { favorites: { agentId: { $in: agentIds } } } },
      );
    } catch (error) {
      logger.error('[deleteUserAgents] Error removing agents from user favorites', error);
    }

    await Agent.deleteMany({ _id: { $in: agentObjectIds } });
  } catch (error) {
    // Best-effort cleanup: failures are logged, not rethrown, so user
    // deletion can proceed.
    logger.error('[deleteUserAgents] General error:', error);
  }
};
|
|
|
|
/**
 * Get agents by accessible IDs with optional cursor-based pagination.
 * @param {Object} params - The parameters for getting accessible agents.
 * @param {Array} [params.accessibleIds] - Array of agent ObjectIds the user has ACL access to.
 * @param {Object} [params.otherParams] - Additional query parameters (including author filter).
 * @param {number} [params.limit] - Number of agents to return (max 100). If not provided, returns all agents.
 * @param {string} [params.after] - Cursor for pagination - get agents after this cursor. // base64 encoded JSON string with updatedAt and _id.
 * @returns {Promise<Object>} A promise that resolves to an object containing the agents data and pagination info.
 */
const getListAgentsByAccess = async ({
  accessibleIds = [],
  otherParams = {},
  limit = null,
  after = null,
}) => {
  const isPaginated = limit !== null && limit !== undefined;
  // Clamp to [1, 100]; invalid/zero values fall back to 20.
  // NOTE(review): parseInt is called without an explicit radix — base 10 assumed.
  const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;

  // Build base query combining ACL accessible agents with other filters
  const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };

  // Add cursor condition
  if (after) {
    try {
      const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
      const { updatedAt, _id } = cursor;

      // Keyset pagination over (updatedAt DESC, _id ASC): strictly older
      // updates, or equal timestamps with a larger _id tiebreaker.
      const cursorCondition = {
        $or: [
          { updatedAt: { $lt: new Date(updatedAt) } },
          { updatedAt: new Date(updatedAt), _id: { $gt: new mongoose.Types.ObjectId(_id) } },
        ],
      };

      // Merge cursor condition with base query
      if (Object.keys(baseQuery).length > 0) {
        baseQuery.$and = [{ ...baseQuery }, cursorCondition];
        // Remove the original conditions from baseQuery to avoid duplication
        Object.keys(baseQuery).forEach((key) => {
          if (key !== '$and') delete baseQuery[key];
        });
      } else {
        Object.assign(baseQuery, cursorCondition);
      }
    } catch (error) {
      // Malformed cursor: warn and return the first page rather than failing.
      logger.warn('Invalid cursor:', error.message);
    }
  }

  let query = Agent.find(baseQuery, {
    id: 1,
    _id: 1,
    name: 1,
    avatar: 1,
    author: 1,
    projectIds: 1,
    description: 1,
    updatedAt: 1,
    category: 1,
    support_contact: 1,
    is_promoted: 1,
  }).sort({ updatedAt: -1, _id: 1 });

  // Only apply limit if pagination is requested
  if (isPaginated) {
    // Fetch one extra row to detect whether another page exists.
    query = query.limit(normalizedLimit + 1);
  }

  const agents = await query.lean();

  const hasMore = isPaginated ? agents.length > normalizedLimit : false;
  const data = (isPaginated ? agents.slice(0, normalizedLimit) : agents).map((agent) => {
    if (agent.author) {
      // Serialize the author ObjectId for the API response.
      agent.author = agent.author.toString();
    }
    return agent;
  });

  // Generate next cursor only if paginated
  let nextCursor = null;
  if (isPaginated && hasMore && data.length > 0) {
    // Last returned row (index normalizedLimit - 1) anchors the next page.
    const lastAgent = agents[normalizedLimit - 1];
    nextCursor = Buffer.from(
      JSON.stringify({
        updatedAt: lastAgent.updatedAt.toISOString(),
        _id: lastAgent._id.toString(),
      }),
    ).toString('base64');
  }

  return {
    object: 'list',
    data,
    first_id: data.length > 0 ? data[0].id : null,
    last_id: data.length > 0 ? data[data.length - 1].id : null,
    has_more: hasMore,
    after: nextCursor,
  };
};
|
|
|
|
/**
 * Updates the projects associated with an agent, adding and removing project IDs as specified.
 * This function also updates the corresponding projects to include or exclude the agent ID.
 *
 * @param {Object} params - Parameters for updating the agent's projects.
 * @param {IUser} params.user - The user performing the update (admins bypass the author check).
 * @param {string} params.agentId - The ID of the agent to update.
 * @param {string[]} [params.projectIds] - Array of project IDs to add to the agent.
 * @param {string[]} [params.removeProjectIds] - Array of project IDs to remove from the agent.
 * @returns {Promise<MongoAgent>} The updated agent document.
 * @throws {Error} If there's an error updating the agent or projects.
 */
const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds }) => {
  const updateOps = {};

  // Projects are updated first; if the agent update below fails, the
  // compensation block at the end reverses these project-side changes.
  if (removeProjectIds && removeProjectIds.length > 0) {
    for (const projectId of removeProjectIds) {
      await removeAgentIdsFromProject(projectId, [agentId]);
    }
    updateOps.$pull = { projectIds: { $in: removeProjectIds } };
  }

  if (projectIds && projectIds.length > 0) {
    for (const projectId of projectIds) {
      await addAgentIdsToProject(projectId, [agentId]);
    }
    updateOps.$addToSet = { projectIds: { $each: projectIds } };
  }

  // Nothing to change: return the agent as-is.
  if (Object.keys(updateOps).length === 0) {
    return await getAgent({ id: agentId });
  }

  // Non-admin users may only update agents they authored.
  const updateQuery = { id: agentId, author: user.id };
  if (user.role === SystemRoles.ADMIN) {
    delete updateQuery.author;
  }

  // skipVersioning: project membership changes do not create a new version.
  const updatedAgent = await updateAgent(updateQuery, updateOps, {
    updatingUserId: user.id,
    skipVersioning: true,
  });
  if (updatedAgent) {
    return updatedAgent;
  }

  // Agent update failed (e.g. author mismatch): roll back the project-side
  // changes made above so projects and agent stay consistent.
  if (updateOps.$addToSet) {
    for (const projectId of projectIds) {
      await removeAgentIdsFromProject(projectId, [agentId]);
    }
  } else if (updateOps.$pull) {
    for (const projectId of removeProjectIds) {
      await addAgentIdsToProject(projectId, [agentId]);
    }
  }

  return await getAgent({ id: agentId });
};
|
|
|
|
/**
 * Reverts an agent to a specific version in its version history.
 * @param {Object} searchParameter - The search parameters to find the agent to revert.
 * @param {string} searchParameter.id - The ID of the agent to revert.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {number} versionIndex - The index of the version to revert to in the versions array.
 * @returns {Promise<MongoAgent>} The updated agent document after reverting.
 * @throws {Error} If the agent is not found or the specified version does not exist.
 */
const revertAgentVersion = async (searchParameter, versionIndex) => {
  const agent = await Agent.findOne(searchParameter);
  if (!agent) {
    throw new Error('Agent not found');
  }

  const targetVersion = agent.versions?.[versionIndex];
  if (!targetVersion) {
    throw new Error(`Version ${versionIndex} not found`);
  }

  // Strip identity/audit fields so the revert does not clobber them;
  // everything else from the stored version becomes the update payload.
  const { _id, id, versions, author, updatedBy, ...updateData } = targetVersion;

  return Agent.findOneAndUpdate(searchParameter, updateData, { new: true }).lean();
};
|
|
|
|
/**
 * Generates a hash of action metadata for version comparison
 * @param {string[]} actionIds - Array of action IDs in format "domain_action_id"
 * @param {Action[]} actions - Array of action documents
 * @returns {Promise<string>} - SHA256 hash of the action metadata
 */
const generateActionMetadataHash = async (actionIds, actions) => {
  if (!actionIds || actionIds.length === 0) {
    return '';
  }

  // action_id -> metadata lookup table.
  const metadataById = new Map(actions.map((action) => [action.action_id, action.metadata]));

  // Serialize every action's metadata deterministically: action IDs sorted,
  // and within each metadata object, keys sorted.
  const serialized = [...actionIds]
    .sort()
    .map((fullId) => {
      // The action_id portion follows the delimiter in "domain_action_id".
      const actionId = fullId.split(actionDelimiter)[1];
      const metadata = metadataById.get(actionId);
      if (!metadata) {
        return `${actionId}:null`;
      }
      const body = Object.keys(metadata)
        .sort()
        .map((key) => `${key}:${JSON.stringify(metadata[key])}`)
        .join(',');
      return `${actionId}:{${body}}`;
    })
    .join(';');

  // SHA-256 via the Web Crypto API, rendered as lowercase hex.
  const digest = await crypto.webcrypto.subtle.digest(
    'SHA-256',
    new TextEncoder().encode(serialized),
  );
  return Array.from(new Uint8Array(digest), (byte) => byte.toString(16).padStart(2, '0')).join('');
};
|
|
/**
 * Counts the number of promoted agents.
 * @returns {Promise<number>} - The count of promoted agents
 */
const countPromotedAgents = async () => Agent.countDocuments({ is_promoted: true });
|
|
|
|
/**
 * Load a default agent based on the endpoint
 * @param {string} endpoint
 * @returns {Agent | null}
 *
 * NOTE(review): this JSDoc is detached — no function definition follows it
 * (only `module.exports`). Re-attach it to the implementation it documents
 * (presumably `loadAgent`, defined elsewhere in this file) or remove it.
 */
|
|
|
|
// Public API of the Agent model module: CRUD, versioning, project/ACL-aware
// listing, resource-file management, and action-metadata hashing helpers.
module.exports = {
  getAgent,
  getAgents,
  loadAgent,
  createAgent,
  updateAgent,
  deleteAgent,
  deleteUserAgents,
  revertAgentVersion,
  updateAgentProjects,
  countPromotedAgents,
  addAgentResourceFile,
  getListAgentsByAccess,
  removeAgentResourceFiles,
  generateActionMetadataHash,
};
|