Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-09-22 06:00:56 +02:00)
👤 feat: Enhance Agent Versioning to Track User Updates (#7523)
* feat: Enhance agent update functionality to track user updates
  - Updated `updateAgent` function to accept an `updatingUserId` parameter for tracking who made changes.
  - Modified agent versioning to include `updatedBy` field for better audit trails.
  - Adjusted related functions and tests to ensure proper handling of user updates and version history.
  - Enhanced tests to verify correct tracking of `updatedBy` during agent updates and restorations.

* fix: Refactor import tests for improved readability and consistency
  - Adjusted formatting in `importChatGptConvo` test to enhance clarity.
  - Updated expected output string in `processAssistantMessage` test to use double quotes for consistency.
  - Modified processing time expectation in `processAssistantMessage` test to allow for CI environment variability.
parent ed9ab8842a
commit cede5d120c
5 changed files with 172 additions and 22 deletions
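At the call sites touched below, the change amounts to passing the acting user's ID as a new third argument to `updateAgent`, which records it as `updatedBy` on the version entry the update pushes. A minimal usage sketch, assuming the module layout the tests use (`require('./Agent')`); illustrative only, not code from this commit:

  const { updateAgent } = require('./Agent');

  // Rename an agent on behalf of `requestingUserId`; that ID is stored as
  // `updatedBy` on the new entry appended to `agent.versions`.
  async function renameAgent(agentId, newName, requestingUserId) {
    return updateAgent({ id: agentId }, { name: newName }, requestingUserId);
  }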
@@ -21,7 +21,7 @@ const Agent = mongoose.model('agent', agentSchema);
  * @throws {Error} If the agent creation fails.
  */
 const createAgent = async (agentData) => {
-  const { versions, ...versionData } = agentData;
+  const { author, ...versionData } = agentData;
   const timestamp = new Date();
   const initialAgentData = {
     ...agentData,
@@ -163,6 +163,7 @@ const isDuplicateVersion = (updateData, currentData, versions) => {
     'createdAt',
     'updatedAt',
     'author',
+    'updatedBy',
     'created_at',
     'updated_at',
     '__v',
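The new `'updatedBy'` entry joins the metadata fields that `isDuplicateVersion` skips when comparing versions, so two versions that differ only in who saved them are still treated as duplicates. A rough sketch of that kind of exclusion-based comparison (an assumed illustration, not the repository's actual implementation):

  const EXCLUDED_FIELDS = ['createdAt', 'updatedAt', 'author', 'updatedBy', 'created_at', 'updated_at', '__v'];

  // Drop audit metadata before comparing, so fields like `updatedBy` never
  // make two otherwise-identical versions look different.
  const stripExcluded = (version) => {
    const copy = { ...version };
    for (const field of EXCLUDED_FIELDS) {
      delete copy[field];
    }
    return copy;
  };

  const isSameVersion = (candidate, existing) =>
    JSON.stringify(stripExcluded(candidate)) === JSON.stringify(stripExcluded(existing));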
@@ -248,15 +249,16 @@ const isDuplicateVersion = (updateData, currentData, versions) => {
  * @param {string} searchParameter.id - The ID of the agent to update.
  * @param {string} [searchParameter.author] - The user ID of the agent's author.
  * @param {Object} updateData - An object containing the properties to update.
+ * @param {string} [updatingUserId] - The ID of the user performing the update (used for tracking non-author updates).
  * @returns {Promise<Agent>} The updated or newly created agent document as a plain object.
  * @throws {Error} If the update would create a duplicate version
  */
-const updateAgent = async (searchParameter, updateData) => {
+const updateAgent = async (searchParameter, updateData, updatingUserId = null) => {
   const options = { new: true, upsert: false };

   const currentAgent = await Agent.findOne(searchParameter);
   if (currentAgent) {
-    const { __v, _id, id, versions, ...versionData } = currentAgent.toObject();
+    const { __v, _id, id, versions, author, ...versionData } = currentAgent.toObject();
     const { $push, $pull, $addToSet, ...directUpdates } = updateData;

     if (Object.keys(directUpdates).length > 0 && versions && versions.length > 0) {
@@ -276,13 +278,20 @@ const updateAgent = async (searchParameter, updateData) => {
       }
     }

+    const versionEntry = {
+      ...versionData,
+      ...directUpdates,
+      updatedAt: new Date(),
+    };
+
+    // Always store updatedBy field to track who made the change
+    if (updatingUserId) {
+      versionEntry.updatedBy = new mongoose.Types.ObjectId(updatingUserId);
+    }
+
     updateData.$push = {
       ...($push || {}),
-      versions: {
-        ...versionData,
-        ...directUpdates,
-        updatedAt: new Date(),
-      },
+      versions: versionEntry,
     };
   }

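Each update now pushes a `versionEntry` that carries `updatedBy` whenever an acting user ID was supplied; the initial version created by `createAgent` has no such field. A hypothetical read-side helper (not part of this commit) showing how that audit trail could be consumed:

  // Returns the user responsible for an agent's latest version.
  function lastEditorOf(agent) {
    const lastVersion = agent.versions[agent.versions.length - 1];
    // The first version has no `updatedBy`, so fall back to the agent's author.
    return lastVersion?.updatedBy ?? agent.author;
  }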
@@ -298,7 +307,7 @@ const updateAgent = async (searchParameter, updateData) => {
  * @param {string} params.file_id
  * @returns {Promise<Agent>} The updated agent.
  */
-const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
+const addAgentResourceFile = async ({ req, agent_id, tool_resource, file_id }) => {
   const searchParameter = { id: agent_id };
   let agent = await getAgent(searchParameter);
   if (!agent) {
@@ -324,7 +333,7 @@ const addAgentResourceFile = async ({ agent_id, tool_resource, file_id }) => {
     },
   };

-  const updatedAgent = await updateAgent(searchParameter, updateData);
+  const updatedAgent = await updateAgent(searchParameter, updateData, req?.user?.id);
   if (updatedAgent) {
     return updatedAgent;
   } else {
@@ -488,7 +497,7 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
     delete updateQuery.author;
   }

-  const updatedAgent = await updateAgent(updateQuery, updateOps);
+  const updatedAgent = await updateAgent(updateQuery, updateOps, user.id);
   if (updatedAgent) {
     return updatedAgent;
   }
@@ -533,6 +542,8 @@ const revertAgentVersion = async (searchParameter, versionIndex) => {
   delete updateData._id;
   delete updateData.id;
   delete updateData.versions;
+  delete updateData.author;
+  delete updateData.updatedBy;

   return Agent.findOneAndUpdate(searchParameter, updateData, { new: true }).lean();
 };
@@ -679,7 +679,7 @@ describe('Agent Version History', () => {
     expect(updatedAgent.versions[0]._id).toBeUndefined();
     expect(updatedAgent.versions[0].__v).toBeUndefined();
     expect(updatedAgent.versions[0].name).toBe('Test Agent');
-    expect(updatedAgent.versions[0].author).toBeDefined();
+    expect(updatedAgent.versions[0].author).toBeUndefined();

     expect(updatedAgent.versions[1]._id).toBeUndefined();
     expect(updatedAgent.versions[1].__v).toBeUndefined();
@@ -885,4 +885,141 @@ describe('Agent Version History', () => {
       console.error = originalConsoleError;
     }
   });
+
+  test('should track updatedBy when a different user updates an agent', async () => {
+    const agentId = `agent_${uuidv4()}`;
+    const originalAuthor = new mongoose.Types.ObjectId();
+    const updatingUser = new mongoose.Types.ObjectId();
+
+    await createAgent({
+      id: agentId,
+      name: 'Original Agent',
+      provider: 'test',
+      model: 'test-model',
+      author: originalAuthor,
+      description: 'Original description',
+    });
+
+    const updatedAgent = await updateAgent(
+      { id: agentId },
+      { name: 'Updated Agent', description: 'Updated description' },
+      updatingUser.toString(),
+    );
+
+    expect(updatedAgent.versions).toHaveLength(2);
+    expect(updatedAgent.versions[1].updatedBy.toString()).toBe(updatingUser.toString());
+    expect(updatedAgent.author.toString()).toBe(originalAuthor.toString());
+  });
+
+  test('should include updatedBy even when the original author updates the agent', async () => {
+    const agentId = `agent_${uuidv4()}`;
+    const originalAuthor = new mongoose.Types.ObjectId();
+
+    await createAgent({
+      id: agentId,
+      name: 'Original Agent',
+      provider: 'test',
+      model: 'test-model',
+      author: originalAuthor,
+      description: 'Original description',
+    });
+
+    const updatedAgent = await updateAgent(
+      { id: agentId },
+      { name: 'Updated Agent', description: 'Updated description' },
+      originalAuthor.toString(),
+    );
+
+    expect(updatedAgent.versions).toHaveLength(2);
+    expect(updatedAgent.versions[1].updatedBy.toString()).toBe(originalAuthor.toString());
+    expect(updatedAgent.author.toString()).toBe(originalAuthor.toString());
+  });
+
+  test('should track multiple different users updating the same agent', async () => {
+    const agentId = `agent_${uuidv4()}`;
+    const originalAuthor = new mongoose.Types.ObjectId();
+    const user1 = new mongoose.Types.ObjectId();
+    const user2 = new mongoose.Types.ObjectId();
+    const user3 = new mongoose.Types.ObjectId();
+
+    await createAgent({
+      id: agentId,
+      name: 'Original Agent',
+      provider: 'test',
+      model: 'test-model',
+      author: originalAuthor,
+      description: 'Original description',
+    });
+
+    // User 1 makes an update
+    await updateAgent(
+      { id: agentId },
+      { name: 'Updated by User 1', description: 'First update' },
+      user1.toString(),
+    );
+
+    // Original author makes an update
+    await updateAgent(
+      { id: agentId },
+      { description: 'Updated by original author' },
+      originalAuthor.toString(),
+    );
+
+    // User 2 makes an update
+    await updateAgent(
+      { id: agentId },
+      { name: 'Updated by User 2', model: 'new-model' },
+      user2.toString(),
+    );
+
+    // User 3 makes an update
+    const finalAgent = await updateAgent(
+      { id: agentId },
+      { description: 'Final update by User 3' },
+      user3.toString(),
+    );
+
+    expect(finalAgent.versions).toHaveLength(5);
+    expect(finalAgent.author.toString()).toBe(originalAuthor.toString());
+
+    // Check that each version has the correct updatedBy
+    expect(finalAgent.versions[0].updatedBy).toBeUndefined(); // Initial creation has no updatedBy
+    expect(finalAgent.versions[1].updatedBy.toString()).toBe(user1.toString());
+    expect(finalAgent.versions[2].updatedBy.toString()).toBe(originalAuthor.toString());
+    expect(finalAgent.versions[3].updatedBy.toString()).toBe(user2.toString());
+    expect(finalAgent.versions[4].updatedBy.toString()).toBe(user3.toString());
+
+    // Verify the final state
+    expect(finalAgent.name).toBe('Updated by User 2');
+    expect(finalAgent.description).toBe('Final update by User 3');
+    expect(finalAgent.model).toBe('new-model');
+  });
+
+  test('should preserve original author during agent restoration', async () => {
+    const agentId = `agent_${uuidv4()}`;
+    const originalAuthor = new mongoose.Types.ObjectId();
+    const updatingUser = new mongoose.Types.ObjectId();
+
+    await createAgent({
+      id: agentId,
+      name: 'Original Agent',
+      provider: 'test',
+      model: 'test-model',
+      author: originalAuthor,
+      description: 'Original description',
+    });
+
+    await updateAgent(
+      { id: agentId },
+      { name: 'Updated Agent', description: 'Updated description' },
+      updatingUser.toString(),
+    );
+
+    const { revertAgentVersion } = require('./Agent');
+    const revertedAgent = await revertAgentVersion({ id: agentId }, 0);
+
+    expect(revertedAgent.author.toString()).toBe(originalAuthor.toString());
+    expect(revertedAgent.name).toBe('Original Agent');
+    expect(revertedAgent.description).toBe('Original description');
+  });
 });
@@ -111,7 +111,7 @@ const getAgentHandler = async (req, res) => {
       const originalUrl = agent.avatar.filepath;
       agent.avatar.filepath = await refreshS3Url(agent.avatar);
       if (originalUrl !== agent.avatar.filepath) {
-        await updateAgent({ id }, { avatar: agent.avatar });
+        await updateAgent({ id }, { avatar: agent.avatar }, req.user.id);
       }
     }

@@ -169,7 +169,9 @@ const updateAgentHandler = async (req, res) => {
     }

     let updatedAgent =
-      Object.keys(updateData).length > 0 ? await updateAgent({ id }, updateData) : existingAgent;
+      Object.keys(updateData).length > 0
+        ? await updateAgent({ id }, updateData, req.user.id)
+        : existingAgent;

     if (projectIds || removeProjectIds) {
       updatedAgent = await updateAgentProjects({
@@ -405,7 +407,7 @@ const uploadAgentAvatarHandler = async (req, res) => {
       },
     };

-    promises.push(await updateAgent({ id: agent_id, author: req.user.id }, data));
+    promises.push(await updateAgent({ id: agent_id, author: req.user.id }, data, req.user.id));

     const resolved = await Promise.all(promises);
     res.status(201).json(resolved[0]);
@@ -107,7 +107,7 @@ router.post('/:agent_id', async (req, res) => {
       .filter((tool) => !(tool && (tool.includes(domain) || tool.includes(action_id))))
       .concat(functions.map((tool) => `${tool.function.name}${actionDelimiter}${domain}`));

-    const updatedAgent = await updateAgent(agentQuery, { tools, actions });
+    const updatedAgent = await updateAgent(agentQuery, { tools, actions }, req.user.id);

     // Only update user field for new actions
     const actionUpdateData = { metadata, agent_id };
@@ -172,7 +172,7 @@ router.delete('/:agent_id/:action_id', async (req, res) => {

     const updatedTools = tools.filter((tool) => !(tool && tool.includes(domain)));

-    await updateAgent(agentQuery, { tools: updatedTools, actions: updatedActions });
+    await updateAgent(agentQuery, { tools: updatedTools, actions: updatedActions }, req.user.id);
     // If admin, can delete any action, otherwise only user's actions
     const actionQuery = admin ? { action_id } : { action_id, user: req.user.id };
     await deleteAction(actionQuery);
@@ -84,14 +84,14 @@ describe('importChatGptConvo', () => {
       const { parent } = jsonData[0].mapping[id];

       const expectedParentId = parent
-        ? idToUUIDMap.get(parent) ?? Constants.NO_PARENT
+        ? (idToUUIDMap.get(parent) ?? Constants.NO_PARENT)
         : Constants.NO_PARENT;

       const actualMessageId = idToUUIDMap.get(id);
       const actualParentId = actualMessageId
         ? importBatchBuilder.saveMessage.mock.calls.find(
             (call) => call[0].messageId === actualMessageId,
           )[0].parentMessageId
         : Constants.NO_PARENT;

       expect(actualParentId).toBe(expectedParentId);
@@ -544,7 +544,7 @@ describe('processAssistantMessage', () => {

       // Expected output should have all citations replaced with markdown links
       const expectedOutput =
-        'Signal Sciences is a web application security company that was founded on March 10, 2014, by Andrew Peterson, Nick Galbreath, and Zane Lackey. It operates as a for-profit company with its legal name being Signal Sciences Corp. The company has achieved significant growth and is recognized as the fastest-growing web application security company in the world. Signal Sciences developed a next-gen web application firewall (NGWAF) and runtime application self-protection (RASP) technologies designed to increase security and maintain reliability without compromising the performance of modern web applications distributed across cloud, on-premise, edge, or hybrid environments ([Signal Sciences - Crunchbase Company Profile & Funding](https://www.crunchbase.com/organization/signal-sciences)) ([Demand More from Your WAF - Signal Sciences now part of Fastly](https://www.signalsciences.com/)).\n\nIn a major development, Fastly, Inc., a provider of an edge cloud platform, announced the completion of its acquisition of Signal Sciences on October 1, 2020. This acquisition was valued at approximately $775 million in cash and stock. By integrating Signal Sciences\' powerful web application and API security solutions with Fastly\'s edge cloud platform and existing security offerings, they aimed to form a unified suite of security solutions. The merger was aimed at expanding Fastly\'s security portfolio, particularly at a time when digital security has become paramount for businesses operating online ([Fastly Completes Acquisition of Signal Sciences | Fastly](https://www.fastly.com/press/press-releases/fastly-completes-acquisition-signal-sciences)) ([Fastly Agrees to Acquire Signal Sciences for $775 Million - Cooley](https://www.cooley.com/news/coverage/2020/2020-08-27-fastly-agrees-to-acquire-signal-sciences-for-775-million)).';
+        "Signal Sciences is a web application security company that was founded on March 10, 2014, by Andrew Peterson, Nick Galbreath, and Zane Lackey. It operates as a for-profit company with its legal name being Signal Sciences Corp. The company has achieved significant growth and is recognized as the fastest-growing web application security company in the world. Signal Sciences developed a next-gen web application firewall (NGWAF) and runtime application self-protection (RASP) technologies designed to increase security and maintain reliability without compromising the performance of modern web applications distributed across cloud, on-premise, edge, or hybrid environments ([Signal Sciences - Crunchbase Company Profile & Funding](https://www.crunchbase.com/organization/signal-sciences)) ([Demand More from Your WAF - Signal Sciences now part of Fastly](https://www.signalsciences.com/)).\n\nIn a major development, Fastly, Inc., a provider of an edge cloud platform, announced the completion of its acquisition of Signal Sciences on October 1, 2020. This acquisition was valued at approximately $775 million in cash and stock. By integrating Signal Sciences' powerful web application and API security solutions with Fastly's edge cloud platform and existing security offerings, they aimed to form a unified suite of security solutions. The merger was aimed at expanding Fastly's security portfolio, particularly at a time when digital security has become paramount for businesses operating online ([Fastly Completes Acquisition of Signal Sciences | Fastly](https://www.fastly.com/press/press-releases/fastly-completes-acquisition-signal-sciences)) ([Fastly Agrees to Acquire Signal Sciences for $775 Million - Cooley](https://www.cooley.com/news/coverage/2020/2020-08-27-fastly-agrees-to-acquire-signal-sciences-for-775-million)).";

       const result = processAssistantMessage(assistantMessage, messageText);
       expect(result).toBe(expectedOutput);
@@ -603,7 +603,7 @@ describe('processAssistantMessage', () => {
       // In a ReDoS vulnerability, time would roughly double with each size increase
       for (let i = 1; i < results.length; i++) {
         const ratio = results[i] / results[i - 1];
-        expect(ratio).toBeLessThan(2); // Processing time should not double
+        expect(ratio).toBeLessThan(3); // Allow for CI environment variability while still catching ReDoS
         console.log(`Size ${sizes[i]} processing time ratio: ${ratio}`);
       }
