mirror of https://github.com/danny-avila/LibreChat.git (synced 2026-01-06 18:48:50 +01:00)
Merge branch 'dev' into added-codeql
This commit is contained in: commit 74cdad76ba
158 changed files with 5793 additions and 965 deletions
@@ -30,7 +30,7 @@ const DEFAULT_IMAGE_EDIT_DESCRIPTION =
When to use \`image_edit_oai\`:
- The user wants to modify, extend, or remix one **or more** uploaded images, either:
  - Previously generated, or in the current request (both to be included in the \`image_ids\` array).
- Always when the user refers to uploaded images for editing, enhancement, remixing, style transfer, or combining elements.
- Any current or existing images are to be used as visual guides.
- If there are any files in the current request, they are more likely than not expected as references for image edit requests.
@@ -21,7 +21,19 @@ const Agent = mongoose.model('agent', agentSchema);
 * @throws {Error} If the agent creation fails.
 */
const createAgent = async (agentData) => {
-  return (await Agent.create(agentData)).toObject();
+  const { versions, ...versionData } = agentData;
+  const timestamp = new Date();
+  const initialAgentData = {
+    ...agentData,
+    versions: [
+      {
+        ...versionData,
+        createdAt: timestamp,
+        updatedAt: timestamp,
+      },
+    ],
+  };
+  return (await Agent.create(initialAgentData)).toObject();
};

/**
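Review note: with this change every agent starts life with its initial state recorded as versions[0]. A minimal usage sketch (field values hypothetical, not actual output):

// Sketch only: illustrative values.
const agent = await createAgent({
  id: 'agent_123',
  name: 'Demo',
  provider: 'openai',
  model: 'gpt-4',
});
// agent.versions[0] mirrors the creation payload plus timestamps:
// { name: 'Demo', provider: 'openai', model: 'gpt-4', createdAt: <Date>, updatedAt: <Date> }
console.log(agent.versions.length); // 1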
@@ -103,6 +115,8 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
    return null;
  }

+  agent.version = agent.versions ? agent.versions.length : 0;
+
  if (agent.author.toString() === req.user.id) {
    return agent;
  }
@@ -127,18 +141,146 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
  }
};

/**
 * Check if a version already exists in the versions array, excluding timestamp and author fields
 * @param {Object} updateData - The update data to compare
 * @param {Object} currentData - The current agent state to merge with the direct updates
 * @param {Array} versions - The existing versions array
 * @returns {Object|null} - The matching version if found, null otherwise
 */
const isDuplicateVersion = (updateData, currentData, versions) => {
  if (!versions || versions.length === 0) {
    return null;
  }

  const excludeFields = [
    '_id',
    'id',
    'createdAt',
    'updatedAt',
    'author',
    'created_at',
    'updated_at',
    '__v',
    'agent_ids',
    'versions',
  ];

  const { $push, $pull, $addToSet, ...directUpdates } = updateData;

  if (Object.keys(directUpdates).length === 0) {
    return null;
  }

  const wouldBeVersion = { ...currentData, ...directUpdates };
  const lastVersion = versions[versions.length - 1];

  const allFields = new Set([...Object.keys(wouldBeVersion), ...Object.keys(lastVersion)]);

  const importantFields = Array.from(allFields).filter((field) => !excludeFields.includes(field));

  let isMatch = true;
  for (const field of importantFields) {
    if (!wouldBeVersion[field] && !lastVersion[field]) {
      continue;
    }

    if (Array.isArray(wouldBeVersion[field]) && Array.isArray(lastVersion[field])) {
      if (wouldBeVersion[field].length !== lastVersion[field].length) {
        isMatch = false;
        break;
      }

      // Special handling for projectIds (MongoDB ObjectIds)
      if (field === 'projectIds') {
        const wouldBeIds = wouldBeVersion[field].map((id) => id.toString()).sort();
        const versionIds = lastVersion[field].map((id) => id.toString()).sort();

        if (!wouldBeIds.every((id, i) => id === versionIds[i])) {
          isMatch = false;
          break;
        }
      }
      // Handle arrays of objects like tool_kwargs
      else if (typeof wouldBeVersion[field][0] === 'object' && wouldBeVersion[field][0] !== null) {
        const sortedWouldBe = [...wouldBeVersion[field]].map((item) => JSON.stringify(item)).sort();
        const sortedVersion = [...lastVersion[field]].map((item) => JSON.stringify(item)).sort();

        if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
          isMatch = false;
          break;
        }
      } else {
        const sortedWouldBe = [...wouldBeVersion[field]].sort();
        const sortedVersion = [...lastVersion[field]].sort();

        if (!sortedWouldBe.every((item, i) => item === sortedVersion[i])) {
          isMatch = false;
          break;
        }
      }
    } else if (field === 'model_parameters') {
      const wouldBeParams = wouldBeVersion[field] || {};
      const lastVersionParams = lastVersion[field] || {};
      if (JSON.stringify(wouldBeParams) !== JSON.stringify(lastVersionParams)) {
        isMatch = false;
        break;
      }
    } else if (wouldBeVersion[field] !== lastVersion[field]) {
      isMatch = false;
      break;
    }
  }

  return isMatch ? lastVersion : null;
};

/**
 * Update an agent with new data without overwriting existing
 * properties, or create a new agent if it doesn't exist.
 * When an agent is updated, a copy of the current state will be saved to the versions array.
 *
 * @param {Object} searchParameter - The search parameters to find the agent to update.
 * @param {string} searchParameter.id - The ID of the agent to update.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {Object} updateData - An object containing the properties to update.
 * @returns {Promise<Agent>} The updated or newly created agent document as a plain object.
 * @throws {Error} If the update would create a duplicate version
 */
const updateAgent = async (searchParameter, updateData) => {
  const options = { new: true, upsert: false };

  const currentAgent = await Agent.findOne(searchParameter);
  if (currentAgent) {
    const { __v, _id, id, versions, ...versionData } = currentAgent.toObject();
    const { $push, $pull, $addToSet, ...directUpdates } = updateData;

    if (Object.keys(directUpdates).length > 0 && versions && versions.length > 0) {
      const duplicateVersion = isDuplicateVersion(updateData, versionData, versions);
      if (duplicateVersion) {
        const error = new Error(
          'Duplicate version: This would create a version identical to an existing one',
        );
        error.statusCode = 409;
        error.details = {
          duplicateVersion,
          versionIndex: versions.findIndex(
            (v) => JSON.stringify(duplicateVersion) === JSON.stringify(v),
          ),
        };
        throw error;
      }
    }

    updateData.$push = {
      ...($push || {}),
      versions: {
        ...versionData,
        ...directUpdates,
        updatedAt: new Date(),
      },
    };
  }

  return Agent.findOneAndUpdate(searchParameter, updateData, options).lean();
};
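For review context, a hedged usage sketch of the new duplicate guard (ids and values hypothetical): repeating the same direct update against the latest version now throws instead of appending a redundant entry.

// Sketch: a second identical update is rejected with statusCode 409.
await updateAgent({ id: 'agent_123' }, { name: 'Renamed' }); // pushes a new version
try {
  await updateAgent({ id: 'agent_123' }, { name: 'Renamed' }); // no effective change
} catch (err) {
  console.log(err.statusCode); // 409
  console.log(err.details.versionIndex); // index of the matching version entry
}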
@@ -358,6 +500,38 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
  return await getAgent({ id: agentId });
};

/**
 * Reverts an agent to a specific version in its version history.
 * @param {Object} searchParameter - The search parameters to find the agent to revert.
 * @param {string} searchParameter.id - The ID of the agent to revert.
 * @param {string} [searchParameter.author] - The user ID of the agent's author.
 * @param {number} versionIndex - The index of the version to revert to in the versions array.
 * @returns {Promise<MongoAgent>} The updated agent document after reverting.
 * @throws {Error} If the agent is not found or the specified version does not exist.
 */
const revertAgentVersion = async (searchParameter, versionIndex) => {
  const agent = await Agent.findOne(searchParameter);
  if (!agent) {
    throw new Error('Agent not found');
  }

  if (!agent.versions || !agent.versions[versionIndex]) {
    throw new Error(`Version ${versionIndex} not found`);
  }

  const revertToVersion = agent.versions[versionIndex];

  const updateData = {
    ...revertToVersion,
  };

  delete updateData._id;
  delete updateData.id;
  delete updateData.versions;

  return Agent.findOneAndUpdate(searchParameter, updateData, { new: true }).lean();
};

module.exports = {
  Agent,
  getAgent,
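One behavioral note worth flagging in review: revertAgentVersion writes the old fields back with a direct findOneAndUpdate rather than going through updateAgent, so as written no new entry is pushed to versions. A sketch of the expected flow (hypothetical id):

// Sketch: revert to the first recorded version.
const reverted = await revertAgentVersion({ id: 'agent_123' }, 0);
console.log(reverted.name); // name as of versions[0]
// The versions array itself is left untouched by the revert:
console.log(reverted.versions.length); // unchanged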
@@ -369,4 +543,5 @@ module.exports = {
  updateAgentProjects,
  addAgentResourceFile,
  removeAgentResourceFiles,
+  revertAgentVersion,
};
@@ -1,7 +1,25 @@
+const originalEnv = {
+  CREDS_KEY: process.env.CREDS_KEY,
+  CREDS_IV: process.env.CREDS_IV,
+};
+
+process.env.CREDS_KEY = '0123456789abcdef0123456789abcdef';
+process.env.CREDS_IV = '0123456789abcdef';
+
const mongoose = require('mongoose');
const { v4: uuidv4 } = require('uuid');
const { MongoMemoryServer } = require('mongodb-memory-server');
-const { Agent, addAgentResourceFile, removeAgentResourceFiles } = require('./Agent');
+const {
+  Agent,
+  addAgentResourceFile,
+  removeAgentResourceFiles,
+  createAgent,
+  updateAgent,
+  getAgent,
+  deleteAgent,
+  getListAgents,
+  updateAgentProjects,
+} = require('./Agent');

describe('Agent Resource File Operations', () => {
  let mongoServer;
@@ -15,6 +33,8 @@ describe('Agent Resource File Operations', () => {
  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
+    process.env.CREDS_KEY = originalEnv.CREDS_KEY;
+    process.env.CREDS_IV = originalEnv.CREDS_IV;
  });

  beforeEach(async () => {
@@ -332,3 +352,537 @@ describe('Agent Resource File Operations', () => {
    expect(finalFileIds).toHaveLength(0);
  });
});

describe('Agent CRUD Operations', () => {
  let mongoServer;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await Agent.deleteMany({});
  });

  test('should create and get an agent', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    const newAgent = await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      description: 'Test description',
    });

    expect(newAgent).toBeDefined();
    expect(newAgent.id).toBe(agentId);
    expect(newAgent.name).toBe('Test Agent');

    const retrievedAgent = await getAgent({ id: agentId });
    expect(retrievedAgent).toBeDefined();
    expect(retrievedAgent.id).toBe(agentId);
    expect(retrievedAgent.name).toBe('Test Agent');
    expect(retrievedAgent.description).toBe('Test description');
  });

  test('should delete an agent', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Agent To Delete',
      provider: 'test',
      model: 'test-model',
      author: authorId,
    });

    const agentBeforeDelete = await getAgent({ id: agentId });
    expect(agentBeforeDelete).toBeDefined();

    await deleteAgent({ id: agentId });

    const agentAfterDelete = await getAgent({ id: agentId });
    expect(agentAfterDelete).toBeNull();
  });

  test('should list agents by author', async () => {
    const authorId = new mongoose.Types.ObjectId();
    const otherAuthorId = new mongoose.Types.ObjectId();

    const agentIds = [];
    for (let i = 0; i < 5; i++) {
      const id = `agent_${uuidv4()}`;
      agentIds.push(id);
      await createAgent({
        id,
        name: `Agent ${i}`,
        provider: 'test',
        model: 'test-model',
        author: authorId,
      });
    }

    for (let i = 0; i < 3; i++) {
      await createAgent({
        id: `other_agent_${uuidv4()}`,
        name: `Other Agent ${i}`,
        provider: 'test',
        model: 'test-model',
        author: otherAuthorId,
      });
    }

    const result = await getListAgents({ author: authorId.toString() });

    expect(result).toBeDefined();
    expect(result.data).toBeDefined();
    expect(result.data).toHaveLength(5);
    expect(result.has_more).toBe(true);

    for (const agent of result.data) {
      expect(agent.author).toBe(authorId.toString());
    }
  });

  test('should update agent projects', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();
    const projectId1 = new mongoose.Types.ObjectId();
    const projectId2 = new mongoose.Types.ObjectId();
    const projectId3 = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Project Test Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      projectIds: [projectId1],
    });

    await updateAgent(
      { id: agentId },
      { $addToSet: { projectIds: { $each: [projectId2, projectId3] } } },
    );

    await updateAgent({ id: agentId }, { $pull: { projectIds: projectId1 } });

    await updateAgent({ id: agentId }, { projectIds: [projectId2, projectId3] });

    const updatedAgent = await getAgent({ id: agentId });
    expect(updatedAgent.projectIds).toHaveLength(2);
    expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId2.toString());
    expect(updatedAgent.projectIds.map((id) => id.toString())).toContain(projectId3.toString());
    expect(updatedAgent.projectIds.map((id) => id.toString())).not.toContain(projectId1.toString());

    await updateAgent({ id: agentId }, { projectIds: [] });

    const emptyProjectsAgent = await getAgent({ id: agentId });
    expect(emptyProjectsAgent.projectIds).toHaveLength(0);

    const nonExistentId = `agent_${uuidv4()}`;
    await expect(
      updateAgentProjects({
        id: nonExistentId,
        projectIds: [projectId1],
      }),
    ).rejects.toThrow();
  });

  test('should handle ephemeral agent loading', async () => {
    const agentId = 'ephemeral_test';
    const endpoint = 'openai';

    const originalModule = jest.requireActual('librechat-data-provider');

    const mockDataProvider = {
      ...originalModule,
      Constants: {
        ...originalModule.Constants,
        EPHEMERAL_AGENT_ID: 'ephemeral_test',
      },
    };

    jest.doMock('librechat-data-provider', () => mockDataProvider);

    const mockReq = {
      user: { id: 'user123' },
      body: {
        promptPrefix: 'This is a test instruction',
        ephemeralAgent: {
          execute_code: true,
          mcp: ['server1', 'server2'],
        },
      },
      app: {
        locals: {
          availableTools: {
            tool__server1: {},
            tool__server2: {},
            another_tool: {},
          },
        },
      },
    };

    const params = {
      req: mockReq,
      agent_id: agentId,
      endpoint,
      model_parameters: {
        model: 'gpt-4',
        temperature: 0.7,
      },
    };

    expect(agentId).toBeDefined();
    expect(endpoint).toBeDefined();

    jest.dontMock('librechat-data-provider');
  });

  test('should handle loadAgent functionality and errors', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Test Load Agent',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      tools: ['tool1', 'tool2'],
    });

    const agent = await getAgent({ id: agentId });

    expect(agent).toBeDefined();
    expect(agent.id).toBe(agentId);
    expect(agent.name).toBe('Test Load Agent');
    expect(agent.tools).toEqual(expect.arrayContaining(['tool1', 'tool2']));

    const mockLoadAgent = jest.fn().mockResolvedValue(agent);
    const loadedAgent = await mockLoadAgent();
    expect(loadedAgent).toBeDefined();
    expect(loadedAgent.id).toBe(agentId);

    const nonExistentId = `agent_${uuidv4()}`;
    const nonExistentAgent = await getAgent({ id: nonExistentId });
    expect(nonExistentAgent).toBeNull();

    const mockLoadAgentError = jest.fn().mockRejectedValue(new Error('No agent found with ID'));
    await expect(mockLoadAgentError()).rejects.toThrow('No agent found with ID');
  });
});

describe('Agent Version History', () => {
  let mongoServer;

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    const mongoUri = mongoServer.getUri();
    await mongoose.connect(mongoUri);
  });

  afterAll(async () => {
    await mongoose.disconnect();
    await mongoServer.stop();
  });

  beforeEach(async () => {
    await Agent.deleteMany({});
  });

  test('should create an agent with a single entry in versions array', async () => {
    const agentId = `agent_${uuidv4()}`;
    const agent = await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    expect(agent.versions).toBeDefined();
    expect(Array.isArray(agent.versions)).toBe(true);
    expect(agent.versions).toHaveLength(1);
    expect(agent.versions[0].name).toBe('Test Agent');
    expect(agent.versions[0].provider).toBe('test');
    expect(agent.versions[0].model).toBe('test-model');
  });

  test('should accumulate version history across multiple updates', async () => {
    const agentId = `agent_${uuidv4()}`;
    const author = new mongoose.Types.ObjectId();
    await createAgent({
      id: agentId,
      name: 'First Name',
      provider: 'test',
      model: 'test-model',
      author,
      description: 'First description',
    });

    await updateAgent({ id: agentId }, { name: 'Second Name', description: 'Second description' });
    await updateAgent({ id: agentId }, { name: 'Third Name', model: 'new-model' });
    const finalAgent = await updateAgent({ id: agentId }, { description: 'Final description' });

    expect(finalAgent.versions).toBeDefined();
    expect(Array.isArray(finalAgent.versions)).toBe(true);
    expect(finalAgent.versions).toHaveLength(4);

    expect(finalAgent.versions[0].name).toBe('First Name');
    expect(finalAgent.versions[0].description).toBe('First description');
    expect(finalAgent.versions[0].model).toBe('test-model');

    expect(finalAgent.versions[1].name).toBe('Second Name');
    expect(finalAgent.versions[1].description).toBe('Second description');
    expect(finalAgent.versions[1].model).toBe('test-model');

    expect(finalAgent.versions[2].name).toBe('Third Name');
    expect(finalAgent.versions[2].description).toBe('Second description');
    expect(finalAgent.versions[2].model).toBe('new-model');

    expect(finalAgent.versions[3].name).toBe('Third Name');
    expect(finalAgent.versions[3].description).toBe('Final description');
    expect(finalAgent.versions[3].model).toBe('new-model');

    expect(finalAgent.name).toBe('Third Name');
    expect(finalAgent.description).toBe('Final description');
    expect(finalAgent.model).toBe('new-model');
  });

  test('should not include metadata fields in version history', async () => {
    const agentId = `agent_${uuidv4()}`;
    await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    const updatedAgent = await updateAgent({ id: agentId }, { description: 'New description' });

    expect(updatedAgent.versions).toHaveLength(2);
    expect(updatedAgent.versions[0]._id).toBeUndefined();
    expect(updatedAgent.versions[0].__v).toBeUndefined();
    expect(updatedAgent.versions[0].name).toBe('Test Agent');
    expect(updatedAgent.versions[0].author).toBeDefined();

    expect(updatedAgent.versions[1]._id).toBeUndefined();
    expect(updatedAgent.versions[1].__v).toBeUndefined();
  });

  test('should not recursively include previous versions', async () => {
    const agentId = `agent_${uuidv4()}`;
    await createAgent({
      id: agentId,
      name: 'Test Agent',
      provider: 'test',
      model: 'test-model',
      author: new mongoose.Types.ObjectId(),
    });

    await updateAgent({ id: agentId }, { name: 'Updated Name 1' });
    await updateAgent({ id: agentId }, { name: 'Updated Name 2' });
    const finalAgent = await updateAgent({ id: agentId }, { name: 'Updated Name 3' });

    expect(finalAgent.versions).toHaveLength(4);

    finalAgent.versions.forEach((version) => {
      expect(version.versions).toBeUndefined();
    });
  });

  test('should handle MongoDB operators and field updates correctly', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();
    const projectId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'MongoDB Operator Test',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      tools: ['tool1'],
    });

    await updateAgent(
      { id: agentId },
      {
        description: 'Updated description',
        $push: { tools: 'tool2' },
        $addToSet: { projectIds: projectId },
      },
    );

    const firstUpdate = await getAgent({ id: agentId });
    expect(firstUpdate.description).toBe('Updated description');
    expect(firstUpdate.tools).toContain('tool1');
    expect(firstUpdate.tools).toContain('tool2');
    expect(firstUpdate.projectIds.map((id) => id.toString())).toContain(projectId.toString());
    expect(firstUpdate.versions).toHaveLength(2);

    await updateAgent(
      { id: agentId },
      {
        tools: ['tool2', 'tool3'],
      },
    );

    const secondUpdate = await getAgent({ id: agentId });
    expect(secondUpdate.tools).toHaveLength(2);
    expect(secondUpdate.tools).toContain('tool2');
    expect(secondUpdate.tools).toContain('tool3');
    expect(secondUpdate.tools).not.toContain('tool1');
    expect(secondUpdate.versions).toHaveLength(3);

    await updateAgent(
      { id: agentId },
      {
        $push: { tools: 'tool3' },
      },
    );

    const thirdUpdate = await getAgent({ id: agentId });
    const toolCount = thirdUpdate.tools.filter((t) => t === 'tool3').length;
    expect(toolCount).toBe(2);
    expect(thirdUpdate.versions).toHaveLength(4);
  });

  test('should handle parameter objects correctly', async () => {
    const agentId = `agent_${uuidv4()}`;
    const authorId = new mongoose.Types.ObjectId();

    await createAgent({
      id: agentId,
      name: 'Parameters Test',
      provider: 'test',
      model: 'test-model',
      author: authorId,
      model_parameters: { temperature: 0.7 },
    });

    const updatedAgent = await updateAgent(
      { id: agentId },
      { model_parameters: { temperature: 0.8 } },
    );

    expect(updatedAgent.versions).toHaveLength(2);
    expect(updatedAgent.model_parameters.temperature).toBe(0.8);

    await updateAgent(
      { id: agentId },
      {
        model_parameters: {
          temperature: 0.8,
          max_tokens: 1000,
        },
      },
    );

    const complexAgent = await getAgent({ id: agentId });
    expect(complexAgent.versions).toHaveLength(3);
    expect(complexAgent.model_parameters.temperature).toBe(0.8);
    expect(complexAgent.model_parameters.max_tokens).toBe(1000);

    await updateAgent({ id: agentId }, { model_parameters: {} });

    const emptyParamsAgent = await getAgent({ id: agentId });
    expect(emptyParamsAgent.versions).toHaveLength(4);
    expect(emptyParamsAgent.model_parameters).toEqual({});
  });

  test('should detect duplicate versions and reject updates', async () => {
    const originalConsoleError = console.error;
    console.error = jest.fn();

    try {
      const agentId = `agent_${uuidv4()}`;
      const authorId = new mongoose.Types.ObjectId();
      const projectId1 = new mongoose.Types.ObjectId();
      const projectId2 = new mongoose.Types.ObjectId();

      const testCases = [
        {
          name: 'simple field update',
          initial: {
            name: 'Test Agent',
            description: 'Initial description',
          },
          update: { name: 'Updated Name' },
          duplicate: { name: 'Updated Name' },
        },
        {
          name: 'object field update',
          initial: {
            model_parameters: { temperature: 0.7 },
          },
          update: { model_parameters: { temperature: 0.8 } },
          duplicate: { model_parameters: { temperature: 0.8 } },
        },
        {
          name: 'array field update',
          initial: {
            tools: ['tool1', 'tool2'],
          },
          update: { tools: ['tool2', 'tool3'] },
          duplicate: { tools: ['tool2', 'tool3'] },
        },
        {
          name: 'projectIds update',
          initial: {
            projectIds: [projectId1],
          },
          update: { projectIds: [projectId1, projectId2] },
          duplicate: { projectIds: [projectId2, projectId1] },
        },
      ];

      for (const testCase of testCases) {
        const testAgentId = `agent_${uuidv4()}`;

        await createAgent({
          id: testAgentId,
          provider: 'test',
          model: 'test-model',
          author: authorId,
          ...testCase.initial,
        });

        await updateAgent({ id: testAgentId }, testCase.update);

        let error;
        try {
          await updateAgent({ id: testAgentId }, testCase.duplicate);
        } catch (e) {
          error = e;
        }

        expect(error).toBeDefined();
        expect(error.message).toContain('Duplicate version');
        expect(error.statusCode).toBe(409);
        expect(error.details).toBeDefined();
        expect(error.details.duplicateVersion).toBeDefined();

        const agent = await getAgent({ id: testAgentId });
        expect(agent.versions).toHaveLength(2);
      }
    } finally {
      console.error = originalConsoleError;
    }
  });
});
@@ -86,7 +86,7 @@
    "mime": "^3.0.0",
    "module-alias": "^2.2.3",
    "mongoose": "^8.12.1",
-   "multer": "^1.4.5-lts.1",
+   "multer": "^2.0.0",
    "nanoid": "^3.3.7",
    "nodemailer": "^6.9.15",
    "ollama": "^0.5.0",
@@ -23,6 +23,7 @@ const { updateAction, getActions } = require('~/models/Action');
const { updateAgentProjects } = require('~/models/Agent');
const { getProjectByName } = require('~/models/Project');
const { deleteFileByFilter } = require('~/models/File');
+const { revertAgentVersion } = require('~/models/Agent');
const { logger } = require('~/config');

const systemTools = {
@@ -104,6 +105,8 @@ const getAgentHandler = async (req, res) => {
    return res.status(404).json({ error: 'Agent not found' });
  }

+  agent.version = agent.versions ? agent.versions.length : 0;
+
  if (agent.avatar && agent.avatar?.source === FileSources.s3) {
    const originalUrl = agent.avatar.filepath;
    agent.avatar.filepath = await refreshS3Url(agent.avatar);
@@ -127,6 +130,7 @@ const getAgentHandler = async (req, res) => {
      author: agent.author,
      projectIds: agent.projectIds,
      isCollaborative: agent.isCollaborative,
+     version: agent.version,
    });
  }
  return res.status(200).json(agent);
@@ -187,6 +191,14 @@ const updateAgentHandler = async (req, res) => {
    return res.json(updatedAgent);
  } catch (error) {
    logger.error('[/Agents/:id] Error updating Agent', error);
+
+   if (error.statusCode === 409) {
+     return res.status(409).json({
+       error: error.message,
+       details: error.details,
+     });
+   }
+
    res.status(500).json({ error: error.message });
  }
};
@@ -411,6 +423,66 @@ const uploadAgentAvatarHandler = async (req, res) => {
  }
};

/**
 * Reverts an agent to a previous version from its version history.
 * @route POST /agents/:id/revert
 * @param {object} req - Express Request object
 * @param {object} req.params - Request parameters
 * @param {string} req.params.id - The ID of the agent to revert
 * @param {object} req.body - Request body
 * @param {number} req.body.version_index - The index of the version to revert to
 * @param {object} req.user - Authenticated user information
 * @param {string} req.user.id - User ID
 * @param {string} req.user.role - User role
 * @param {ServerResponse} res - Express Response object
 * @returns {Promise<Agent>} 200 - The updated agent after reverting to the specified version
 * @throws {Error} 400 - If version_index is missing
 * @throws {Error} 403 - If user doesn't have permission to modify the agent
 * @throws {Error} 404 - If agent not found
 * @throws {Error} 500 - If there's an internal server error during the reversion process
 */
const revertAgentVersionHandler = async (req, res) => {
  try {
    const { id } = req.params;
    const { version_index } = req.body;

    if (version_index === undefined) {
      return res.status(400).json({ error: 'version_index is required' });
    }

    const isAdmin = req.user.role === SystemRoles.ADMIN;
    const existingAgent = await getAgent({ id });

    if (!existingAgent) {
      return res.status(404).json({ error: 'Agent not found' });
    }

    const isAuthor = existingAgent.author.toString() === req.user.id;
    const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;

    if (!hasEditPermission) {
      return res.status(403).json({
        error: 'You do not have permission to modify this non-collaborative agent',
      });
    }

    const updatedAgent = await revertAgentVersion({ id }, version_index);

    if (updatedAgent.author) {
      updatedAgent.author = updatedAgent.author.toString();
    }

    if (updatedAgent.author !== req.user.id) {
      delete updatedAgent.author;
    }

    return res.json(updatedAgent);
  } catch (error) {
    logger.error('[/agents/:id/revert] Error reverting Agent version', error);
    res.status(500).json({ error: error.message });
  }
};

module.exports = {
  createAgent: createAgentHandler,
  getAgent: getAgentHandler,
@@ -419,4 +491,5 @@ module.exports = {
  deleteAgent: deleteAgentHandler,
  getListAgents: getListAgentsHandler,
  uploadAgentAvatar: uploadAgentAvatarHandler,
+  revertAgentVersion: revertAgentVersionHandler,
};
@@ -326,8 +326,15 @@ const chatV1 = async (req, res) => {

      file_ids = files.map(({ file_id }) => file_id);
      if (file_ids.length || thread_file_ids.length) {
-       userMessage.file_ids = file_ids;
+       attachedFileIds = new Set([...file_ids, ...thread_file_ids]);
+       if (endpoint === EModelEndpoint.azureAssistants) {
+         userMessage.attachments = Array.from(attachedFileIds).map((file_id) => ({
+           file_id,
+           tools: [{ type: 'file_search' }],
+         }));
+       } else {
+         userMessage.file_ids = Array.from(attachedFileIds);
+       }
      }
    };

@@ -24,10 +24,13 @@ const routes = require('./routes');

const { PORT, HOST, ALLOW_SOCIAL_LOGIN, DISABLE_COMPRESSION, TRUST_PROXY } = process.env ?? {};

-const port = Number(PORT) || 3080;
+// Allow PORT=0 to be used for automatic free port assignment
+const port = isNaN(Number(PORT)) ? 3080 : Number(PORT);
const host = HOST || 'localhost';
const trusted_proxy = Number(TRUST_PROXY) || 1; /* trust first proxy by default */

+const app = express();
+
const startServer = async () => {
  if (typeof Bun !== 'undefined') {
    axios.defaults.headers.common['Accept-Encoding'] = 'gzip';
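The port change matters because 0 is falsy in JavaScript, so the old expression silently defeated PORT=0's "pick a free port" semantics; the isNaN check keeps 0. A quick worked comparison:

// Old: a falsy 0 falls through to the default.
Number('0') || 3080;                                  // => 3080 (bug for PORT=0)
// New: only non-numeric values fall back.
isNaN(Number('0')) ? 3080 : Number('0');              // => 0
isNaN(Number(undefined)) ? 3080 : Number(undefined);  // => 3080 (PORT unset)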
@@ -36,8 +39,9 @@ const startServer = async () => {
  logger.info('Connected to MongoDB');
  await indexSync();

- const app = express();
+ app.disable('x-powered-by');
+ app.set('trust proxy', trusted_proxy);

  await AppService(app);

  const indexPath = path.join(app.locals.paths.dist, 'index.html');
@@ -49,23 +53,24 @@ const startServer = async () => {
  app.use(noIndex);
  app.use(errorController);
  app.use(express.json({ limit: '3mb' }));
- app.use(mongoSanitize());
  app.use(express.urlencoded({ extended: true, limit: '3mb' }));
- app.use(staticCache(app.locals.paths.dist));
- app.use(staticCache(app.locals.paths.fonts));
- app.use(staticCache(app.locals.paths.assets));
- app.set('trust proxy', trusted_proxy);
+ app.use(mongoSanitize());
  app.use(cors());
  app.use(cookieParser());

  if (!isEnabled(DISABLE_COMPRESSION)) {
    app.use(compression());
  } else {
    console.warn('Response compression has been disabled via DISABLE_COMPRESSION.');
  }

+ // Serve static assets with aggressive caching
+ app.use(staticCache(app.locals.paths.dist));
+ app.use(staticCache(app.locals.paths.fonts));
+ app.use(staticCache(app.locals.paths.assets));
+
  if (!ALLOW_SOCIAL_LOGIN) {
-   console.warn(
-     'Social logins are disabled. Set Environment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.',
-   );
+   console.warn('Social logins are disabled. Set ALLOW_SOCIAL_LOGIN=true to enable them.');
  }

  /* OAUTH */
@@ -128,7 +133,7 @@ const startServer = async () => {
  });

  app.listen(port, host, () => {
-   if (host == '0.0.0.0') {
+   if (host === '0.0.0.0') {
      logger.info(
        `Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it`,
      );
@@ -176,3 +181,6 @@ process.on('uncaughtException', (err) => {

  process.exit(1);
});
+
+// export app for easier testing purposes
+module.exports = app;
api/server/index.spec.js (new file, 78 lines)
@@ -0,0 +1,78 @@
const fs = require('fs');
const path = require('path');
const request = require('supertest');
const { MongoMemoryServer } = require('mongodb-memory-server');
const mongoose = require('mongoose');

describe('Server Configuration', () => {
  // Increase the default timeout to allow for Mongo cleanup
  jest.setTimeout(30_000);

  let mongoServer;
  let app;

  /** Mocked fs.readFileSync for index.html */
  const originalReadFileSync = fs.readFileSync;
  beforeAll(() => {
    fs.readFileSync = function (filepath, options) {
      if (filepath.includes('index.html')) {
        return '<!DOCTYPE html><html><head><title>LibreChat</title></head><body><div id="root"></div></body></html>';
      }
      return originalReadFileSync(filepath, options);
    };
  });

  afterAll(() => {
    // Restore original fs.readFileSync
    fs.readFileSync = originalReadFileSync;
  });

  beforeAll(async () => {
    mongoServer = await MongoMemoryServer.create();
    process.env.MONGO_URI = mongoServer.getUri();
    process.env.PORT = '0'; // Use a random available port
    app = require('~/server');

    // Wait for the app to be healthy
    await healthCheckPoll(app);
  });

  afterAll(async () => {
    await mongoServer.stop();
    await mongoose.disconnect();
  });

  it('should return OK for /health', async () => {
    const response = await request(app).get('/health');
    expect(response.status).toBe(200);
    expect(response.text).toBe('OK');
  });

  it('should not cache index page', async () => {
    const response = await request(app).get('/');
    expect(response.status).toBe(200);
    expect(response.headers['cache-control']).toBe('no-cache, no-store, must-revalidate');
    expect(response.headers['pragma']).toBe('no-cache');
    expect(response.headers['expires']).toBe('0');
  });
});

// Polls the /health endpoint every 30ms for up to 10 seconds to wait for the server to start completely
async function healthCheckPoll(app, retries = 0) {
  const maxRetries = Math.floor(10000 / 30); // 10 seconds / 30ms
  try {
    const response = await request(app).get('/health');
    if (response.status === 200) {
      return; // App is healthy
    }
  } catch (error) {
    // Ignore connection errors during polling
  }

  if (retries < maxRetries) {
    await new Promise((resolve) => setTimeout(resolve, 30));
    await healthCheckPoll(app, retries + 1);
  } else {
    throw new Error('App did not become healthy within 10 seconds.');
  }
}
@@ -78,6 +78,15 @@ router.post('/:id/duplicate', checkAgentCreate, v1.duplicateAgent);
 */
router.delete('/:id', checkAgentCreate, v1.deleteAgent);

+/**
+ * Reverts an agent to a previous version.
+ * @route POST /agents/:id/revert
+ * @param {string} req.params.id - Agent identifier.
+ * @param {number} req.body.version_index - Index of the version to revert to.
+ * @returns {Agent} 200 - success response - application/json
+ */
+router.post('/:id/revert', checkGlobalAgentShare, v1.revertAgentVersion);
+
/**
 * Returns a list of agents.
 * @route GET /agents
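A hedged client-side sketch of calling the new route (assuming the agents router is mounted under /api/agents and authentication is already handled; names are illustrative):

// Sketch: revert an agent to versions[1] via the new endpoint.
const res = await fetch(`/api/agents/${agentId}/revert`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ version_index: 1 }),
});
const revertedAgent = await res.json();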
@@ -121,6 +121,14 @@ router.delete('/', async (req, res) => {
    await processDeleteRequest({ req, files: assistantFiles });
    res.status(200).json({ message: 'File associations removed successfully from assistant' });
    return;
+ } else if (
+   req.body.assistant_id &&
+   req.body.files?.[0]?.filepath === EModelEndpoint.azureAssistants
+ ) {
+   await processDeleteRequest({ req, files: req.body.files });
+   return res
+     .status(200)
+     .json({ message: 'File associations removed successfully from Azure Assistant' });
  }

  await processDeleteRequest({ req, files: dbFiles });
@@ -10,17 +10,7 @@ const getLogStores = require('~/cache/getLogStores');
 * */
async function getCustomConfig() {
  const cache = getLogStores(CacheKeys.CONFIG_STORE);
- let customConfig = await cache.get(CacheKeys.CUSTOM_CONFIG);
-
- if (!customConfig) {
-   customConfig = await loadCustomConfig();
- }
-
- if (!customConfig) {
-   return null;
- }
-
- return customConfig;
+ return (await cache.get(CacheKeys.CUSTOM_CONFIG)) || (await loadCustomConfig());
}

/**
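The collapsed return relies on `||` short-circuiting: a cached config is returned as-is, otherwise loadCustomConfig() runs. One subtlety worth noting in review: when both sources are empty, the function now resolves to whatever loadCustomConfig() returned (possibly undefined) rather than an explicit null. Equivalence sketch:

// Sketch of the two paths:
const cached = await cache.get(CacheKeys.CUSTOM_CONFIG); // e.g. null on a cold cache
const config = cached || (await loadCustomConfig());     // fallback runs only when cached is falsy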
@@ -29,7 +29,14 @@ async function loadConfigEndpoints(req) {

  for (let i = 0; i < customEndpoints.length; i++) {
    const endpoint = customEndpoints[i];
-   const { baseURL, apiKey, name: configName, iconURL, modelDisplayLabel } = endpoint;
+   const {
+     baseURL,
+     apiKey,
+     name: configName,
+     iconURL,
+     modelDisplayLabel,
+     customParams,
+   } = endpoint;
    const name = normalizeEndpointName(configName);

    const resolvedApiKey = extractEnvVariable(apiKey);
@@ -41,6 +48,7 @@ async function loadConfigEndpoints(req) {
      userProvideURL: isUserProvided(resolvedBaseURL),
      modelDisplayLabel,
      iconURL,
+     customParams,
    };
  }
}
@@ -1,10 +1,18 @@
const path = require('path');
-const { CacheKeys, configSchema, EImageOutputType } = require('librechat-data-provider');
+const {
+  CacheKeys,
+  configSchema,
+  EImageOutputType,
+  validateSettingDefinitions,
+  agentParamSettings,
+  paramSettings,
+} = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const loadYaml = require('~/utils/loadYaml');
const { logger } = require('~/config');
const axios = require('axios');
const yaml = require('js-yaml');
+const keyBy = require('lodash/keyBy');

const projectRoot = path.resolve(__dirname, '..', '..', '..', '..');
const defaultConfigPath = path.resolve(projectRoot, 'librechat.yaml');
@@ -105,6 +113,10 @@ https://www.librechat.ai/docs/configuration/stt_tts`);
    logger.debug('Custom config:', customConfig);
  }

+ (customConfig.endpoints?.custom ?? [])
+   .filter((endpoint) => endpoint.customParams)
+   .forEach((endpoint) => parseCustomParams(endpoint.name, endpoint.customParams));
+
  if (customConfig.cache) {
    const cache = getLogStores(CacheKeys.CONFIG_STORE);
    await cache.set(CacheKeys.CUSTOM_CONFIG, customConfig);
@@ -117,4 +129,52 @@ https://www.librechat.ai/docs/configuration/stt_tts`);
  return customConfig;
}

// Validate and fill out missing values for custom parameters
function parseCustomParams(endpointName, customParams) {
  const paramEndpoint = customParams.defaultParamsEndpoint;
  customParams.paramDefinitions = customParams.paramDefinitions || [];

  // Checks if `defaultParamsEndpoint` is a key in `paramSettings`.
  const validEndpoints = new Set([
    ...Object.keys(paramSettings),
    ...Object.keys(agentParamSettings),
  ]);
  if (!validEndpoints.has(paramEndpoint)) {
    throw new Error(
      `defaultParamsEndpoint of "${endpointName}" endpoint is invalid. ` +
        `Valid options are ${Array.from(validEndpoints).join(', ')}`,
    );
  }

  // creates default param maps
  const regularParams = paramSettings[paramEndpoint] ?? [];
  const agentParams = agentParamSettings[paramEndpoint] ?? [];
  const defaultParams = regularParams.concat(agentParams);
  const defaultParamsMap = keyBy(defaultParams, 'key');

  // TODO: Remove this check once we support new parameters not part of default parameters.
  // Checks if every key in `paramDefinitions` is valid.
  const validKeys = new Set(Object.keys(defaultParamsMap));
  const paramKeys = customParams.paramDefinitions.map((param) => param.key);
  if (paramKeys.some((key) => !validKeys.has(key))) {
    throw new Error(
      `paramDefinitions of "${endpointName}" endpoint contains invalid key(s). ` +
        `Valid parameter keys are ${Array.from(validKeys).join(', ')}`,
    );
  }

  // Fill out missing values for custom param definitions
  customParams.paramDefinitions = customParams.paramDefinitions.map((param) => {
    return { ...defaultParamsMap[param.key], ...param, optionType: 'custom' };
  });

  try {
    validateSettingDefinitions(customParams.paramDefinitions);
  } catch (e) {
    throw new Error(
      `Custom parameter definitions for "${endpointName}" endpoint is malformed: ${e.message}`,
    );
  }
}

module.exports = loadCustomConfig;
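For orientation, a sketch of the customParams shape this function validates and completes (endpoint name is hypothetical; valid keys come from paramSettings/agentParamSettings for the chosen defaultParamsEndpoint):

// Sketch of input as parsed from librechat.yaml:
const customParams = {
  defaultParamsEndpoint: 'google',
  paramDefinitions: [
    // Partial definition: missing fields are filled from the defaults map,
    // and optionType is forced to 'custom'.
    { key: 'temperature', default: 0.7 },
  ],
};
parseCustomParams('My Endpoint', customParams);
// customParams.paramDefinitions[0] now carries the full setting definition.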
@@ -1,6 +1,34 @@
jest.mock('axios');
jest.mock('~/cache/getLogStores');
jest.mock('~/utils/loadYaml');
jest.mock('librechat-data-provider', () => {
  const actual = jest.requireActual('librechat-data-provider');
  return {
    ...actual,
    paramSettings: { foo: {}, bar: {}, custom: {} },
    agentParamSettings: {
      custom: [],
      google: [
        {
          key: 'pressure',
          type: 'string',
          component: 'input',
        },
        {
          key: 'temperature',
          type: 'number',
          component: 'slider',
          default: 0.5,
          range: {
            min: 0,
            max: 2,
            step: 0.01,
          },
        },
      ],
    },
  };
});

const axios = require('axios');
const loadCustomConfig = require('./loadCustomConfig');
@@ -150,4 +178,126 @@ describe('loadCustomConfig', () => {
    expect(logger.info).toHaveBeenCalledWith(JSON.stringify(mockConfig, null, 2));
    expect(logger.debug).toHaveBeenCalledWith('Custom config:', mockConfig);
  });

  describe('parseCustomParams', () => {
    const mockConfig = {
      version: '1.0',
      cache: false,
      endpoints: {
        custom: [
          {
            name: 'Google',
            apiKey: 'user_provided',
            customParams: {},
          },
        ],
      },
    };

    async function loadCustomParams(customParams) {
      mockConfig.endpoints.custom[0].customParams = customParams;
      loadYaml.mockReturnValue(mockConfig);
      return await loadCustomConfig();
    }

    beforeEach(() => {
      jest.resetAllMocks();
      process.env.CONFIG_PATH = 'validConfig.yaml';
    });

    it('returns no error when customParams is undefined', async () => {
      const result = await loadCustomParams(undefined);
      expect(result).toEqual(mockConfig);
    });

    it('returns no error when customParams is valid', async () => {
      const result = await loadCustomParams({
        defaultParamsEndpoint: 'google',
        paramDefinitions: [
          {
            key: 'temperature',
            default: 0.5,
          },
        ],
      });
      expect(result).toEqual(mockConfig);
    });

    it('throws an error when paramDefinitions contain unsupported keys', async () => {
      const malformedCustomParams = {
        defaultParamsEndpoint: 'google',
        paramDefinitions: [
          { key: 'temperature', default: 0.5 },
          { key: 'unsupportedKey', range: 0.5 },
        ],
      };
      await expect(loadCustomParams(malformedCustomParams)).rejects.toThrow(
        'paramDefinitions of "Google" endpoint contains invalid key(s). Valid parameter keys are pressure, temperature',
      );
    });

    it('throws an error when paramDefinitions is malformed', async () => {
      const malformedCustomParams = {
        defaultParamsEndpoint: 'google',
        paramDefinitions: [
          {
            key: 'temperature',
            type: 'noomba',
            component: 'inpoot',
            optionType: 'custom',
          },
        ],
      };
      await expect(loadCustomParams(malformedCustomParams)).rejects.toThrow(
        /Custom parameter definitions for "Google" endpoint is malformed:/,
      );
    });

    it('throws an error when defaultParamsEndpoint is not provided', async () => {
      const malformedCustomParams = { defaultParamsEndpoint: undefined };
      await expect(loadCustomParams(malformedCustomParams)).rejects.toThrow(
        'defaultParamsEndpoint of "Google" endpoint is invalid. Valid options are foo, bar, custom, google',
      );
    });

    it('fills the paramDefinitions with missing values', async () => {
      const customParams = {
        defaultParamsEndpoint: 'google',
        paramDefinitions: [
          { key: 'temperature', default: 0.7, range: { min: 0.1, max: 0.9, step: 0.1 } },
          { key: 'pressure', component: 'textarea' },
        ],
      };

      const parsedConfig = await loadCustomParams(customParams);
      const paramDefinitions = parsedConfig.endpoints.custom[0].customParams.paramDefinitions;
      expect(paramDefinitions).toEqual([
        {
          columnSpan: 1,
          component: 'slider',
          default: 0.7, // overridden
          includeInput: true,
          key: 'temperature',
          label: 'temperature',
          optionType: 'custom',
          range: {
            // overridden
            max: 0.9,
            min: 0.1,
            step: 0.1,
          },
          type: 'number',
        },
        {
          columnSpan: 1,
          component: 'textarea', // overridden
          key: 'pressure',
          label: 'pressure',
          optionType: 'custom',
          placeholder: '',
          type: 'string',
        },
      ]);
    });
  });
});
@@ -105,6 +105,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
      headers: resolvedHeaders,
      addParams: endpointConfig.addParams,
      dropParams: endpointConfig.dropParams,
+     customParams: endpointConfig.customParams,
      titleConvo: endpointConfig.titleConvo,
      titleModel: endpointConfig.titleModel,
      forcePrompt: endpointConfig.forcePrompt,
@@ -54,7 +54,7 @@ async function deleteOpenAIFile(req, file, openai) {
    throw new Error('OpenAI returned `false` for deleted status');
  }
  logger.debug(
-   `[deleteOpenAIFile] User ${req.user.id} successfully deleted ${file.file_id} from OpenAI`,
+   `[deleteOpenAIFile] User ${req.user.id} successfully deleted file "${file.file_id}" from OpenAI`,
  );
} catch (error) {
  logger.error('[deleteOpenAIFile] Error deleting file from OpenAI: ' + error.message);
@@ -5,9 +5,10 @@ const { EModelEndpoint } = require('librechat-data-provider');
 * Resizes an image from a given buffer based on the specified resolution.
 *
 * @param {Buffer} inputBuffer - The buffer of the image to be resized.
- * @param {'low' | 'high'} resolution - The resolution to resize the image to.
+ * @param {'low' | 'high' | {percentage?: number, px?: number}} resolution - The resolution to resize the image to.
 *   'low' for a maximum of 512x512 resolution,
- *   'high' for a maximum of 768x2000 resolution.
+ *   'high' for a maximum of 768x2000 resolution,
+ *   or a custom object with percentage or px values.
 * @param {EModelEndpoint} endpoint - Identifier for specific endpoint handling
 * @returns {Promise<{buffer: Buffer, width: number, height: number}>} An object containing the resized image buffer and its dimensions.
 * @throws Will throw an error if the resolution parameter is invalid.
@@ -17,10 +18,32 @@ async function resizeImageBuffer(inputBuffer, resolution, endpoint) {
  const maxShortSideHighRes = 768;
  const maxLongSideHighRes = endpoint === EModelEndpoint.anthropic ? 1568 : 2000;

+ let customPercent, customPx;
+ if (resolution && typeof resolution === 'object') {
+   if (typeof resolution.percentage === 'number') {
+     customPercent = resolution.percentage;
+   } else if (typeof resolution.px === 'number') {
+     customPx = resolution.px;
+   }
+ }
+
+ let newWidth, newHeight;
  let resizeOptions = { fit: 'inside', withoutEnlargement: true };

- if (resolution === 'low') {
+ if (customPercent != null || customPx != null) {
+   // percentage-based resize
+   const metadata = await sharp(inputBuffer).metadata();
+   if (customPercent != null) {
+     newWidth = Math.round(metadata.width * (customPercent / 100));
+     newHeight = Math.round(metadata.height * (customPercent / 100));
+   } else {
+     // pixel max on both sides
+     newWidth = Math.min(metadata.width, customPx);
+     newHeight = Math.min(metadata.height, customPx);
+   }
+   resizeOptions.width = newWidth;
+   resizeOptions.height = newHeight;
+ } else if (resolution === 'low') {
    resizeOptions.width = maxLowRes;
    resizeOptions.height = maxLowRes;
  } else if (resolution === 'high') {
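The resolution parameter now also accepts an object; a usage sketch of the two new forms (buffer and endpoint as elsewhere in this file):

// Sketch: scale to 50% of the original dimensions.
await resizeImageBuffer(inputBuffer, { percentage: 50 }, endpoint);
// Sketch: cap both sides at 1024px (never enlarging, per fit: 'inside').
await resizeImageBuffer(inputBuffer, { px: 1024 }, endpoint);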
@@ -137,11 +137,13 @@ const processDeleteRequest = async ({ req, files }) => {
  /** @type {Record<string, OpenAI | undefined>} */
  const client = { [FileSources.openai]: undefined, [FileSources.azure]: undefined };
  const initializeClients = async () => {
-   const openAIClient = await getOpenAIClient({
-     req,
-     overrideEndpoint: EModelEndpoint.assistants,
-   });
-   client[FileSources.openai] = openAIClient.openai;
+   if (req.app.locals[EModelEndpoint.assistants]) {
+     const openAIClient = await getOpenAIClient({
+       req,
+       overrideEndpoint: EModelEndpoint.assistants,
+     });
+     client[FileSources.openai] = openAIClient.openai;
+   }

    if (!req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
      return;
@@ -693,7 +695,7 @@ const processOpenAIFile = async ({
const processOpenAIImageOutput = async ({ req, buffer, file_id, filename, fileExt }) => {
  const currentDate = new Date();
  const formattedDate = currentDate.toISOString();
- const _file = await convertImage(req, buffer, 'high', `${file_id}${fileExt}`);
+ const _file = await convertImage(req, buffer, undefined, `${file_id}${fileExt}`);
  const file = {
    ..._file,
    usage: 1,
@@ -838,8 +840,9 @@ function base64ToBuffer(base64String) {

async function saveBase64Image(
  url,
- { req, file_id: _file_id, filename: _filename, endpoint, context, resolution = 'high' },
+ { req, file_id: _file_id, filename: _filename, endpoint, context, resolution },
) {
+ const effectiveResolution = resolution ?? req.app.locals.fileConfig?.imageGeneration ?? 'high';
  const file_id = _file_id ?? v4();
  let filename = `${file_id}-${_filename}`;
  const { buffer: inputBuffer, type } = base64ToBuffer(url);
@@ -852,7 +855,7 @@ async function saveBase64Image(
    }
  }

- const image = await resizeImageBuffer(inputBuffer, resolution, endpoint);
+ const image = await resizeImageBuffer(inputBuffer, effectiveResolution, endpoint);
  const source = req.app.locals.fileStrategy;
  const { saveBuffer } = getStrategyFunctions(source);
  const filepath = await saveBuffer({
@@ -1,5 +1,6 @@
const { z } = require('zod');
const { tool } = require('@langchain/core/tools');
+const { normalizeServerName } = require('librechat-mcp');
const { Constants: AgentConstants, Providers } = require('@librechat/agents');
const {
  Constants,
@@ -38,6 +39,7 @@ async function createMCPTool({ req, toolKey, provider: _provider }) {
  }

  const [toolName, serverName] = toolKey.split(Constants.mcp_delimiter);
+ const normalizedToolKey = `${toolName}${Constants.mcp_delimiter}${normalizeServerName(serverName)}`;

  if (!req.user?.id) {
    logger.error(
@@ -83,7 +85,7 @@ async function createMCPTool({ req, toolKey, provider: _provider }) {

  const toolInstance = tool(_call, {
    schema,
-   name: toolKey,
+   name: normalizedToolKey,
    description: description || '',
    responseFormat: AgentConstants.CONTENT_AND_ARTIFACT,
  });
@@ -26,7 +26,17 @@ function loadTurnstileConfig(config, configDefaults) {
    options: customTurnstile.options ?? defaults.options,
  });

- logger.info('Turnstile configuration loaded:\n' + JSON.stringify(loadedTurnstile, null, 2));
+ const enabled = Boolean(loadedTurnstile.siteKey);
+
+ if (enabled) {
+   logger.info(
+     'Turnstile is ENABLED with configuration:\n' + JSON.stringify(loadedTurnstile, null, 2),
+   );
+ } else {
+   logger.info('Turnstile is DISABLED (no siteKey provided).');
+ }
+

  return loadedTurnstile;
}
@@ -14,6 +14,7 @@ const staticCache = (staticPath) =>
      res.setHeader('Cache-Control', `public, max-age=${maxAge}, s-maxage=${sMaxAge}`);
    }
  },
+ index: false,
});

module.exports = staticCache;
@@ -23,7 +23,7 @@ const {

// Check required environment variables
if (!LDAP_URL || !LDAP_USER_SEARCH_BASE) {
- return null;
+ module.exports = null;
}

const searchAttributes = [
@@ -8,6 +8,7 @@ jest.mock('winston', () => {
  mockFormatFunction.printf = jest.fn();
  mockFormatFunction.errors = jest.fn();
  mockFormatFunction.splat = jest.fn();
+ mockFormatFunction.json = jest.fn();
  return {
    format: mockFormatFunction,
    createLogger: jest.fn().mockReturnValue({
@@ -19,6 +20,7 @@ jest.mock('winston', () => {
    transports: {
      Console: jest.fn(),
      DailyRotateFile: jest.fn(),
+     File: jest.fn(),
    },
    addColors: jest.fn(),
  };
@@ -6,3 +6,7 @@ process.env.BAN_VIOLATIONS = 'true';
process.env.BAN_DURATION = '7200000';
process.env.BAN_INTERVAL = '20';
process.env.CI = 'true';
+process.env.JWT_SECRET = 'test';
+process.env.JWT_REFRESH_SECRET = 'test';
+process.env.CREDS_KEY = 'test';
+process.env.CREDS_IV = 'test';