mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-02-20 17:34:10 +01:00
📦 refactor: Consolidate DB models, encapsulating Mongoose usage in data-schemas (#11830)
* chore: move database model methods to /packages/data-schemas * chore: add TypeScript ESLint rule to warn on unused variables * refactor: model imports to streamline access - Consolidated model imports across various files to improve code organization and reduce redundancy. - Updated imports for models such as Assistant, Message, Conversation, and others to a unified import path. - Adjusted middleware and service files to reflect the new import structure, ensuring functionality remains intact. - Enhanced test files to align with the new import paths, maintaining test coverage and integrity. * chore: migrate database models to packages/data-schemas and refactor all direct Mongoose Model usage outside of data-schemas * test: update agent model mocks in unit tests - Added `getAgent` mock to `client.test.js` to enhance test coverage for agent-related functionality. - Removed redundant `getAgent` and `getAgents` mocks from `openai.spec.js` and `responses.unit.spec.js` to streamline test setup and reduce duplication. - Ensured consistency in agent mock implementations across test files. * fix: update types in data-schemas * refactor: enhance type definitions in transaction and spending methods - Updated type definitions in `checkBalance.ts` to use specific request and response types. - Refined `spendTokens.ts` to utilize a new `SpendTxData` interface for better clarity and type safety. - Improved transaction handling in `transaction.ts` by introducing `TransactionResult` and `TxData` interfaces, ensuring consistent data structures across methods. - Adjusted unit tests in `transaction.spec.ts` to accommodate new type definitions and enhance robustness. * refactor: streamline model imports and enhance code organization - Consolidated model imports across various controllers and services to a unified import path, improving code clarity and reducing redundancy. - Updated multiple files to reflect the new import structure, ensuring all functionalities remain intact. 
- Enhanced overall code organization by removing duplicate import statements and optimizing the usage of model methods. * feat: implement loadAddedAgent and refactor agent loading logic - Introduced `loadAddedAgent` function to handle loading agents from added conversations, supporting multi-convo parallel execution. - Created a new `load.ts` file to encapsulate agent loading functionalities, including `loadEphemeralAgent` and `loadAgent`. - Updated the `index.ts` file to export the new `load` module instead of the deprecated `loadAgent`. - Enhanced type definitions and improved error handling in the agent loading process. - Adjusted unit tests to reflect changes in the agent loading structure and ensure comprehensive coverage. * refactor: enhance balance handling with new update interface - Introduced `IBalanceUpdate` interface to streamline balance update operations across the codebase. - Updated `upsertBalanceFields` method signatures in `balance.ts`, `transaction.ts`, and related tests to utilize the new interface for improved type safety. - Adjusted type imports in `balance.spec.ts` to include `IBalanceUpdate`, ensuring consistency in balance management functionalities. - Enhanced overall code clarity and maintainability by refining type definitions related to balance operations. * feat: add unit tests for loadAgent functionality and enhance agent loading logic - Introduced comprehensive unit tests for the `loadAgent` function, covering various scenarios including null and empty agent IDs, loading of ephemeral agents, and permission checks. - Enhanced the `initializeClient` function by moving `getConvoFiles` to the correct position in the database method exports, ensuring proper functionality. - Improved test coverage for agent loading, including handling of non-existent agents and user permissions. 
* chore: reorder memory method exports for consistency - Moved `deleteAllUserMemories` to the correct position in the exported memory methods, ensuring a consistent and logical order of method exports in `memory.ts`.
This commit is contained in:
parent
a85e99ff45
commit
a6fb257bcf
182 changed files with 8548 additions and 8105 deletions
|
|
@@ -5,8 +5,8 @@ const {
|
|||
parseTextParts,
|
||||
findLastSeparatorIndex,
|
||||
} = require('librechat-data-provider');
|
||||
const { getMessage } = require('~/models/Message');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { getMessage } = require('~/models');
|
||||
|
||||
/**
|
||||
* @param {string[]} voiceIds - Array of voice IDs
|
||||
|
|
|
|||
|
|
@@ -3,7 +3,7 @@ const { createChunkProcessor, splitTextIntoChunks } = require('./streamAudio');
|
|||
jest.mock('keyv');
|
||||
|
||||
const globalCache = {};
|
||||
jest.mock('~/models/Message', () => {
|
||||
jest.mock('~/models', () => {
|
||||
return {
|
||||
getMessage: jest.fn().mockImplementation((messageId) => {
|
||||
return globalCache[messageId] || null;
|
||||
|
|
|
|||
|
|
@@ -8,8 +8,7 @@ const {
|
|||
EModelEndpoint,
|
||||
PermissionTypes,
|
||||
} = require('librechat-data-provider');
|
||||
const { getRoleByName } = require('~/models/Role');
|
||||
const { Files } = require('~/models');
|
||||
const { getRoleByName, getFiles } = require('~/models');
|
||||
|
||||
/**
|
||||
* Process file search results from tool calls
|
||||
|
|
@@ -127,7 +126,7 @@ async function enhanceSourcesWithMetadata(sources, appConfig) {
|
|||
|
||||
let fileMetadataMap = {};
|
||||
try {
|
||||
const files = await Files.find({ file_id: { $in: fileIds } });
|
||||
const files = await getFiles({ file_id: { $in: fileIds } });
|
||||
fileMetadataMap = files.reduce((map, file) => {
|
||||
map[file.file_id] = file;
|
||||
return map;
|
||||
|
|
|
|||
|
|
@@ -1,7 +1,7 @@
|
|||
const { logger } = require('@librechat/data-schemas');
|
||||
const { PermissionBits, ResourceType } = require('librechat-data-provider');
|
||||
const { checkPermission } = require('~/server/services/PermissionService');
|
||||
const { getAgent } = require('~/models/Agent');
|
||||
const { getAgent } = require('~/models');
|
||||
|
||||
/**
|
||||
* Checks if a user has access to multiple files through a shared agent (batch operation)
|
||||
|
|
|
|||
|
|
@@ -26,16 +26,15 @@ const {
|
|||
resizeImageBuffer,
|
||||
} = require('~/server/services/Files/images');
|
||||
const { addResourceFileId, deleteResourceFileId } = require('~/server/controllers/assistants/v2');
|
||||
const { addAgentResourceFile, removeAgentResourceFiles } = require('~/models/Agent');
|
||||
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
|
||||
const { loadAuthValues } = require('~/server/services/Tools/credentials');
|
||||
const { createFile, updateFileUsage, deleteFiles } = require('~/models');
|
||||
const { getFileStrategy } = require('~/server/utils/getFileStrategy');
|
||||
const { checkCapability } = require('~/server/services/Config');
|
||||
const { LB_QueueAsyncCall } = require('~/server/utils/queue');
|
||||
const { getStrategyFunctions } = require('./strategies');
|
||||
const { determineFileType } = require('~/server/utils');
|
||||
const { STTService } = require('./Audio/STTService');
|
||||
const db = require('~/models');
|
||||
|
||||
/**
|
||||
* Creates a modular file upload wrapper that ensures filename sanitization
|
||||
|
|
@@ -210,7 +209,7 @@ const processDeleteRequest = async ({ req, files }) => {
|
|||
|
||||
if (agentFiles.length > 0) {
|
||||
promises.push(
|
||||
removeAgentResourceFiles({
|
||||
db.removeAgentResourceFiles({
|
||||
agent_id: req.body.agent_id,
|
||||
files: agentFiles,
|
||||
}),
|
||||
|
|
@@ -218,7 +217,7 @@ const processDeleteRequest = async ({ req, files }) => {
|
|||
}
|
||||
|
||||
await Promise.allSettled(promises);
|
||||
await deleteFiles(resolvedFileIds);
|
||||
await db.deleteFiles(resolvedFileIds);
|
||||
};
|
||||
|
||||
/**
|
||||
|
|
@@ -250,7 +249,7 @@ const processFileURL = async ({ fileStrategy, userId, URL, fileName, basePath, c
|
|||
dimensions = {},
|
||||
} = (await saveURL({ userId, URL, fileName, basePath })) || {};
|
||||
const filepath = await getFileURL({ fileName: `${userId}/${fileName}`, basePath });
|
||||
return await createFile(
|
||||
return await db.createFile(
|
||||
{
|
||||
user: userId,
|
||||
file_id: v4(),
|
||||
|
|
@@ -296,7 +295,7 @@ const processImageFile = async ({ req, res, metadata, returnFile = false }) => {
|
|||
endpoint,
|
||||
});
|
||||
|
||||
const result = await createFile(
|
||||
const result = await db.createFile(
|
||||
{
|
||||
user: req.user.id,
|
||||
file_id,
|
||||
|
|
@@ -348,7 +347,7 @@ const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true })
|
|||
}
|
||||
const fileName = `${file_id}-${filename}`;
|
||||
const filepath = await saveBuffer({ userId: req.user.id, fileName, buffer });
|
||||
return await createFile(
|
||||
return await db.createFile(
|
||||
{
|
||||
user: req.user.id,
|
||||
file_id,
|
||||
|
|
@@ -434,7 +433,7 @@ const processFileUpload = async ({ req, res, metadata }) => {
|
|||
filepath = result.filepath;
|
||||
}
|
||||
|
||||
const result = await createFile(
|
||||
const result = await db.createFile(
|
||||
{
|
||||
user: req.user.id,
|
||||
file_id: id ?? file_id,
|
||||
|
|
@@ -538,14 +537,14 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
|
|||
});
|
||||
|
||||
if (!messageAttachment && tool_resource) {
|
||||
await addAgentResourceFile({
|
||||
req,
|
||||
await db.addAgentResourceFile({
|
||||
file_id,
|
||||
agent_id,
|
||||
tool_resource,
|
||||
updatingUserId: req?.user?.id,
|
||||
});
|
||||
}
|
||||
const result = await createFile(fileInfo, true);
|
||||
const result = await db.createFile(fileInfo, true);
|
||||
return res
|
||||
.status(200)
|
||||
.json({ message: 'Agent file uploaded and processed successfully', ...result });
|
||||
|
|
@@ -655,11 +654,11 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
|
|||
let filepath = _filepath;
|
||||
|
||||
if (!messageAttachment && tool_resource) {
|
||||
await addAgentResourceFile({
|
||||
req,
|
||||
await db.addAgentResourceFile({
|
||||
file_id,
|
||||
agent_id,
|
||||
tool_resource,
|
||||
updatingUserId: req?.user?.id,
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@@ -690,7 +689,7 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
|
|||
width,
|
||||
});
|
||||
|
||||
const result = await createFile(fileInfo, true);
|
||||
const result = await db.createFile(fileInfo, true);
|
||||
|
||||
res.status(200).json({ message: 'Agent file uploaded and processed successfully', ...result });
|
||||
};
|
||||
|
|
@@ -736,10 +735,10 @@ const processOpenAIFile = async ({
|
|||
};
|
||||
|
||||
if (saveFile) {
|
||||
await createFile(file, true);
|
||||
await db.createFile(file, true);
|
||||
} else if (updateUsage) {
|
||||
try {
|
||||
await updateFileUsage({ file_id });
|
||||
await db.updateFileUsage({ file_id });
|
||||
} catch (error) {
|
||||
logger.error('Error updating file usage', error);
|
||||
}
|
||||
|
|
@@ -777,7 +776,7 @@ const processOpenAIImageOutput = async ({ req, buffer, file_id, filename, fileEx
|
|||
file_id,
|
||||
filename,
|
||||
};
|
||||
createFile(file, true);
|
||||
db.createFile(file, true);
|
||||
return file;
|
||||
};
|
||||
|
||||
|
|
@@ -921,7 +920,7 @@ async function saveBase64Image(
|
|||
fileName: filename,
|
||||
buffer: image.buffer,
|
||||
});
|
||||
return await createFile(
|
||||
return await db.createFile(
|
||||
{
|
||||
type,
|
||||
source,
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue