Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-12-24 04:10:15 +01:00
🏗️ refactor: Extract DB layers to data-schemas for shared use (#7650)
* refactor: move model definitions and database-related methods to packages/data-schemas
* ci: update tests due to new DB structure
  fix: disable mocking `librechat-data-provider`
  feat: Add schema exports to data-schemas package
  - Introduced a new schema module that exports various schemas including action, agent, and user schemas.
  - Updated index.ts to include the new schema exports for better modularity and organization.
  ci: fix appleStrategy tests
  fix: Agent.spec.js
  ci: refactor handleTools tests to use MongoMemoryServer for in-memory database
  fix: getLogStores imports
  ci: update banViolation tests to use MongoMemoryServer and improve session mocking
  test: refactor samlStrategy tests to improve mock configurations and user handling
  ci: fix crypto mock in handleText tests for improved accuracy
  ci: refactor spendTokens tests to improve model imports and setup
  ci: refactor Message model tests to use MongoMemoryServer and improve database interactions
* refactor: streamline IMessage interface and move feedback properties to types/message.ts
* refactor: use exported initializeRoles from `data-schemas`, remove api workspace version (this serves as an example of future migrations that still need to happen)
* refactor: update model imports to use destructuring from `~/db/models` for consistency and clarity
* refactor: remove unused mongoose imports from model files for cleaner code
* refactor: remove unused mongoose imports from Share, Prompt, and Transaction model files for cleaner code
* refactor: remove unused import in Transaction model for cleaner code
* ci: update deploy workflow to reference new Docker Dev Branch Images Build and add new workflow for building Docker images on dev branch
* chore: cleanup imports
parent: 4cbab86b45
commit: a2fc7d312a
161 changed files with 2998 additions and 2088 deletions
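The net effect of the refactor is easiest to see from the consumer side: the package exports factory functions that register schemas, models, and methods on a mongoose instance owned by the host application, plus the shared winston/meili loggers. A minimal usage sketch, assuming the package is consumed as `@librechat/data-schemas` and that `MONGO_URI` points at the target database (both names are illustrative, not confirmed by this diff):

import mongoose from 'mongoose';
import { createModels, createMethods, logger } from '@librechat/data-schemas';

async function bootstrap() {
  // The host application owns the connection; the package only registers
  // models and methods against the mongoose instance it is handed.
  await mongoose.connect(process.env.MONGO_URI ?? 'mongodb://127.0.0.1:27017/LibreChat');

  const models = createModels(mongoose); // User, Session, Token, Role, Message, ...
  const methods = createMethods(mongoose); // findUser, createSession, initializeRoles, ...

  await methods.initializeRoles();
  logger.info(`Registered ${Object.keys(models).length} models`);
}

bootstrap();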
@@ -48,6 +48,7 @@
"@types/express": "^5.0.0",
|
||||
"@types/jest": "^29.5.2",
|
||||
"@types/node": "^20.3.0",
|
||||
"@types/traverse": "^0.6.37",
|
||||
"jest": "^29.5.0",
|
||||
"jest-junit": "^16.0.0",
|
||||
"rimraf": "^5.0.1",
|
||||
|
|
@ -58,11 +59,17 @@
|
|||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.0.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"mongoose": "^8.12.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"keyv": "^5.3.2"
|
||||
"keyv": "^5.3.2",
|
||||
"mongoose": "^8.12.1",
|
||||
"librechat-data-provider": "*",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"klona": "^2.0.6",
|
||||
"lodash": "^4.17.21",
|
||||
"meilisearch": "^0.38.0",
|
||||
"traverse": "^0.6.11",
|
||||
"winston": "^3.17.0",
|
||||
"winston-daily-rotate-file": "^5.0.0"
|
||||
},
|
||||
"publishConfig": {
|
||||
"registry": "https://registry.npmjs.org/",
|
||||
|
|
|
|||
75  packages/data-schemas/src/config/meiliLogger.ts  Normal file
@@ -0,0 +1,75 @@
import path from 'path';
import winston from 'winston';
import 'winston-daily-rotate-file';

const logDir = path.join(__dirname, '..', 'logs');

const { NODE_ENV, DEBUG_LOGGING = 'false' } = process.env;

const useDebugLogging =
  (typeof DEBUG_LOGGING === 'string' && DEBUG_LOGGING.toLowerCase() === 'true') ||
  DEBUG_LOGGING === 'true';

const levels: winston.config.AbstractConfigSetLevels = {
  error: 0,
  warn: 1,
  info: 2,
  http: 3,
  verbose: 4,
  debug: 5,
  activity: 6,
  silly: 7,
};

winston.addColors({
  info: 'green',
  warn: 'italic yellow',
  error: 'red',
  debug: 'blue',
});

const level = (): string => {
  const env = NODE_ENV || 'development';
  const isDevelopment = env === 'development';
  return isDevelopment ? 'debug' : 'warn';
};

const fileFormat = winston.format.combine(
  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
  winston.format.errors({ stack: true }),
  winston.format.splat(),
);

const logLevel = useDebugLogging ? 'debug' : 'error';
const transports: winston.transport[] = [
  new winston.transports.DailyRotateFile({
    level: logLevel,
    filename: `${logDir}/meiliSync-%DATE%.log`,
    datePattern: 'YYYY-MM-DD',
    zippedArchive: true,
    maxSize: '20m',
    maxFiles: '14d',
    format: fileFormat,
  }),
];

const consoleFormat = winston.format.combine(
  winston.format.colorize({ all: true }),
  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
  winston.format.printf((info) => `${info.timestamp} ${info.level}: ${info.message}`),
);

transports.push(
  new winston.transports.Console({
    level: 'info',
    format: consoleFormat,
  }),
);

const logger = winston.createLogger({
  level: level(),
  levels,
  transports,
});

export default logger;
241  packages/data-schemas/src/config/parsers.ts  Normal file
@@ -0,0 +1,241 @@
import { klona } from 'klona';
|
||||
import winston from 'winston';
|
||||
import traverse from 'traverse';
|
||||
|
||||
const SPLAT_SYMBOL = Symbol.for('splat');
|
||||
const MESSAGE_SYMBOL = Symbol.for('message');
|
||||
const CONSOLE_JSON_STRING_LENGTH: number =
|
||||
parseInt(process.env.CONSOLE_JSON_STRING_LENGTH || '', 10) || 255;
|
||||
|
||||
const sensitiveKeys: RegExp[] = [
|
||||
/^(sk-)[^\s]+/, // OpenAI API key pattern
|
||||
/(Bearer )[^\s]+/, // Header: Bearer token pattern
|
||||
/(api-key:? )[^\s]+/, // Header: API key pattern
|
||||
/(key=)[^\s]+/, // URL query param: sensitive key pattern (Google)
|
||||
];
|
||||
|
||||
/**
|
||||
* Determines if a given value string is sensitive and returns matching regex patterns.
|
||||
*
|
||||
* @param valueStr - The value string to check.
|
||||
* @returns An array of regex patterns that match the value string.
|
||||
*/
|
||||
function getMatchingSensitivePatterns(valueStr: string): RegExp[] {
|
||||
if (valueStr) {
|
||||
// Filter and return all regex patterns that match the value string
|
||||
return sensitiveKeys.filter((regex) => regex.test(valueStr));
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Redacts sensitive information from a console message and trims it to a specified length if provided.
|
||||
* @param str - The console message to be redacted.
|
||||
* @param trimLength - The optional length at which to trim the redacted message.
|
||||
* @returns The redacted and optionally trimmed console message.
|
||||
*/
|
||||
function redactMessage(str: string, trimLength?: number): string {
|
||||
if (!str) {
|
||||
return '';
|
||||
}
|
||||
|
||||
const patterns = getMatchingSensitivePatterns(str);
|
||||
patterns.forEach((pattern) => {
|
||||
str = str.replace(pattern, '$1[REDACTED]');
|
||||
});
|
||||
|
||||
if (trimLength !== undefined && str.length > trimLength) {
|
||||
return `${str.substring(0, trimLength)}...`;
|
||||
}
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
/**
|
||||
* Redacts sensitive information from log messages if the log level is 'error'.
|
||||
* Note: Intentionally mutates the object.
|
||||
* @param info - The log information object.
|
||||
* @returns The modified log information object.
|
||||
*/
|
||||
const redactFormat = winston.format((info: winston.Logform.TransformableInfo) => {
|
||||
if (info.level === 'error') {
|
||||
// Type guard to ensure message is a string
|
||||
if (typeof info.message === 'string') {
|
||||
info.message = redactMessage(info.message);
|
||||
}
|
||||
|
||||
// Handle MESSAGE_SYMBOL with type safety
|
||||
const symbolValue = (info as Record<string | symbol, unknown>)[MESSAGE_SYMBOL];
|
||||
if (typeof symbolValue === 'string') {
|
||||
(info as Record<string | symbol, unknown>)[MESSAGE_SYMBOL] = redactMessage(symbolValue);
|
||||
}
|
||||
}
|
||||
return info;
|
||||
});
|
||||
|
||||
/**
|
||||
* Truncates long strings, especially base64 image data, within log messages.
|
||||
*
|
||||
* @param value - The value to be inspected and potentially truncated.
|
||||
* @param length - The length at which to truncate the value. Default: 100.
|
||||
* @returns The truncated or original value.
|
||||
*/
|
||||
const truncateLongStrings = (value: unknown, length = 100): unknown => {
|
||||
if (typeof value === 'string') {
|
||||
return value.length > length ? value.substring(0, length) + '... [truncated]' : value;
|
||||
}
|
||||
|
||||
return value;
|
||||
};
|
||||
|
||||
/**
|
||||
* An array mapping function that truncates long strings (objects converted to JSON strings).
|
||||
* @param item - The item to be condensed.
|
||||
* @returns The condensed item.
|
||||
*/
|
||||
const condenseArray = (item: unknown): string | unknown => {
|
||||
if (typeof item === 'string') {
|
||||
return truncateLongStrings(JSON.stringify(item));
|
||||
} else if (typeof item === 'object') {
|
||||
return truncateLongStrings(JSON.stringify(item));
|
||||
}
|
||||
return item;
|
||||
};
|
||||
|
||||
/**
|
||||
* Formats log messages for debugging purposes.
|
||||
* - Truncates long strings within log messages.
|
||||
* - Condenses arrays by truncating long strings and objects as strings within array items.
|
||||
* - Redacts sensitive information from log messages if the log level is 'error'.
|
||||
* - Converts log information object to a formatted string.
|
||||
*
|
||||
* @param options - The options for formatting log messages.
|
||||
* @returns The formatted log message.
|
||||
*/
|
||||
const debugTraverse = winston.format.printf(
|
||||
({ level, message, timestamp, ...metadata }: Record<string, unknown>) => {
|
||||
if (!message) {
|
||||
return `${timestamp} ${level}`;
|
||||
}
|
||||
|
||||
// Type-safe version of the CJS logic: !message?.trim || typeof message !== 'string'
|
||||
if (typeof message !== 'string' || !message.trim) {
|
||||
return `${timestamp} ${level}: ${JSON.stringify(message)}`;
|
||||
}
|
||||
|
||||
let msg = `${timestamp} ${level}: ${truncateLongStrings(message.trim(), 150)}`;
|
||||
|
||||
try {
|
||||
if (level !== 'debug') {
|
||||
return msg;
|
||||
}
|
||||
|
||||
if (!metadata) {
|
||||
return msg;
|
||||
}
|
||||
|
||||
// Type-safe access to SPLAT_SYMBOL using bracket notation
|
||||
const metadataRecord = metadata as Record<string | symbol, unknown>;
|
||||
const splatArray = metadataRecord[SPLAT_SYMBOL];
|
||||
const debugValue = Array.isArray(splatArray) ? splatArray[0] : undefined;
|
||||
|
||||
if (!debugValue) {
|
||||
return msg;
|
||||
}
|
||||
|
||||
if (debugValue && Array.isArray(debugValue)) {
|
||||
msg += `\n${JSON.stringify(debugValue.map(condenseArray))}`;
|
||||
return msg;
|
||||
}
|
||||
|
||||
if (typeof debugValue !== 'object') {
|
||||
return (msg += ` ${debugValue}`);
|
||||
}
|
||||
|
||||
msg += '\n{';
|
||||
|
||||
const copy = klona(metadata);
|
||||
|
||||
traverse(copy).forEach(function (this: traverse.TraverseContext, value: unknown) {
|
||||
if (typeof this?.key === 'symbol') {
|
||||
return;
|
||||
}
|
||||
|
||||
let _parentKey = '';
|
||||
const parent = this.parent;
|
||||
|
||||
if (typeof parent?.key !== 'symbol' && parent?.key) {
|
||||
_parentKey = parent.key;
|
||||
}
|
||||
|
||||
const parentKey = `${parent && parent.notRoot ? _parentKey + '.' : ''}`;
|
||||
const tabs = `${parent && parent.notRoot ? ' ' : ' '}`;
|
||||
const currentKey = this?.key ?? 'unknown';
|
||||
|
||||
if (this.isLeaf && typeof value === 'string') {
|
||||
const truncatedText = truncateLongStrings(value);
|
||||
msg += `\n${tabs}${parentKey}${currentKey}: ${JSON.stringify(truncatedText)},`;
|
||||
} else if (this.notLeaf && Array.isArray(value) && value.length > 0) {
|
||||
const currentMessage = `\n${tabs}// ${value.length} ${currentKey.replace(/s$/, '')}(s)`;
|
||||
this.update(currentMessage, true);
|
||||
msg += currentMessage;
|
||||
const stringifiedArray = value.map(condenseArray);
|
||||
msg += `\n${tabs}${parentKey}${currentKey}: [${stringifiedArray}],`;
|
||||
} else if (this.isLeaf && typeof value === 'function') {
|
||||
msg += `\n${tabs}${parentKey}${currentKey}: function,`;
|
||||
} else if (this.isLeaf) {
|
||||
msg += `\n${tabs}${parentKey}${currentKey}: ${value},`;
|
||||
}
|
||||
});
|
||||
|
||||
msg += '\n}';
|
||||
return msg;
|
||||
} catch (e: unknown) {
|
||||
const errorMessage = e instanceof Error ? e.message : 'Unknown error';
|
||||
return (msg += `\n[LOGGER PARSING ERROR] ${errorMessage}`);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* Truncates long string values in JSON log objects.
|
||||
* Prevents outputting extremely long values (e.g., base64, blobs).
|
||||
*/
|
||||
const jsonTruncateFormat = winston.format((info: winston.Logform.TransformableInfo) => {
|
||||
const truncateLongStrings = (str: string, maxLength: number): string =>
|
||||
str.length > maxLength ? str.substring(0, maxLength) + '...' : str;
|
||||
|
||||
const seen = new WeakSet<object>();
|
||||
|
||||
const truncateObject = (obj: unknown): unknown => {
|
||||
if (typeof obj !== 'object' || obj === null) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
// Handle circular references - now with proper object type
|
||||
if (seen.has(obj)) {
|
||||
return '[Circular]';
|
||||
}
|
||||
seen.add(obj);
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
return obj.map((item) => truncateObject(item));
|
||||
}
|
||||
|
||||
// We know this is an object at this point
|
||||
const objectRecord = obj as Record<string, unknown>;
|
||||
const newObj: Record<string, unknown> = {};
|
||||
Object.entries(objectRecord).forEach(([key, value]) => {
|
||||
if (typeof value === 'string') {
|
||||
newObj[key] = truncateLongStrings(value, CONSOLE_JSON_STRING_LENGTH);
|
||||
} else {
|
||||
newObj[key] = truncateObject(value);
|
||||
}
|
||||
});
|
||||
return newObj;
|
||||
};
|
||||
|
||||
return truncateObject(info) as winston.Logform.TransformableInfo;
|
||||
});
|
||||
|
||||
export { redactFormat, redactMessage, debugTraverse, jsonTruncateFormat };
|
||||
123  packages/data-schemas/src/config/winston.ts  Normal file
@@ -0,0 +1,123 @@
import path from 'path';
|
||||
import winston from 'winston';
|
||||
import 'winston-daily-rotate-file';
|
||||
import { redactFormat, redactMessage, debugTraverse, jsonTruncateFormat } from './parsers';
|
||||
|
||||
// Define log directory
|
||||
const logDir = path.join(__dirname, '..', 'logs');
|
||||
|
||||
// Type-safe environment variables
|
||||
const { NODE_ENV, DEBUG_LOGGING, CONSOLE_JSON, DEBUG_CONSOLE } = process.env;
|
||||
|
||||
const useConsoleJson = typeof CONSOLE_JSON === 'string' && CONSOLE_JSON.toLowerCase() === 'true';
|
||||
|
||||
const useDebugConsole = typeof DEBUG_CONSOLE === 'string' && DEBUG_CONSOLE.toLowerCase() === 'true';
|
||||
|
||||
const useDebugLogging = typeof DEBUG_LOGGING === 'string' && DEBUG_LOGGING.toLowerCase() === 'true';
|
||||
|
||||
// Define custom log levels
|
||||
const levels: winston.config.AbstractConfigSetLevels = {
|
||||
error: 0,
|
||||
warn: 1,
|
||||
info: 2,
|
||||
http: 3,
|
||||
verbose: 4,
|
||||
debug: 5,
|
||||
activity: 6,
|
||||
silly: 7,
|
||||
};
|
||||
|
||||
winston.addColors({
|
||||
info: 'green',
|
||||
warn: 'italic yellow',
|
||||
error: 'red',
|
||||
debug: 'blue',
|
||||
});
|
||||
|
||||
const level = (): string => {
|
||||
const env = NODE_ENV || 'development';
|
||||
return env === 'development' ? 'debug' : 'warn';
|
||||
};
|
||||
|
||||
const fileFormat = winston.format.combine(
|
||||
redactFormat(),
|
||||
winston.format.timestamp({ format: () => new Date().toISOString() }),
|
||||
winston.format.errors({ stack: true }),
|
||||
winston.format.splat(),
|
||||
);
|
||||
|
||||
const transports: winston.transport[] = [
|
||||
new winston.transports.DailyRotateFile({
|
||||
level: 'error',
|
||||
filename: `${logDir}/error-%DATE%.log`,
|
||||
datePattern: 'YYYY-MM-DD',
|
||||
zippedArchive: true,
|
||||
maxSize: '20m',
|
||||
maxFiles: '14d',
|
||||
format: fileFormat,
|
||||
}),
|
||||
];
|
||||
|
||||
if (useDebugLogging) {
|
||||
transports.push(
|
||||
new winston.transports.DailyRotateFile({
|
||||
level: 'debug',
|
||||
filename: `${logDir}/debug-%DATE%.log`,
|
||||
datePattern: 'YYYY-MM-DD',
|
||||
zippedArchive: true,
|
||||
maxSize: '20m',
|
||||
maxFiles: '14d',
|
||||
format: winston.format.combine(fileFormat, debugTraverse),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
const consoleFormat = winston.format.combine(
|
||||
redactFormat(),
|
||||
winston.format.colorize({ all: true }),
|
||||
winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
|
||||
winston.format.printf((info) => {
|
||||
const message = `${info.timestamp} ${info.level}: ${info.message}`;
|
||||
return info.level.includes('error') ? redactMessage(message) : message;
|
||||
}),
|
||||
);
|
||||
|
||||
let consoleLogLevel: string = 'info';
|
||||
if (useDebugConsole) {
|
||||
consoleLogLevel = 'debug';
|
||||
}
|
||||
|
||||
// Add console transport
|
||||
if (useDebugConsole) {
|
||||
transports.push(
|
||||
new winston.transports.Console({
|
||||
level: consoleLogLevel,
|
||||
format: useConsoleJson
|
||||
? winston.format.combine(fileFormat, jsonTruncateFormat(), winston.format.json())
|
||||
: winston.format.combine(fileFormat, debugTraverse),
|
||||
}),
|
||||
);
|
||||
} else if (useConsoleJson) {
|
||||
transports.push(
|
||||
new winston.transports.Console({
|
||||
level: consoleLogLevel,
|
||||
format: winston.format.combine(fileFormat, jsonTruncateFormat(), winston.format.json()),
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
transports.push(
|
||||
new winston.transports.Console({
|
||||
level: consoleLogLevel,
|
||||
format: consoleFormat,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
// Create logger
|
||||
const logger = winston.createLogger({
|
||||
level: level(),
|
||||
levels,
|
||||
transports,
|
||||
});
|
||||
|
||||
export default logger;
|
||||
17  packages/data-schemas/src/crypto/index.ts  Normal file
@@ -0,0 +1,17 @@
import jwt from 'jsonwebtoken';
import { webcrypto } from 'node:crypto';
import { SignPayloadParams } from '~/types';

export async function signPayload({
  payload,
  secret,
  expirationTime,
}: SignPayloadParams): Promise<string> {
  return jwt.sign(payload, secret!, { expiresIn: expirationTime });
}

export async function hashToken(str: string): Promise<string> {
  const data = new TextEncoder().encode(str);
  const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
  return Buffer.from(hashBuffer).toString('hex');
}
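As a usage sketch of how these two helpers fit together (the variable names below are illustrative; the same pattern appears in the session methods later in this diff): the signed refresh token is returned to the caller, while only its SHA-256 hash is persisted for later lookup.

const refreshToken = await signPayload({
  payload: { id: userId, sessionId },
  secret: process.env.JWT_REFRESH_SECRET!,
  expirationTime: 60 * 60 * 24 * 7, // seconds
});
// Store the hash, never the raw token; incoming tokens are re-hashed and compared.
const refreshTokenHash = await hashToken(refreshToken);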
@@ -1,68 +1,7 @@
export { default as actionSchema } from './schema/action';
export type { IAction } from './schema/action';

export { default as agentSchema } from './schema/agent';
export type { IAgent } from './schema/agent';

export { default as assistantSchema } from './schema/assistant';
export type { IAssistant } from './schema/assistant';

export { default as balanceSchema } from './schema/balance';
export type { IBalance } from './schema/balance';

export { default as bannerSchema } from './schema/banner';
export type { IBanner } from './schema/banner';

export { default as categoriesSchema } from './schema/categories';
export type { ICategory } from './schema/categories';

export { default as conversationTagSchema } from './schema/conversationTag';
export type { IConversationTag } from './schema/conversationTag';

export { default as convoSchema } from './schema/convo';
export type { IConversation } from './schema/convo';

export { default as fileSchema } from './schema/file';
export type { IMongoFile } from './schema/file';

export { default as keySchema } from './schema/key';
export type { IKey } from './schema/key';

export { default as messageSchema } from './schema/message';
export type { IMessage } from './schema/message';

export { default as pluginAuthSchema } from './schema/pluginAuth';
export type { IPluginAuth } from './schema/pluginAuth';

export { default as presetSchema } from './schema/preset';
export type { IPreset } from './schema/preset';

export { default as projectSchema } from './schema/project';
export type { IMongoProject } from './schema/project';

export { default as promptSchema } from './schema/prompt';
export type { IPrompt } from './schema/prompt';

export { default as promptGroupSchema } from './schema/promptGroup';
export type { IPromptGroup, IPromptGroupDocument } from './schema/promptGroup';

export { default as roleSchema } from './schema/role';
export type { IRole } from './schema/role';

export { default as sessionSchema } from './schema/session';
export type { ISession } from './schema/session';

export { default as shareSchema } from './schema/share';
export type { ISharedLink } from './schema/share';

export { default as tokenSchema } from './schema/token';
export type { IToken } from './schema/token';

export { default as toolCallSchema } from './schema/toolCall';
export type { IToolCallData } from './schema/toolCall';

export { default as transactionSchema } from './schema/transaction';
export type { ITransaction } from './schema/transaction';

export { default as userSchema } from './schema/user';
export type { IUser } from './schema/user';
export * from './crypto';
export * from './schema';
export { createModels } from './models';
export { createMethods } from './methods';
export type * from './types';
export { default as logger } from './config/winston';
export { default as meiliLogger } from './config/meiliLogger';
18  packages/data-schemas/src/methods/index.ts  Normal file
@@ -0,0 +1,18 @@
import { createUserMethods, type UserMethods } from './user';
import { createSessionMethods, type SessionMethods } from './session';
import { createTokenMethods, type TokenMethods } from './token';
import { createRoleMethods, type RoleMethods } from './role';

/**
 * Creates all database methods for all collections
 */
export function createMethods(mongoose: typeof import('mongoose')) {
  return {
    ...createUserMethods(mongoose),
    ...createSessionMethods(mongoose),
    ...createTokenMethods(mongoose),
    ...createRoleMethods(mongoose),
  };
}

export type AllMethods = UserMethods & SessionMethods & TokenMethods & RoleMethods;
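Every methods module follows the same factory shape, so adding a collection is mechanical: export a create*Methods factory that closes over the injected mongoose instance, then spread it into createMethods above. A hedged sketch of a hypothetical balance module (the module and method names are illustrative, not part of this diff):

import type { IBalance } from '~/types';

export function createBalanceMethods(mongoose: typeof import('mongoose')) {
  /** Returns the current token credits for a user, or 0 when no record exists. */
  async function getTokenCredits(userId: string): Promise<number> {
    const Balance = mongoose.models.Balance;
    const record = (await Balance.findOne({ user: userId }).lean()) as IBalance | null;
    return record?.tokenCredits ?? 0;
  }

  return { getTokenCredits };
}

export type BalanceMethods = ReturnType<typeof createBalanceMethods>;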
50  packages/data-schemas/src/methods/role.ts  Normal file
@@ -0,0 +1,50 @@
import { roleDefaults, SystemRoles } from 'librechat-data-provider';

// Factory function that takes mongoose instance and returns the methods
export function createRoleMethods(mongoose: typeof import('mongoose')) {
  /**
   * Initialize default roles in the system.
   * Creates the default roles (ADMIN, USER) if they don't exist in the database.
   * Updates existing roles with new permission types if they're missing.
   */
  async function initializeRoles() {
    const Role = mongoose.models.Role;

    for (const roleName of [SystemRoles.ADMIN, SystemRoles.USER]) {
      let role = await Role.findOne({ name: roleName });
      const defaultPerms = roleDefaults[roleName].permissions;

      if (!role) {
        // Create new role if it doesn't exist.
        role = new Role(roleDefaults[roleName]);
      } else {
        // Ensure role.permissions is defined.
        role.permissions = role.permissions || {};
        // For each permission type in defaults, add it if missing.
        for (const permType of Object.keys(defaultPerms)) {
          if (role.permissions[permType] == null) {
            role.permissions[permType] = defaultPerms[permType as keyof typeof defaultPerms];
          }
        }
      }
      await role.save();
    }
  }

  /**
   * List all roles in the system (for testing purposes)
   * Returns an array of all roles with their names and permissions
   */
  async function listRoles() {
    const Role = mongoose.models.Role;
    return await Role.find({}).select('name permissions').lean();
  }

  // Return all methods you want to expose
  return {
    listRoles,
    initializeRoles,
  };
}

export type RoleMethods = ReturnType<typeof createRoleMethods>;
264  packages/data-schemas/src/methods/session.ts  Normal file
@@ -0,0 +1,264 @@
import type * as t from '~/types/session';
|
||||
import { signPayload, hashToken } from '~/crypto';
|
||||
import logger from '~/config/winston';
|
||||
|
||||
export class SessionError extends Error {
|
||||
public code: string;
|
||||
|
||||
constructor(message: string, code: string = 'SESSION_ERROR') {
|
||||
super(message);
|
||||
this.name = 'SessionError';
|
||||
this.code = code;
|
||||
}
|
||||
}
|
||||
|
||||
const { REFRESH_TOKEN_EXPIRY } = process.env ?? {};
|
||||
const expires = eval(REFRESH_TOKEN_EXPIRY ?? '0') ?? 1000 * 60 * 60 * 24 * 7; // 7 days default
|
||||
|
||||
// Factory function that takes mongoose instance and returns the methods
|
||||
export function createSessionMethods(mongoose: typeof import('mongoose')) {
|
||||
const Session = mongoose.models.Session;
|
||||
|
||||
/**
|
||||
* Creates a new session for a user
|
||||
*/
|
||||
async function createSession(
|
||||
userId: string,
|
||||
options: t.CreateSessionOptions = {},
|
||||
): Promise<t.SessionResult> {
|
||||
if (!userId) {
|
||||
throw new SessionError('User ID is required', 'INVALID_USER_ID');
|
||||
}
|
||||
|
||||
try {
|
||||
const session = new Session({
|
||||
user: userId,
|
||||
expiration: options.expiration || new Date(Date.now() + expires),
|
||||
});
|
||||
const refreshToken = await generateRefreshToken(session);
|
||||
|
||||
return { session, refreshToken };
|
||||
} catch (error) {
|
||||
logger.error('[createSession] Error creating session:', error);
|
||||
throw new SessionError('Failed to create session', 'CREATE_SESSION_FAILED');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds a session by various parameters
|
||||
*/
|
||||
async function findSession(
|
||||
params: t.SessionSearchParams,
|
||||
options: t.SessionQueryOptions = { lean: true },
|
||||
): Promise<t.ISession | null> {
|
||||
try {
|
||||
const query: Record<string, unknown> = {};
|
||||
|
||||
if (!params.refreshToken && !params.userId && !params.sessionId) {
|
||||
throw new SessionError(
|
||||
'At least one search parameter is required',
|
||||
'INVALID_SEARCH_PARAMS',
|
||||
);
|
||||
}
|
||||
|
||||
if (params.refreshToken) {
|
||||
const tokenHash = await hashToken(params.refreshToken);
|
||||
query.refreshTokenHash = tokenHash;
|
||||
}
|
||||
|
||||
if (params.userId) {
|
||||
query.user = params.userId;
|
||||
}
|
||||
|
||||
if (params.sessionId) {
|
||||
const sessionId =
|
||||
typeof params.sessionId === 'object' &&
|
||||
params.sessionId !== null &&
|
||||
'sessionId' in params.sessionId
|
||||
? (params.sessionId as { sessionId: string }).sessionId
|
||||
: (params.sessionId as string);
|
||||
if (!mongoose.Types.ObjectId.isValid(sessionId)) {
|
||||
throw new SessionError('Invalid session ID format', 'INVALID_SESSION_ID');
|
||||
}
|
||||
query._id = sessionId;
|
||||
}
|
||||
|
||||
// Add expiration check to only return valid sessions
|
||||
query.expiration = { $gt: new Date() };
|
||||
|
||||
const sessionQuery = Session.findOne(query);
|
||||
|
||||
if (options.lean) {
|
||||
return (await sessionQuery.lean()) as t.ISession | null;
|
||||
}
|
||||
|
||||
return await sessionQuery.exec();
|
||||
} catch (error) {
|
||||
logger.error('[findSession] Error finding session:', error);
|
||||
throw new SessionError('Failed to find session', 'FIND_SESSION_FAILED');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates session expiration
|
||||
*/
|
||||
async function updateExpiration(
|
||||
session: t.ISession | string,
|
||||
newExpiration?: Date,
|
||||
): Promise<t.ISession> {
|
||||
try {
|
||||
const sessionDoc = typeof session === 'string' ? await Session.findById(session) : session;
|
||||
|
||||
if (!sessionDoc) {
|
||||
throw new SessionError('Session not found', 'SESSION_NOT_FOUND');
|
||||
}
|
||||
|
||||
sessionDoc.expiration = newExpiration || new Date(Date.now() + expires);
|
||||
return await sessionDoc.save();
|
||||
} catch (error) {
|
||||
logger.error('[updateExpiration] Error updating session:', error);
|
||||
throw new SessionError('Failed to update session expiration', 'UPDATE_EXPIRATION_FAILED');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a session by refresh token or session ID
|
||||
*/
|
||||
async function deleteSession(params: t.DeleteSessionParams): Promise<{ deletedCount?: number }> {
|
||||
try {
|
||||
if (!params.refreshToken && !params.sessionId) {
|
||||
throw new SessionError(
|
||||
'Either refreshToken or sessionId is required',
|
||||
'INVALID_DELETE_PARAMS',
|
||||
);
|
||||
}
|
||||
|
||||
const query: Record<string, unknown> = {};
|
||||
|
||||
if (params.refreshToken) {
|
||||
query.refreshTokenHash = await hashToken(params.refreshToken);
|
||||
}
|
||||
|
||||
if (params.sessionId) {
|
||||
query._id = params.sessionId;
|
||||
}
|
||||
|
||||
const result = await Session.deleteOne(query);
|
||||
|
||||
if (result.deletedCount === 0) {
|
||||
logger.warn('[deleteSession] No session found to delete');
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
logger.error('[deleteSession] Error deleting session:', error);
|
||||
throw new SessionError('Failed to delete session', 'DELETE_SESSION_FAILED');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes all sessions for a user
|
||||
*/
|
||||
async function deleteAllUserSessions(
|
||||
userId: string | { userId: string },
|
||||
options: t.DeleteAllSessionsOptions = {},
|
||||
): Promise<{ deletedCount?: number }> {
|
||||
try {
|
||||
if (!userId) {
|
||||
throw new SessionError('User ID is required', 'INVALID_USER_ID');
|
||||
}
|
||||
|
||||
const userIdString =
|
||||
typeof userId === 'object' && userId !== null ? userId.userId : (userId as string);
|
||||
|
||||
if (!mongoose.Types.ObjectId.isValid(userIdString)) {
|
||||
throw new SessionError('Invalid user ID format', 'INVALID_USER_ID_FORMAT');
|
||||
}
|
||||
|
||||
const query: Record<string, unknown> = { user: userIdString };
|
||||
|
||||
if (options.excludeCurrentSession && options.currentSessionId) {
|
||||
query._id = { $ne: options.currentSessionId };
|
||||
}
|
||||
|
||||
const result = await Session.deleteMany(query);
|
||||
|
||||
if (result.deletedCount && result.deletedCount > 0) {
|
||||
logger.debug(
|
||||
`[deleteAllUserSessions] Deleted ${result.deletedCount} sessions for user ${userIdString}.`,
|
||||
);
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
logger.error('[deleteAllUserSessions] Error deleting user sessions:', error);
|
||||
throw new SessionError('Failed to delete user sessions', 'DELETE_ALL_SESSIONS_FAILED');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a refresh token for a session
|
||||
*/
|
||||
async function generateRefreshToken(session: t.ISession): Promise<string> {
|
||||
if (!session || !session.user) {
|
||||
throw new SessionError('Invalid session object', 'INVALID_SESSION');
|
||||
}
|
||||
|
||||
try {
|
||||
const expiresIn = session.expiration ? session.expiration.getTime() : Date.now() + expires;
|
||||
|
||||
if (!session.expiration) {
|
||||
session.expiration = new Date(expiresIn);
|
||||
}
|
||||
|
||||
const refreshToken = await signPayload({
|
||||
payload: {
|
||||
id: session.user,
|
||||
sessionId: session._id,
|
||||
},
|
||||
secret: process.env.JWT_REFRESH_SECRET!,
|
||||
expirationTime: Math.floor((expiresIn - Date.now()) / 1000),
|
||||
});
|
||||
|
||||
session.refreshTokenHash = await hashToken(refreshToken);
|
||||
await session.save();
|
||||
|
||||
return refreshToken;
|
||||
} catch (error) {
|
||||
logger.error('[generateRefreshToken] Error generating refresh token:', error);
|
||||
throw new SessionError('Failed to generate refresh token', 'GENERATE_TOKEN_FAILED');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Counts active sessions for a user
|
||||
*/
|
||||
async function countActiveSessions(userId: string): Promise<number> {
|
||||
try {
|
||||
if (!userId) {
|
||||
throw new SessionError('User ID is required', 'INVALID_USER_ID');
|
||||
}
|
||||
|
||||
return await Session.countDocuments({
|
||||
user: userId,
|
||||
expiration: { $gt: new Date() },
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('[countActiveSessions] Error counting active sessions:', error);
|
||||
throw new SessionError('Failed to count active sessions', 'COUNT_SESSIONS_FAILED');
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
findSession,
|
||||
SessionError,
|
||||
deleteSession,
|
||||
createSession,
|
||||
updateExpiration,
|
||||
countActiveSessions,
|
||||
generateRefreshToken,
|
||||
deleteAllUserSessions,
|
||||
};
|
||||
}
|
||||
|
||||
export type SessionMethods = ReturnType<typeof createSessionMethods>;
|
||||
105  packages/data-schemas/src/methods/token.ts  Normal file
@@ -0,0 +1,105 @@
import { IToken, TokenCreateData, TokenQuery, TokenUpdateData, TokenDeleteResult } from '~/types';
|
||||
import logger from '~/config/winston';
|
||||
|
||||
// Factory function that takes mongoose instance and returns the methods
|
||||
export function createTokenMethods(mongoose: typeof import('mongoose')) {
|
||||
/**
|
||||
* Creates a new Token instance.
|
||||
*/
|
||||
async function createToken(tokenData: TokenCreateData): Promise<IToken> {
|
||||
try {
|
||||
const Token = mongoose.models.Token;
|
||||
const currentTime = new Date();
|
||||
const expiresAt = new Date(currentTime.getTime() + tokenData.expiresIn * 1000);
|
||||
|
||||
const newTokenData = {
|
||||
...tokenData,
|
||||
createdAt: currentTime,
|
||||
expiresAt,
|
||||
};
|
||||
|
||||
return await Token.create(newTokenData);
|
||||
} catch (error) {
|
||||
logger.debug('An error occurred while creating token:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a Token document that matches the provided query.
|
||||
*/
|
||||
async function updateToken(
|
||||
query: TokenQuery,
|
||||
updateData: TokenUpdateData,
|
||||
): Promise<IToken | null> {
|
||||
try {
|
||||
const Token = mongoose.models.Token;
|
||||
return await Token.findOneAndUpdate(query, updateData, { new: true });
|
||||
} catch (error) {
|
||||
logger.debug('An error occurred while updating token:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes all Token documents that match the provided token, user ID, or email.
|
||||
*/
|
||||
async function deleteTokens(query: TokenQuery): Promise<TokenDeleteResult> {
|
||||
try {
|
||||
const Token = mongoose.models.Token;
|
||||
return await Token.deleteMany({
|
||||
$or: [
|
||||
{ userId: query.userId },
|
||||
{ token: query.token },
|
||||
{ email: query.email },
|
||||
{ identifier: query.identifier },
|
||||
],
|
||||
});
|
||||
} catch (error) {
|
||||
logger.debug('An error occurred while deleting tokens:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds a Token document that matches the provided query.
|
||||
*/
|
||||
async function findToken(query: TokenQuery): Promise<IToken | null> {
|
||||
try {
|
||||
const Token = mongoose.models.Token;
|
||||
const conditions = [];
|
||||
|
||||
if (query.userId) {
|
||||
conditions.push({ userId: query.userId });
|
||||
}
|
||||
if (query.token) {
|
||||
conditions.push({ token: query.token });
|
||||
}
|
||||
if (query.email) {
|
||||
conditions.push({ email: query.email });
|
||||
}
|
||||
if (query.identifier) {
|
||||
conditions.push({ identifier: query.identifier });
|
||||
}
|
||||
|
||||
const token = await Token.findOne({
|
||||
$and: conditions,
|
||||
}).lean();
|
||||
|
||||
return token as IToken | null;
|
||||
} catch (error) {
|
||||
logger.debug('An error occurred while finding token:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Return all methods
|
||||
return {
|
||||
findToken,
|
||||
createToken,
|
||||
updateToken,
|
||||
deleteTokens,
|
||||
};
|
||||
}
|
||||
|
||||
export type TokenMethods = ReturnType<typeof createTokenMethods>;
|
||||
174  packages/data-schemas/src/methods/user.ts  Normal file
@@ -0,0 +1,174 @@
import mongoose, { FilterQuery } from 'mongoose';
|
||||
import type { IUser, BalanceConfig, UserCreateData, UserUpdateResult } from '~/types';
|
||||
import { signPayload } from '~/crypto';
|
||||
|
||||
/** Factory function that takes mongoose instance and returns the methods */
|
||||
export function createUserMethods(mongoose: typeof import('mongoose')) {
|
||||
/**
|
||||
* Search for a single user based on partial data and return matching user document as plain object.
|
||||
*/
|
||||
async function findUser(
|
||||
searchCriteria: FilterQuery<IUser>,
|
||||
fieldsToSelect?: string | string[] | null,
|
||||
): Promise<IUser | null> {
|
||||
const User = mongoose.models.User;
|
||||
const query = User.findOne(searchCriteria);
|
||||
if (fieldsToSelect) {
|
||||
query.select(fieldsToSelect);
|
||||
}
|
||||
return (await query.lean()) as IUser | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Count the number of user documents in the collection based on the provided filter.
|
||||
*/
|
||||
async function countUsers(filter: FilterQuery<IUser> = {}): Promise<number> {
|
||||
const User = mongoose.models.User;
|
||||
return await User.countDocuments(filter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new user, optionally with a TTL of 1 week.
|
||||
*/
|
||||
async function createUser(
|
||||
data: UserCreateData,
|
||||
balanceConfig?: BalanceConfig,
|
||||
disableTTL: boolean = true,
|
||||
returnUser: boolean = false,
|
||||
): Promise<mongoose.Types.ObjectId | Partial<IUser>> {
|
||||
const User = mongoose.models.User;
|
||||
const Balance = mongoose.models.Balance;
|
||||
|
||||
const userData: Partial<IUser> = {
|
||||
...data,
|
||||
expiresAt: disableTTL ? undefined : new Date(Date.now() + 604800 * 1000), // 1 week in milliseconds
|
||||
};
|
||||
|
||||
if (disableTTL) {
|
||||
delete userData.expiresAt;
|
||||
}
|
||||
|
||||
const user = await User.create(userData);
|
||||
|
||||
// If balance is enabled, create or update a balance record for the user
|
||||
if (balanceConfig?.enabled && balanceConfig?.startBalance) {
|
||||
const update: {
|
||||
$inc: { tokenCredits: number };
|
||||
$set?: {
|
||||
autoRefillEnabled: boolean;
|
||||
refillIntervalValue: number;
|
||||
refillIntervalUnit: string;
|
||||
refillAmount: number;
|
||||
};
|
||||
} = {
|
||||
$inc: { tokenCredits: balanceConfig.startBalance },
|
||||
};
|
||||
|
||||
if (
|
||||
balanceConfig.autoRefillEnabled &&
|
||||
balanceConfig.refillIntervalValue != null &&
|
||||
balanceConfig.refillIntervalUnit != null &&
|
||||
balanceConfig.refillAmount != null
|
||||
) {
|
||||
update.$set = {
|
||||
autoRefillEnabled: true,
|
||||
refillIntervalValue: balanceConfig.refillIntervalValue,
|
||||
refillIntervalUnit: balanceConfig.refillIntervalUnit,
|
||||
refillAmount: balanceConfig.refillAmount,
|
||||
};
|
||||
}
|
||||
|
||||
await Balance.findOneAndUpdate({ user: user._id }, update, {
|
||||
upsert: true,
|
||||
new: true,
|
||||
}).lean();
|
||||
}
|
||||
|
||||
if (returnUser) {
|
||||
return user.toObject() as Partial<IUser>;
|
||||
}
|
||||
return user._id as mongoose.Types.ObjectId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a user with new data without overwriting existing properties.
|
||||
*/
|
||||
async function updateUser(userId: string, updateData: Partial<IUser>): Promise<IUser | null> {
|
||||
const User = mongoose.models.User;
|
||||
const updateOperation = {
|
||||
$set: updateData,
|
||||
$unset: { expiresAt: '' }, // Remove the expiresAt field to prevent TTL
|
||||
};
|
||||
return (await User.findByIdAndUpdate(userId, updateOperation, {
|
||||
new: true,
|
||||
runValidators: true,
|
||||
}).lean()) as IUser | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve a user by ID and convert the found user document to a plain object.
|
||||
*/
|
||||
async function getUserById(
|
||||
userId: string,
|
||||
fieldsToSelect?: string | string[] | null,
|
||||
): Promise<IUser | null> {
|
||||
const User = mongoose.models.User;
|
||||
const query = User.findById(userId);
|
||||
if (fieldsToSelect) {
|
||||
query.select(fieldsToSelect);
|
||||
}
|
||||
return (await query.lean()) as IUser | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a user by their unique ID.
|
||||
*/
|
||||
async function deleteUserById(userId: string): Promise<UserUpdateResult> {
|
||||
try {
|
||||
const User = mongoose.models.User;
|
||||
const result = await User.deleteOne({ _id: userId });
|
||||
if (result.deletedCount === 0) {
|
||||
return { deletedCount: 0, message: 'No user found with that ID.' };
|
||||
}
|
||||
return { deletedCount: result.deletedCount, message: 'User was deleted successfully.' };
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||
throw new Error('Error deleting user: ' + errorMessage);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a JWT token for a given user.
|
||||
*/
|
||||
async function generateToken(user: IUser): Promise<string> {
|
||||
if (!user) {
|
||||
throw new Error('No user provided');
|
||||
}
|
||||
|
||||
const expires = eval(process.env.SESSION_EXPIRY ?? '0') ?? 1000 * 60 * 15;
|
||||
|
||||
return await signPayload({
|
||||
payload: {
|
||||
id: user._id,
|
||||
username: user.username,
|
||||
provider: user.provider,
|
||||
email: user.email,
|
||||
},
|
||||
secret: process.env.JWT_SECRET,
|
||||
expirationTime: expires / 1000,
|
||||
});
|
||||
}
|
||||
|
||||
// Return all methods
|
||||
return {
|
||||
findUser,
|
||||
countUsers,
|
||||
createUser,
|
||||
updateUser,
|
||||
getUserById,
|
||||
deleteUserById,
|
||||
generateToken,
|
||||
};
|
||||
}
|
||||
|
||||
export type UserMethods = ReturnType<typeof createUserMethods>;
|
||||
9  packages/data-schemas/src/models/action.ts  Normal file
@@ -0,0 +1,9 @@
import actionSchema from '~/schema/action';
import type { IAction } from '~/types';

/**
 * Creates or returns the Action model using the provided mongoose instance and schema
 */
export function createActionModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Action || mongoose.model<IAction>('Action', actionSchema);
}
9  packages/data-schemas/src/models/agent.ts  Normal file
@@ -0,0 +1,9 @@
import agentSchema from '~/schema/agent';
import type { IAgent } from '~/types';

/**
 * Creates or returns the Agent model using the provided mongoose instance and schema
 */
export function createAgentModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Agent || mongoose.model<IAgent>('Agent', agentSchema);
}
9  packages/data-schemas/src/models/assistant.ts  Normal file
@@ -0,0 +1,9 @@
import assistantSchema from '~/schema/assistant';
import type { IAssistant } from '~/types';

/**
 * Creates or returns the Assistant model using the provided mongoose instance and schema
 */
export function createAssistantModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Assistant || mongoose.model<IAssistant>('Assistant', assistantSchema);
}
9  packages/data-schemas/src/models/balance.ts  Normal file
@@ -0,0 +1,9 @@
import balanceSchema from '~/schema/balance';
import type * as t from '~/types';

/**
 * Creates or returns the Balance model using the provided mongoose instance and schema
 */
export function createBalanceModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Balance || mongoose.model<t.IBalance>('Balance', balanceSchema);
}
9  packages/data-schemas/src/models/banner.ts  Normal file
@@ -0,0 +1,9 @@
import bannerSchema from '~/schema/banner';
import type { IBanner } from '~/types';

/**
 * Creates or returns the Banner model using the provided mongoose instance and schema
 */
export function createBannerModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Banner || mongoose.model<IBanner>('Banner', bannerSchema);
}
11  packages/data-schemas/src/models/conversationTag.ts  Normal file
@@ -0,0 +1,11 @@
import conversationTagSchema, { IConversationTag } from '~/schema/conversationTag';

/**
 * Creates or returns the ConversationTag model using the provided mongoose instance and schema
 */
export function createConversationTagModel(mongoose: typeof import('mongoose')) {
  return (
    mongoose.models.ConversationTag ||
    mongoose.model<IConversationTag>('ConversationTag', conversationTagSchema)
  );
}
11  packages/data-schemas/src/models/convo.ts  Normal file
@@ -0,0 +1,11 @@
import type * as t from '~/types';
import convoSchema from '~/schema/convo';

/**
 * Creates or returns the Conversation model using the provided mongoose instance and schema
 */
export function createConversationModel(mongoose: typeof import('mongoose')) {
  return (
    mongoose.models.Conversation || mongoose.model<t.IConversation>('Conversation', convoSchema)
  );
}
9  packages/data-schemas/src/models/file.ts  Normal file
@@ -0,0 +1,9 @@
import fileSchema from '~/schema/file';
import type { IMongoFile } from '~/types';

/**
 * Creates or returns the File model using the provided mongoose instance and schema
 */
export function createFileModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.File || mongoose.model<IMongoFile>('File', fileSchema);
}
52  packages/data-schemas/src/models/index.ts  Normal file
@@ -0,0 +1,52 @@
import { createUserModel } from './user';
import { createTokenModel } from './token';
import { createSessionModel } from './session';
import { createBalanceModel } from './balance';
import { createConversationModel } from './convo';
import { createMessageModel } from './message';
import { createAgentModel } from './agent';
import { createRoleModel } from './role';
import { createActionModel } from './action';
import { createAssistantModel } from './assistant';
import { createFileModel } from './file';
import { createBannerModel } from './banner';
import { createProjectModel } from './project';
import { createKeyModel } from './key';
import { createPluginAuthModel } from './pluginAuth';
import { createTransactionModel } from './transaction';
import { createPresetModel } from './preset';
import { createPromptModel } from './prompt';
import { createPromptGroupModel } from './promptGroup';
import { createConversationTagModel } from './conversationTag';
import { createSharedLinkModel } from './sharedLink';
import { createToolCallModel } from './toolCall';

/**
 * Creates all database models for all collections
 */
export function createModels(mongoose: typeof import('mongoose')) {
  return {
    User: createUserModel(mongoose),
    Token: createTokenModel(mongoose),
    Session: createSessionModel(mongoose),
    Balance: createBalanceModel(mongoose),
    Conversation: createConversationModel(mongoose),
    Message: createMessageModel(mongoose),
    Agent: createAgentModel(mongoose),
    Role: createRoleModel(mongoose),
    Action: createActionModel(mongoose),
    Assistant: createAssistantModel(mongoose),
    File: createFileModel(mongoose),
    Banner: createBannerModel(mongoose),
    Project: createProjectModel(mongoose),
    Key: createKeyModel(mongoose),
    PluginAuth: createPluginAuthModel(mongoose),
    Transaction: createTransactionModel(mongoose),
    Preset: createPresetModel(mongoose),
    Prompt: createPromptModel(mongoose),
    PromptGroup: createPromptGroupModel(mongoose),
    ConversationTag: createConversationTagModel(mongoose),
    SharedLink: createSharedLinkModel(mongoose),
    ToolCall: createToolCallModel(mongoose),
  };
}
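Note that every create*Model helper above uses the same get-or-create guard (mongoose.models.X || mongoose.model(...)), so calling createModels repeatedly against one connection (for example, across test files sharing a single MongoMemoryServer instance) returns the already-registered models instead of throwing an OverwriteModelError. A minimal illustration, again assuming the package name `@librechat/data-schemas`:

import mongoose from 'mongoose';
import { createModels } from '@librechat/data-schemas';

// Safe to call more than once against the same mongoose instance:
const first = createModels(mongoose);
const second = createModels(mongoose);
console.log(first.User === second.User); // true: the registered model is reused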
8  packages/data-schemas/src/models/key.ts  Normal file
@@ -0,0 +1,8 @@
import keySchema, { IKey } from '~/schema/key';

/**
 * Creates or returns the Key model using the provided mongoose instance and schema
 */
export function createKeyModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Key || mongoose.model<IKey>('Key', keySchema);
}
9  packages/data-schemas/src/models/message.ts  Normal file
@@ -0,0 +1,9 @@
import messageSchema from '~/schema/message';
import type * as t from '~/types';

/**
 * Creates or returns the Message model using the provided mongoose instance and schema
 */
export function createMessageModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Message || mongoose.model<t.IMessage>('Message', messageSchema);
}
8  packages/data-schemas/src/models/pluginAuth.ts  Normal file
@@ -0,0 +1,8 @@
import pluginAuthSchema, { IPluginAuth } from '~/schema/pluginAuth';

/**
 * Creates or returns the PluginAuth model using the provided mongoose instance and schema
 */
export function createPluginAuthModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.PluginAuth || mongoose.model<IPluginAuth>('PluginAuth', pluginAuthSchema);
}
515  packages/data-schemas/src/models/plugins/mongoMeili.ts  Normal file
@@ -0,0 +1,515 @@
import _ from 'lodash';
|
||||
import { MeiliSearch, Index } from 'meilisearch';
|
||||
import mongoose, { Schema, Document, Model, Query } from 'mongoose';
|
||||
import logger from '~/config/meiliLogger';
|
||||
|
||||
interface MongoMeiliOptions {
|
||||
host: string;
|
||||
apiKey: string;
|
||||
indexName: string;
|
||||
primaryKey: string;
|
||||
}
|
||||
|
||||
interface MeiliIndexable {
|
||||
[key: string]: unknown;
|
||||
_meiliIndex?: boolean;
|
||||
}
|
||||
|
||||
interface ContentItem {
|
||||
type: string;
|
||||
text?: string;
|
||||
}
|
||||
|
||||
interface DocumentWithMeiliIndex extends Document {
|
||||
_meiliIndex?: boolean;
|
||||
preprocessObjectForIndex?: () => Record<string, unknown>;
|
||||
addObjectToMeili?: () => Promise<void>;
|
||||
updateObjectToMeili?: () => Promise<void>;
|
||||
deleteObjectFromMeili?: () => Promise<void>;
|
||||
postSaveHook?: () => void;
|
||||
postUpdateHook?: () => void;
|
||||
postRemoveHook?: () => void;
|
||||
conversationId?: string;
|
||||
content?: ContentItem[];
|
||||
messageId?: string;
|
||||
unfinished?: boolean;
|
||||
messages?: unknown[];
|
||||
title?: string;
|
||||
toJSON(): Record<string, unknown>;
|
||||
}
|
||||
|
||||
interface SchemaWithMeiliMethods extends Model<DocumentWithMeiliIndex> {
  syncWithMeili(): Promise<void>;
  setMeiliIndexSettings(settings: Record<string, unknown>): Promise<unknown>;
  meiliSearch(q: string, params: Record<string, unknown>, populate: boolean): Promise<unknown>;
}

// Environment flags
/**
 * Flag to indicate if search is enabled based on environment variables.
 */
const searchEnabled = process.env.SEARCH != null && process.env.SEARCH.toLowerCase() === 'true';

/**
 * Flag to indicate if MeiliSearch is enabled based on required environment variables.
 */
const meiliEnabled =
  process.env.MEILI_HOST != null && process.env.MEILI_MASTER_KEY != null && searchEnabled;

/**
 * Local implementation of parseTextParts to avoid dependency on librechat-data-provider
 * Extracts text content from an array of content items
 */
const parseTextParts = (content: ContentItem[]): string => {
  if (!Array.isArray(content)) {
    return '';
  }

  return content
    .filter((item) => item.type === 'text' && typeof item.text === 'string')
    .map((item) => item.text)
    .join(' ')
    .trim();
};

/**
 * Local implementation to handle Bing convoId conversion
 */
const cleanUpPrimaryKeyValue = (value: string): string => {
  return value.replace(/--/g, '|');
};

/**
 * Validates the required options for configuring the mongoMeili plugin.
 */
const validateOptions = (options: Partial<MongoMeiliOptions>): void => {
  const requiredKeys: (keyof MongoMeiliOptions)[] = ['host', 'apiKey', 'indexName'];
  requiredKeys.forEach((key) => {
    if (!options[key]) {
      throw new Error(`Missing mongoMeili Option: ${key}`);
    }
  });
};

/**
 * Factory function to create a MeiliMongooseModel class which extends a Mongoose model.
 * This class contains static and instance methods to synchronize and manage the MeiliSearch index
 * corresponding to the MongoDB collection.
 *
 * @param config - Configuration object.
 * @param config.index - The MeiliSearch index object.
 * @param config.attributesToIndex - List of attributes to index.
 * @returns A class definition that will be loaded into the Mongoose schema.
 */
const createMeiliMongooseModel = ({
  index,
  attributesToIndex,
}: {
  index: Index<MeiliIndexable>;
  attributesToIndex: string[];
}) => {
  const primaryKey = attributesToIndex[0];

  class MeiliMongooseModel {
    /**
     * Synchronizes the data between the MongoDB collection and the MeiliSearch index.
     *
     * The synchronization process involves:
     * 1. Fetching all documents from the MongoDB collection and MeiliSearch index.
     * 2. Comparing documents from both sources.
     * 3. Deleting documents from MeiliSearch that no longer exist in MongoDB.
     * 4. Adding documents to MeiliSearch that exist in MongoDB but not in the index.
     * 5. Updating documents in MeiliSearch if key fields (such as `text` or `title`) differ.
     * 6. Updating the `_meiliIndex` field in MongoDB to indicate the indexing status.
     *
     * Note: The function processes documents in batches because MeiliSearch's
     * `index.getDocuments` requires an exact limit and `index.addDocuments` does not handle
     * partial failures in a batch.
     *
     * @returns {Promise<void>} Resolves when the synchronization is complete.
     */
    static async syncWithMeili(this: SchemaWithMeiliMethods): Promise<void> {
      try {
        let moreDocuments = true;
        const mongoDocuments = await this.find().lean();

        const format = (doc: Record<string, unknown>) =>
          _.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));

        const mongoMap = new Map(
          mongoDocuments.map((doc) => {
            const typedDoc = doc as Record<string, unknown>;
            return [typedDoc[primaryKey], format(typedDoc)];
          }),
        );
        const indexMap = new Map<unknown, Record<string, unknown>>();
        let offset = 0;
        const batchSize = 1000;

        while (moreDocuments) {
          const batch = await index.getDocuments({ limit: batchSize, offset });
          if (batch.results.length === 0) {
            moreDocuments = false;
          }
          for (const doc of batch.results) {
            indexMap.set(doc[primaryKey], format(doc));
          }
          offset += batchSize;
        }

        logger.debug('[syncWithMeili]', { indexMap: indexMap.size, mongoMap: mongoMap.size });

        const updateOps: Array<{
          updateOne: {
            filter: Record<string, unknown>;
            update: { $set: { _meiliIndex: boolean } };
          };
        }> = [];

        // Process documents present in the MeiliSearch index
        for (const [id, doc] of indexMap) {
          const update: Record<string, unknown> = {};
          update[primaryKey] = id;
          if (mongoMap.has(id)) {
            const mongoDoc = mongoMap.get(id);
            if (
              (doc.text && doc.text !== mongoDoc?.text) ||
              (doc.title && doc.title !== mongoDoc?.title)
            ) {
              logger.debug(
                `[syncWithMeili] ${id} had document discrepancy in ${
                  doc.text ? 'text' : 'title'
                } field`,
              );
              updateOps.push({
                updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
              });
              await index.addDocuments([doc]);
            }
          } else {
            await index.deleteDocument(id as string);
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
            });
          }
        }

        // Process documents present in MongoDB
        for (const [id, doc] of mongoMap) {
          const update: Record<string, unknown> = {};
          update[primaryKey] = id;
          if (!indexMap.has(id)) {
            await index.addDocuments([doc]);
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
            });
          } else if (doc._meiliIndex === false) {
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
            });
          }
        }

        if (updateOps.length > 0) {
          await this.collection.bulkWrite(updateOps);
          logger.debug(
            `[syncWithMeili] Finished indexing ${
              primaryKey === 'messageId' ? 'messages' : 'conversations'
            }`,
          );
        }
      } catch (error) {
        logger.error('[syncWithMeili] Error adding document to Meili', error);
      }
    }

    /**
     * Updates settings for the MeiliSearch index
     */
    static async setMeiliIndexSettings(settings: Record<string, unknown>): Promise<unknown> {
      return await index.updateSettings(settings);
    }

    /**
     * Searches the MeiliSearch index and optionally populates results
     */
    static async meiliSearch(
      this: SchemaWithMeiliMethods,
      q: string,
      params: Record<string, unknown>,
      populate: boolean,
    ): Promise<unknown> {
      const data = await index.search(q, params);

      if (populate) {
        const query: Record<string, unknown> = {};
        query[primaryKey] = _.map(data.hits, (hit) =>
          cleanUpPrimaryKeyValue(hit[primaryKey] as string),
        );

        const projection = Object.keys(this.schema.obj).reduce<Record<string, number>>(
          (results, key) => {
            if (!key.startsWith('$')) {
              results[key] = 1;
            }
            return results;
          },
          { _id: 1, __v: 1 },
        );

        const hitsFromMongoose = await this.find(query, projection).lean();

        const populatedHits = data.hits.map((hit) => {
          const queryObj: Record<string, unknown> = {};
          queryObj[primaryKey] = hit[primaryKey];
          const originalHit = _.find(hitsFromMongoose, (item) => {
            const typedItem = item as Record<string, unknown>;
            return typedItem[primaryKey] === hit[primaryKey];
          });

          return {
            ...(originalHit && typeof originalHit === 'object' ? originalHit : {}),
            ...hit,
          };
        });
        data.hits = populatedHits;
      }

      return data;
    }

    /**
     * Preprocesses the current document for indexing
     */
    preprocessObjectForIndex(this: DocumentWithMeiliIndex): Record<string, unknown> {
      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
        k.startsWith('$'),
      );

      if (
        object.conversationId &&
        typeof object.conversationId === 'string' &&
        object.conversationId.includes('|')
      ) {
        object.conversationId = object.conversationId.replace(/\|/g, '--');
      }

      if (object.content && Array.isArray(object.content)) {
        object.text = parseTextParts(object.content);
        delete object.content;
      }

      return object;
    }

    /**
     * Adds the current document to the MeiliSearch index
     */
    async addObjectToMeili(this: DocumentWithMeiliIndex): Promise<void> {
      const object = this.preprocessObjectForIndex!();
      try {
        await index.addDocuments([object]);
      } catch (error) {
        logger.error('[addObjectToMeili] Error adding document to Meili', error);
      }

      await this.collection.updateMany(
        { _id: this._id as mongoose.Types.ObjectId },
        { $set: { _meiliIndex: true } },
      );
    }

    /**
     * Updates the current document in the MeiliSearch index
     */
    async updateObjectToMeili(this: DocumentWithMeiliIndex): Promise<void> {
      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
        k.startsWith('$'),
      );
      await index.updateDocuments([object]);
    }

    /**
     * Deletes the current document from the MeiliSearch index.
     *
     * @returns {Promise<void>}
     */
    async deleteObjectFromMeili(this: DocumentWithMeiliIndex): Promise<void> {
      await index.deleteDocument(this._id as string);
    }

    /**
     * Post-save hook to synchronize the document with MeiliSearch.
     *
     * If the document is already indexed (i.e. `_meiliIndex` is true), it updates it;
     * otherwise, it adds the document to the index.
     */
    postSaveHook(this: DocumentWithMeiliIndex): void {
      if (this._meiliIndex) {
        this.updateObjectToMeili!();
      } else {
        this.addObjectToMeili!();
      }
    }

    /**
     * Post-update hook to update the document in MeiliSearch.
     *
     * This hook is triggered after a document update, ensuring that changes are
     * propagated to the MeiliSearch index if the document is indexed.
     */
    postUpdateHook(this: DocumentWithMeiliIndex): void {
      if (this._meiliIndex) {
        this.updateObjectToMeili!();
      }
    }

    /**
     * Post-remove hook to delete the document from MeiliSearch.
     *
     * This hook is triggered after a document is removed, ensuring that the document
     * is also removed from the MeiliSearch index if it was previously indexed.
     */
    postRemoveHook(this: DocumentWithMeiliIndex): void {
      if (this._meiliIndex) {
        this.deleteObjectFromMeili!();
      }
    }
  }

  return MeiliMongooseModel;
};

/**
 * Mongoose plugin to synchronize MongoDB collections with a MeiliSearch index.
 *
 * This plugin:
 * - Validates the provided options.
 * - Adds a `_meiliIndex` field to the schema to track indexing status.
 * - Sets up a MeiliSearch client and creates an index if it doesn't already exist.
 * - Loads class methods for syncing, searching, and managing documents in MeiliSearch.
 * - Registers Mongoose hooks (post-save, post-update, post-remove, etc.) to maintain index consistency.
 *
 * @param schema - The Mongoose schema to which the plugin is applied.
 * @param options - Configuration options.
 * @param options.host - The MeiliSearch host.
 * @param options.apiKey - The MeiliSearch API key.
 * @param options.indexName - The name of the MeiliSearch index.
 * @param options.primaryKey - The primary key field for indexing.
 */
export default function mongoMeili(schema: Schema, options: MongoMeiliOptions): void {
  validateOptions(options);

  // Add _meiliIndex field to the schema to track if a document has been indexed in MeiliSearch.
  schema.add({
    _meiliIndex: {
      type: Boolean,
      required: false,
      select: false,
      default: false,
    },
  });

  const { host, apiKey, indexName, primaryKey } = options;

  const client = new MeiliSearch({ host, apiKey });
  client.createIndex(indexName, { primaryKey });
  const index = client.index<MeiliIndexable>(indexName);

  // Collect attributes from the schema that should be indexed
  const attributesToIndex: string[] = [
    ...Object.entries(schema.obj).reduce<string[]>((results, [key, value]) => {
      const schemaValue = value as { meiliIndex?: boolean };
      return schemaValue.meiliIndex ? [...results, key] : results;
    }, []),
  ];

  schema.loadClass(createMeiliMongooseModel({ index, attributesToIndex }));

  // Register Mongoose hooks
  schema.post('save', function (doc: DocumentWithMeiliIndex) {
    doc.postSaveHook?.();
  });

  schema.post('updateOne', function (doc: DocumentWithMeiliIndex) {
    doc.postUpdateHook?.();
  });

  schema.post('deleteOne', function (doc: DocumentWithMeiliIndex) {
    doc.postRemoveHook?.();
  });

  // Pre-deleteMany hook: remove corresponding documents from MeiliSearch when multiple documents are deleted.
  schema.pre('deleteMany', async function (next) {
    if (!meiliEnabled) {
      return next();
    }

    try {
      const conditions = (this as Query<unknown, unknown>).getQuery();

      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) {
        const convoIndex = client.index('convos');
        const deletedConvos = await mongoose
          .model('Conversation')
          .find(conditions as mongoose.FilterQuery<unknown>)
          .lean();
        const promises = deletedConvos.map((convo: Record<string, unknown>) =>
          convoIndex.deleteDocument(convo.conversationId as string),
        );
        await Promise.all(promises);
      }

      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) {
        const messageIndex = client.index('messages');
        const deletedMessages = await mongoose
          .model('Message')
          .find(conditions as mongoose.FilterQuery<unknown>)
          .lean();
        const promises = deletedMessages.map((message: Record<string, unknown>) =>
          messageIndex.deleteDocument(message.messageId as string),
        );
        await Promise.all(promises);
      }
      return next();
    } catch (error) {
      if (meiliEnabled) {
        logger.error(
          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion. Next startup may be slow due to syncing.',
          error,
        );
      }
      return next();
    }
  });

  // Post-findOneAndUpdate hook
  schema.post('findOneAndUpdate', async function (doc: DocumentWithMeiliIndex) {
    if (!meiliEnabled) {
      return;
    }

    if (doc.unfinished) {
      return;
    }

    let meiliDoc: Record<string, unknown> | undefined;
    if (doc.messages) {
      try {
        meiliDoc = await client.index('convos').getDocument(doc.conversationId as string);
      } catch (error: unknown) {
        logger.debug(
          '[MeiliMongooseModel.findOneAndUpdate] Convo not found in MeiliSearch and will index ' +
            doc.conversationId,
          error as Record<string, unknown>,
        );
      }
    }

    if (meiliDoc && meiliDoc.title === doc.title) {
      return;
    }

    doc.postSaveHook?.();
  });
}
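As a side note on how these statics are meant to be consumed (a minimal sketch, not part of this commit): any model compiled from a schema that applies this plugin gains `syncWithMeili`, `setMeiliIndexSettings`, and `meiliSearch`, so a caller might reconcile and query the index roughly as follows. The `MeiliStatics` helper type and `searchConversations` function are illustrative assumptions only.

// Hypothetical usage sketch; assumes a Conversation model whose schema registered the plugin above.
import type { Model } from 'mongoose';

type MeiliStatics = {
  syncWithMeili(): Promise<void>;
  meiliSearch(q: string, params: Record<string, unknown>, populate: boolean): Promise<unknown>;
};

export async function searchConversations(
  Conversation: Model<unknown> & MeiliStatics,
  query: string,
) {
  // One-time reconciliation between MongoDB and the Meili index (e.g. on startup).
  await Conversation.syncWithMeili();
  // Full-text search; passing `true` merges the matching Mongo documents into the hits.
  return Conversation.meiliSearch(query, { limit: 20 }, true);
}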
8
packages/data-schemas/src/models/preset.ts
Normal file
@@ -0,0 +1,8 @@
import presetSchema, { IPreset } from '~/schema/preset';

/**
 * Creates or returns the Preset model using the provided mongoose instance and schema
 */
export function createPresetModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Preset || mongoose.model<IPreset>('Preset', presetSchema);
}

8
packages/data-schemas/src/models/project.ts
Normal file
@@ -0,0 +1,8 @@
import projectSchema, { IMongoProject } from '~/schema/project';

/**
 * Creates or returns the Project model using the provided mongoose instance and schema
 */
export function createProjectModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Project || mongoose.model<IMongoProject>('Project', projectSchema);
}

8
packages/data-schemas/src/models/prompt.ts
Normal file
@@ -0,0 +1,8 @@
import promptSchema, { IPrompt } from '~/schema/prompt';

/**
 * Creates or returns the Prompt model using the provided mongoose instance and schema
 */
export function createPromptModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Prompt || mongoose.model<IPrompt>('Prompt', promptSchema);
}

11
packages/data-schemas/src/models/promptGroup.ts
Normal file
@@ -0,0 +1,11 @@
import promptGroupSchema, { IPromptGroupDocument } from '~/schema/promptGroup';

/**
 * Creates or returns the PromptGroup model using the provided mongoose instance and schema
 */
export function createPromptGroupModel(mongoose: typeof import('mongoose')) {
  return (
    mongoose.models.PromptGroup ||
    mongoose.model<IPromptGroupDocument>('PromptGroup', promptGroupSchema)
  );
}

9
packages/data-schemas/src/models/role.ts
Normal file
@@ -0,0 +1,9 @@
import roleSchema from '~/schema/role';
import type { IRole } from '~/types';

/**
 * Creates or returns the Role model using the provided mongoose instance and schema
 */
export function createRoleModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Role || mongoose.model<IRole>('Role', roleSchema);
}

9
packages/data-schemas/src/models/session.ts
Normal file
@@ -0,0 +1,9 @@
import sessionSchema from '~/schema/session';
import type * as t from '~/types';

/**
 * Creates or returns the Session model using the provided mongoose instance and schema
 */
export function createSessionModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Session || mongoose.model<t.ISession>('Session', sessionSchema);
}

8
packages/data-schemas/src/models/sharedLink.ts
Normal file
@@ -0,0 +1,8 @@
import shareSchema, { ISharedLink } from '~/schema/share';

/**
 * Creates or returns the SharedLink model using the provided mongoose instance and schema
 */
export function createSharedLinkModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.SharedLink || mongoose.model<ISharedLink>('SharedLink', shareSchema);
}

9
packages/data-schemas/src/models/token.ts
Normal file
@@ -0,0 +1,9 @@
import tokenSchema from '~/schema/token';
import type * as t from '~/types';

/**
 * Creates or returns the Token model using the provided mongoose instance and schema
 */
export function createTokenModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.Token || mongoose.model<t.IToken>('Token', tokenSchema);
}

8
packages/data-schemas/src/models/toolCall.ts
Normal file
@@ -0,0 +1,8 @@
import toolCallSchema, { IToolCallData } from '~/schema/toolCall';

/**
 * Creates or returns the ToolCall model using the provided mongoose instance and schema
 */
export function createToolCallModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.ToolCall || mongoose.model<IToolCallData>('ToolCall', toolCallSchema);
}

10
packages/data-schemas/src/models/transaction.ts
Normal file
@@ -0,0 +1,10 @@
import transactionSchema, { ITransaction } from '~/schema/transaction';

/**
 * Creates or returns the Transaction model using the provided mongoose instance and schema
 */
export function createTransactionModel(mongoose: typeof import('mongoose')) {
  return (
    mongoose.models.Transaction || mongoose.model<ITransaction>('Transaction', transactionSchema)
  );
}

9
packages/data-schemas/src/models/user.ts
Normal file
@@ -0,0 +1,9 @@
import userSchema from '~/schema/user';
import type * as t from '~/types';

/**
 * Creates or returns the User model using the provided mongoose instance and schema
 */
export function createUserModel(mongoose: typeof import('mongoose')) {
  return mongoose.models.User || mongoose.model<t.IUser>('User', userSchema);
}
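These factory modules all follow the same pattern: accept a mongoose instance and either return the already-compiled model or create it from the exported schema, which lets a consumer such as the API workspace reuse its own mongoose connection. A minimal sketch of how they might be wired together (the `createModels` helper and its location are assumptions, not part of this commit):

// Illustrative only: aggregating the factories above around one shared mongoose instance.
// Assumes this file lives alongside the factories in packages/data-schemas/src/models/.
import mongoose from 'mongoose';
import { createPresetModel } from './preset';
import { createTokenModel } from './token';
import { createUserModel } from './user';

export function createModels(m: typeof mongoose) {
  return {
    Preset: createPresetModel(m),
    Token: createTokenModel(m),
    User: createUserModel(m),
    // ...remaining factories (Project, Prompt, PromptGroup, Role, Session, SharedLink, ToolCall, Transaction)
  };
}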
packages/data-schemas/src/schema/action.ts
@@ -1,31 +1,5 @@
import mongoose, { Schema, Document } from 'mongoose';

export interface IAction extends Document {
  user: mongoose.Types.ObjectId;
  action_id: string;
  type: string;
  settings?: unknown;
  agent_id?: string;
  assistant_id?: string;
  metadata: {
    api_key?: string;
    auth: {
      authorization_type?: string;
      custom_auth_header?: string;
      type: 'service_http' | 'oauth' | 'none';
      authorization_content_type?: string;
      authorization_url?: string;
      client_url?: string;
      scope?: string;
      token_exchange_method: 'default_post' | 'basic_auth_header' | null;
    };
    domain: string;
    privacy_policy_url?: string;
    raw_spec?: string;
    oauth_client_id?: string;
    oauth_client_secret?: string;
  };
}
import mongoose, { Schema } from 'mongoose';
import type { IAction } from '~/types';

// Define the Auth sub-schema with type-safety.
const AuthSchema = new Schema(
packages/data-schemas/src/schema/agent.ts
@@ -1,33 +1,5 @@
import { Schema, Document, Types } from 'mongoose';
export interface IAgent extends Omit<Document, 'model'> {
  id: string;
  name?: string;
  description?: string;
  instructions?: string;
  avatar?: {
    filepath: string;
    source: string;
  };
  provider: string;
  model: string;
  model_parameters?: Record<string, unknown>;
  artifacts?: string;
  access_level?: number;
  recursion_limit?: number;
  tools?: string[];
  tool_kwargs?: Array<unknown>;
  actions?: string[];
  author: Types.ObjectId;
  authorName?: string;
  hide_sequential_outputs?: boolean;
  end_after_tools?: boolean;
  agent_ids?: string[];
  isCollaborative?: boolean;
  conversation_starters?: string[];
  tool_resources?: unknown;
  projectIds?: Types.ObjectId[];
  versions?: Omit<IAgent, 'versions'>[];
}
import { Schema } from 'mongoose';
import type { IAgent } from '~/types';

const agentSchema = new Schema<IAgent>(
  {
packages/data-schemas/src/schema/assistant.ts
@@ -1,18 +1,5 @@
import { Schema, Document, Types } from 'mongoose';

export interface IAssistant extends Document {
  user: Types.ObjectId;
  assistant_id: string;
  avatar?: {
    filepath: string;
    source: string;
  };
  conversation_starters?: string[];
  access_level?: number;
  file_ids?: string[];
  actions?: string[];
  append_current_datetime?: boolean;
}
import { Schema } from 'mongoose';
import type { IAssistant } from '~/types';

const assistantSchema = new Schema<IAssistant>(
  {
packages/data-schemas/src/schema/balance.ts
@@ -1,17 +1,7 @@
import { Schema, Document, Types } from 'mongoose';
import { Schema } from 'mongoose';
import type * as t from '~/types';

export interface IBalance extends Document {
  user: Types.ObjectId;
  tokenCredits: number;
  // Automatic refill settings
  autoRefillEnabled: boolean;
  refillIntervalValue: number;
  refillIntervalUnit: 'seconds' | 'minutes' | 'hours' | 'days' | 'weeks' | 'months';
  lastRefill: Date;
  refillAmount: number;
}

const balanceSchema = new Schema<IBalance>({
const balanceSchema = new Schema<t.IBalance>({
  user: {
    type: Schema.Types.ObjectId,
    ref: 'User',
packages/data-schemas/src/schema/convo.ts
@@ -1,57 +1,7 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
import { Schema } from 'mongoose';
import mongoMeili from '~/models/plugins/mongoMeili';
import { conversationPreset } from './defaults';

// @ts-ignore
export interface IConversation extends Document {
  conversationId: string;
  title?: string;
  user?: string;
  messages?: Types.ObjectId[];
  agentOptions?: unknown;
  // Fields provided by conversationPreset (adjust types as needed)
  endpoint?: string;
  endpointType?: string;
  model?: string;
  region?: string;
  chatGptLabel?: string;
  examples?: unknown[];
  modelLabel?: string;
  promptPrefix?: string;
  temperature?: number;
  top_p?: number;
  topP?: number;
  topK?: number;
  maxOutputTokens?: number;
  maxTokens?: number;
  presence_penalty?: number;
  frequency_penalty?: number;
  file_ids?: string[];
  resendImages?: boolean;
  promptCache?: boolean;
  thinking?: boolean;
  thinkingBudget?: number;
  system?: string;
  resendFiles?: boolean;
  imageDetail?: string;
  agent_id?: string;
  assistant_id?: string;
  instructions?: string;
  stop?: string[];
  isArchived?: boolean;
  iconURL?: string;
  greeting?: string;
  spec?: string;
  tags?: string[];
  tools?: string[];
  maxContextTokens?: number;
  max_tokens?: number;
  reasoning_effort?: string;
  // Additional fields
  files?: string[];
  expiredAt?: Date;
  createdAt?: Date;
  updatedAt?: Date;
}
import { IConversation } from '~/types';

const convoSchema: Schema<IConversation> = new Schema(
  {
@@ -71,9 +21,9 @@ const convoSchema: Schema<IConversation> = new Schema(
      type: String,
      index: true,
    },
    messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }],
    messages: [{ type: Schema.Types.ObjectId, ref: 'Message' }],
    agentOptions: {
      type: mongoose.Schema.Types.Mixed,
      type: Schema.Types.Mixed,
    },
    ...conversationPreset,
    agent_id: {
@@ -98,4 +48,14 @@ convoSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
convoSchema.index({ createdAt: 1, updatedAt: 1 });
convoSchema.index({ conversationId: 1, user: 1 }, { unique: true });

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  convoSchema.plugin(mongoMeili, {
    host: process.env.MEILI_HOST,
    apiKey: process.env.MEILI_MASTER_KEY,
    /** Note: Will get created automatically if it doesn't exist already */
    indexName: 'convos',
    primaryKey: 'conversationId',
  });
}

export default convoSchema;
packages/data-schemas/src/schema/file.ts
@@ -1,32 +1,6 @@
import mongoose, { Schema, Document, Types } from 'mongoose';
import mongoose, { Schema } from 'mongoose';
import { FileSources } from 'librechat-data-provider';

// @ts-ignore
export interface IMongoFile extends Document {
  user: Types.ObjectId;
  conversationId?: string;
  file_id: string;
  temp_file_id?: string;
  bytes: number;
  text?: string;
  filename: string;
  filepath: string;
  object: 'file';
  embedded?: boolean;
  type: string;
  context?: string;
  usage: number;
  source: string;
  model?: string;
  width?: number;
  height?: number;
  metadata?: {
    fileIdentifier?: string;
  };
  expiresAt?: Date;
  createdAt?: Date;
  updatedAt?: Date;
}
import type { IMongoFile } from '~/types';

const file: Schema<IMongoFile> = new Schema(
  {
23
packages/data-schemas/src/schema/index.ts
Normal file
@@ -0,0 +1,23 @@
export { default as actionSchema } from './action';
export { default as agentSchema } from './agent';
export { default as assistantSchema } from './assistant';
export { default as balanceSchema } from './balance';
export { default as bannerSchema } from './banner';
export { default as categoriesSchema } from './categories';
export { default as conversationTagSchema } from './conversationTag';
export { default as convoSchema } from './convo';
export { default as fileSchema } from './file';
export { default as keySchema } from './key';
export { default as messageSchema } from './message';
export { default as pluginAuthSchema } from './pluginAuth';
export { default as presetSchema } from './preset';
export { default as projectSchema } from './project';
export { default as promptSchema } from './prompt';
export { default as promptGroupSchema } from './promptGroup';
export { default as roleSchema } from './role';
export { default as sessionSchema } from './session';
export { default as shareSchema } from './share';
export { default as tokenSchema } from './token';
export { default as toolCallSchema } from './toolCall';
export { default as transactionSchema } from './transaction';
export { default as userSchema } from './user';
packages/data-schemas/src/schema/message.ts
@@ -1,47 +1,6 @@
import mongoose, { Schema, Document } from 'mongoose';
import { TFeedbackRating, TFeedbackTag } from 'librechat-data-provider';

// @ts-ignore
export interface IMessage extends Document {
  messageId: string;
  conversationId: string;
  user: string;
  model?: string;
  endpoint?: string;
  conversationSignature?: string;
  clientId?: string;
  invocationId?: number;
  parentMessageId?: string;
  tokenCount?: number;
  summaryTokenCount?: number;
  sender?: string;
  text?: string;
  summary?: string;
  isCreatedByUser: boolean;
  unfinished?: boolean;
  error?: boolean;
  finish_reason?: string;
  feedback?: {
    rating: TFeedbackRating;
    tag: TFeedbackTag | undefined;
    text?: string;
  };
  _meiliIndex?: boolean;
  files?: unknown[];
  plugin?: {
    latest?: string;
    inputs?: unknown[];
    outputs?: string;
  };
  plugins?: unknown[];
  content?: unknown[];
  thread_id?: string;
  iconURL?: string;
  attachments?: unknown[];
  expiredAt?: Date;
  createdAt?: Date;
  updatedAt?: Date;
}
import mongoose, { Schema } from 'mongoose';
import type { IMessage } from '~/types/message';
import mongoMeili from '~/models/plugins/mongoMeili';

const messageSchema: Schema<IMessage> = new Schema(
  {
@@ -207,4 +166,13 @@ messageSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
messageSchema.index({ createdAt: 1 });
messageSchema.index({ messageId: 1, user: 1 }, { unique: true });

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  messageSchema.plugin(mongoMeili, {
    host: process.env.MEILI_HOST,
    apiKey: process.env.MEILI_MASTER_KEY,
    indexName: 'messages',
    primaryKey: 'messageId',
  });
}

export default messageSchema;
packages/data-schemas/src/schema/role.ts
@@ -1,36 +1,6 @@
import { Schema, Document } from 'mongoose';
import { Schema } from 'mongoose';
import { PermissionTypes, Permissions } from 'librechat-data-provider';

export interface IRole extends Document {
  name: string;
  permissions: {
    [PermissionTypes.BOOKMARKS]?: {
      [Permissions.USE]?: boolean;
    };
    [PermissionTypes.PROMPTS]?: {
      [Permissions.SHARED_GLOBAL]?: boolean;
      [Permissions.USE]?: boolean;
      [Permissions.CREATE]?: boolean;
    };
    [PermissionTypes.AGENTS]?: {
      [Permissions.SHARED_GLOBAL]?: boolean;
      [Permissions.USE]?: boolean;
      [Permissions.CREATE]?: boolean;
    };
    [PermissionTypes.MULTI_CONVO]?: {
      [Permissions.USE]?: boolean;
    };
    [PermissionTypes.TEMPORARY_CHAT]?: {
      [Permissions.USE]?: boolean;
    };
    [PermissionTypes.RUN_CODE]?: {
      [Permissions.USE]?: boolean;
    };
    [PermissionTypes.WEB_SEARCH]?: {
      [Permissions.USE]?: boolean;
    };
  };
}
import type { IRole } from '~/types';

// Create a sub-schema for permissions. Notice we disable _id for this subdocument.
const rolePermissionsSchema = new Schema(
packages/data-schemas/src/schema/session.ts
@@ -1,10 +1,5 @@
import mongoose, { Schema, Document, Types } from 'mongoose';

export interface ISession extends Document {
  refreshTokenHash: string;
  expiration: Date;
  user: Types.ObjectId;
}
import mongoose, { Schema } from 'mongoose';
import { ISession } from '~/types';

const sessionSchema: Schema<ISession> = new Schema({
  refreshTokenHash: {
packages/data-schemas/src/schema/token.ts
@@ -1,15 +1,5 @@
import { Schema, Document, Types } from 'mongoose';

export interface IToken extends Document {
  userId: Types.ObjectId;
  email?: string;
  type?: string;
  identifier?: string;
  token: string;
  createdAt: Date;
  expiresAt: Date;
  metadata?: Map<string, unknown>;
}
import { Schema } from 'mongoose';
import { IToken } from '~/types';

const tokenSchema: Schema<IToken> = new Schema({
  userId: {
packages/data-schemas/src/schema/user.ts
@@ -1,39 +1,6 @@
import { Schema, Document } from 'mongoose';
import { Schema } from 'mongoose';
import { SystemRoles } from 'librechat-data-provider';

export interface IUser extends Document {
  name?: string;
  username?: string;
  email: string;
  emailVerified: boolean;
  password?: string;
  avatar?: string;
  provider: string;
  role?: string;
  googleId?: string;
  facebookId?: string;
  openidId?: string;
  samlId?: string;
  ldapId?: string;
  githubId?: string;
  discordId?: string;
  appleId?: string;
  plugins?: unknown[];
  twoFactorEnabled?: boolean;
  totpSecret?: string;
  backupCodes?: Array<{
    codeHash: string;
    used: boolean;
    usedAt?: Date | null;
  }>;
  refreshToken?: Array<{
    refreshToken: string;
  }>;
  expiresAt?: Date;
  termsAccepted?: boolean;
  createdAt?: Date;
  updatedAt?: Date;
}
import { IUser } from '~/types';

// Session sub-schema
const SessionSchema = new Schema(
@@ -56,7 +23,7 @@ const BackupCodeSchema = new Schema(
  { _id: false },
);

const User = new Schema<IUser>(
const userSchema = new Schema<IUser>(
  {
    name: {
      type: String,
@@ -166,4 +133,4 @@ const User = new Schema<IUser>(
  { timestamps: true },
);

export default User;
export default userSchema;
28
packages/data-schemas/src/types/action.ts
Normal file
@@ -0,0 +1,28 @@
import mongoose, { Document } from 'mongoose';

export interface IAction extends Document {
  user: mongoose.Types.ObjectId;
  action_id: string;
  type: string;
  settings?: unknown;
  agent_id?: string;
  assistant_id?: string;
  metadata: {
    api_key?: string;
    auth: {
      authorization_type?: string;
      custom_auth_header?: string;
      type: 'service_http' | 'oauth' | 'none';
      authorization_content_type?: string;
      authorization_url?: string;
      client_url?: string;
      scope?: string;
      token_exchange_method: 'default_post' | 'basic_auth_header' | null;
    };
    domain: string;
    privacy_policy_url?: string;
    raw_spec?: string;
    oauth_client_id?: string;
    oauth_client_secret?: string;
  };
}
31
packages/data-schemas/src/types/agent.ts
Normal file
@@ -0,0 +1,31 @@
import { Document, Types } from 'mongoose';

export interface IAgent extends Omit<Document, 'model'> {
  id: string;
  name?: string;
  description?: string;
  instructions?: string;
  avatar?: {
    filepath: string;
    source: string;
  };
  provider: string;
  model: string;
  model_parameters?: Record<string, unknown>;
  artifacts?: string;
  access_level?: number;
  recursion_limit?: number;
  tools?: string[];
  tool_kwargs?: Array<unknown>;
  actions?: string[];
  author: Types.ObjectId;
  authorName?: string;
  hide_sequential_outputs?: boolean;
  end_after_tools?: boolean;
  agent_ids?: string[];
  isCollaborative?: boolean;
  conversation_starters?: string[];
  tool_resources?: unknown;
  projectIds?: Types.ObjectId[];
  versions?: Omit<IAgent, 'versions'>[];
}
15
packages/data-schemas/src/types/assistant.ts
Normal file
@@ -0,0 +1,15 @@
import { Document, Types } from 'mongoose';

export interface IAssistant extends Document {
  user: Types.ObjectId;
  assistant_id: string;
  avatar?: {
    filepath: string;
    source: string;
  };
  conversation_starters?: string[];
  access_level?: number;
  file_ids?: string[];
  actions?: string[];
  append_current_datetime?: boolean;
}
12
packages/data-schemas/src/types/balance.ts
Normal file
@@ -0,0 +1,12 @@
import { Document, Types } from 'mongoose';

export interface IBalance extends Document {
  user: Types.ObjectId;
  tokenCredits: number;
  // Automatic refill settings
  autoRefillEnabled: boolean;
  refillIntervalValue: number;
  refillIntervalUnit: 'seconds' | 'minutes' | 'hours' | 'days' | 'weeks' | 'months';
  lastRefill: Date;
  refillAmount: number;
}
10
packages/data-schemas/src/types/banner.ts
Normal file
@@ -0,0 +1,10 @@
import type { Document } from 'mongoose';

export interface IBanner extends Document {
  bannerId: string;
  message: string;
  displayFrom: Date;
  displayTo?: Date;
  type: 'banner' | 'popup';
  isPublic: boolean;
}
53
packages/data-schemas/src/types/convo.ts
Normal file
@@ -0,0 +1,53 @@
import type { Document, Types } from 'mongoose';

// @ts-ignore
export interface IConversation extends Document {
  conversationId: string;
  title?: string;
  user?: string;
  messages?: Types.ObjectId[];
  agentOptions?: unknown;
  // Fields provided by conversationPreset (adjust types as needed)
  endpoint?: string;
  endpointType?: string;
  model?: string;
  region?: string;
  chatGptLabel?: string;
  examples?: unknown[];
  modelLabel?: string;
  promptPrefix?: string;
  temperature?: number;
  top_p?: number;
  topP?: number;
  topK?: number;
  maxOutputTokens?: number;
  maxTokens?: number;
  presence_penalty?: number;
  frequency_penalty?: number;
  file_ids?: string[];
  resendImages?: boolean;
  promptCache?: boolean;
  thinking?: boolean;
  thinkingBudget?: number;
  system?: string;
  resendFiles?: boolean;
  imageDetail?: string;
  agent_id?: string;
  assistant_id?: string;
  instructions?: string;
  stop?: string[];
  isArchived?: boolean;
  iconURL?: string;
  greeting?: string;
  spec?: string;
  tags?: string[];
  tools?: string[];
  maxContextTokens?: number;
  max_tokens?: number;
  reasoning_effort?: string;
  // Additional fields
  files?: string[];
  expiredAt?: Date;
  createdAt?: Date;
  updatedAt?: Date;
}
27
packages/data-schemas/src/types/file.ts
Normal file
@@ -0,0 +1,27 @@
import { Document, Types } from 'mongoose';

export interface IMongoFile extends Omit<Document, 'model'> {
  user: Types.ObjectId;
  conversationId?: string;
  file_id: string;
  temp_file_id?: string;
  bytes: number;
  text?: string;
  filename: string;
  filepath: string;
  object: 'file';
  embedded?: boolean;
  type: string;
  context?: string;
  usage: number;
  source: string;
  model?: string;
  width?: number;
  height?: number;
  metadata?: {
    fileIdentifier?: string;
  };
  expiresAt?: Date;
  createdAt?: Date;
  updatedAt?: Date;
}
12
packages/data-schemas/src/types/index.ts
Normal file
@@ -0,0 +1,12 @@
export * from './user';
export * from './token';
export * from './convo';
export * from './session';
export * from './balance';
export * from './banner';
export * from './message';
export * from './agent';
export * from './role';
export * from './action';
export * from './assistant';
export * from './file';
44
packages/data-schemas/src/types/message.ts
Normal file
@@ -0,0 +1,44 @@
import type { Document } from 'mongoose';
import type { TFeedbackRating, TFeedbackTag } from 'librechat-data-provider';

// @ts-ignore
export interface IMessage extends Document {
  messageId: string;
  conversationId: string;
  user: string;
  model?: string;
  endpoint?: string;
  conversationSignature?: string;
  clientId?: string;
  invocationId?: number;
  parentMessageId?: string;
  tokenCount?: number;
  summaryTokenCount?: number;
  sender?: string;
  text?: string;
  summary?: string;
  isCreatedByUser: boolean;
  unfinished?: boolean;
  error?: boolean;
  finish_reason?: string;
  feedback?: {
    rating: TFeedbackRating;
    tag: TFeedbackTag | undefined;
    text?: string;
  };
  _meiliIndex?: boolean;
  files?: unknown[];
  plugin?: {
    latest?: string;
    inputs?: unknown[];
    outputs?: string;
  };
  plugins?: unknown[];
  content?: unknown[];
  thread_id?: string;
  iconURL?: string;
  attachments?: unknown[];
  expiredAt?: Date;
  createdAt?: Date;
  updatedAt?: Date;
}
33
packages/data-schemas/src/types/role.ts
Normal file
@@ -0,0 +1,33 @@
import { Document } from 'mongoose';
import { PermissionTypes, Permissions } from 'librechat-data-provider';

export interface IRole extends Document {
  name: string;
  permissions: {
    [PermissionTypes.BOOKMARKS]?: {
      [Permissions.USE]?: boolean;
    };
    [PermissionTypes.PROMPTS]?: {
      [Permissions.SHARED_GLOBAL]?: boolean;
      [Permissions.USE]?: boolean;
      [Permissions.CREATE]?: boolean;
    };
    [PermissionTypes.AGENTS]?: {
      [Permissions.SHARED_GLOBAL]?: boolean;
      [Permissions.USE]?: boolean;
      [Permissions.CREATE]?: boolean;
    };
    [PermissionTypes.MULTI_CONVO]?: {
      [Permissions.USE]?: boolean;
    };
    [PermissionTypes.TEMPORARY_CHAT]?: {
      [Permissions.USE]?: boolean;
    };
    [PermissionTypes.RUN_CODE]?: {
      [Permissions.USE]?: boolean;
    };
    [PermissionTypes.WEB_SEARCH]?: {
      [Permissions.USE]?: boolean;
    };
  };
}
42
packages/data-schemas/src/types/session.ts
Normal file
@@ -0,0 +1,42 @@
import type { Document, Types } from 'mongoose';

export interface ISession extends Document {
  refreshTokenHash: string;
  expiration: Date;
  user: Types.ObjectId;
}

export interface CreateSessionOptions {
  expiration?: Date;
}

export interface SessionSearchParams {
  refreshToken?: string;
  userId?: string;
  sessionId?: string | { sessionId: string };
}

export interface SessionQueryOptions {
  lean?: boolean;
}

export interface DeleteSessionParams {
  refreshToken?: string;
  sessionId?: string;
}

export interface DeleteAllSessionsOptions {
  excludeCurrentSession?: boolean;
  currentSessionId?: string;
}

export interface SessionResult {
  session: Partial<ISession>;
  refreshToken: string;
}

export interface SignPayloadParams {
  payload: Record<string, unknown>;
  secret?: string;
  expirationTime: number;
}
42
packages/data-schemas/src/types/token.ts
Normal file
@@ -0,0 +1,42 @@
import { Document, Types } from 'mongoose';

export interface IToken extends Document {
  userId: Types.ObjectId;
  email?: string;
  type?: string;
  identifier?: string;
  token: string;
  createdAt: Date;
  expiresAt: Date;
  metadata?: Map<string, unknown>;
}

export interface TokenCreateData {
  userId: Types.ObjectId | string;
  email?: string;
  type?: string;
  identifier?: string;
  token: string;
  expiresIn: number;
  metadata?: Map<string, unknown>;
}

export interface TokenQuery {
  userId?: Types.ObjectId | string;
  token?: string;
  email?: string;
  identifier?: string;
}

export interface TokenUpdateData {
  email?: string;
  type?: string;
  identifier?: string;
  token?: string;
  expiresAt?: Date;
  metadata?: Map<string, unknown>;
}

export interface TokenDeleteResult {
  deletedCount?: number;
}
72
packages/data-schemas/src/types/user.ts
Normal file
@@ -0,0 +1,72 @@
import { Document, Types } from 'mongoose';

export interface IUser extends Document {
  name?: string;
  username?: string;
  email: string;
  emailVerified: boolean;
  password?: string;
  avatar?: string;
  provider: string;
  role?: string;
  googleId?: string;
  facebookId?: string;
  openidId?: string;
  samlId?: string;
  ldapId?: string;
  githubId?: string;
  discordId?: string;
  appleId?: string;
  plugins?: unknown[];
  twoFactorEnabled?: boolean;
  totpSecret?: string;
  backupCodes?: Array<{
    codeHash: string;
    used: boolean;
    usedAt?: Date | null;
  }>;
  refreshToken?: Array<{
    refreshToken: string;
  }>;
  expiresAt?: Date;
  termsAccepted?: boolean;
  createdAt?: Date;
  updatedAt?: Date;
}

export interface BalanceConfig {
  enabled?: boolean;
  startBalance?: number;
  autoRefillEnabled?: boolean;
  refillIntervalValue?: number;
  refillIntervalUnit?: string;
  refillAmount?: number;
}

export interface UserCreateData extends Partial<IUser> {
  email: string;
}

export interface UserUpdateResult {
  deletedCount: number;
  message: string;
}

export interface UserSearchCriteria {
  email?: string;
  username?: string;
  googleId?: string;
  facebookId?: string;
  openidId?: string;
  samlId?: string;
  ldapId?: string;
  githubId?: string;
  discordId?: string;
  appleId?: string;
  _id?: Types.ObjectId | string;
}

export interface UserQueryOptions {
  fieldsToSelect?: string | string[] | null;
  lean?: boolean;
}
packages/data-schemas/tsconfig.json
@@ -12,7 +12,11 @@
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "resolveJsonModule": true,
    "sourceMap": true
    "sourceMap": true,
    "baseUrl": ".",
    "paths": {
      "~/*": ["./src/*"]
    }
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules", "dist", "tests"]
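One note on the `baseUrl`/`paths` addition above: it is what resolves the `~/` imports used throughout the new sources in this package at compile time (a brief illustrative sketch, not part of the diff):

// With "~/*" mapped to "./src/*", an import such as
//   import userSchema from '~/schema/user';
// in packages/data-schemas/src/models/user.ts resolves to
// packages/data-schemas/src/schema/user.ts when TypeScript compiles the package.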