Mirror of https://github.com/danny-avila/LibreChat.git (synced 2026-01-09 12:08:50 +01:00)

Merge branch 'main' into feat/user-groups
Commit: 2fd04b6d65
232 changed files with 14368 additions and 5262 deletions
@@ -1,5 +1,5 @@
 const mongoose = require('mongoose');
-const actionSchema = require('./schema/action');
+const { actionSchema } = require('@librechat/data-schemas');

 const Action = mongoose.model('action', actionSchema);
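The hunks in this section repeatedly apply the same refactor: each model file stops requiring its schema from the local `./schema` folder and instead imports it from the shared `@librechat/data-schemas` package, registering the Mongoose model in place. A minimal sketch of that pattern, using a hypothetical `clientSchema` export rather than any file from this diff:

const mongoose = require('mongoose');
// Schemas now live in the shared workspace package rather than in the local ./schema folder.
const { clientSchema } = require('@librechat/data-schemas'); // hypothetical schema export

// Register the model against the shared schema; callers keep importing the model as before.
const Client = mongoose.model('Client', clientSchema);

module.exports = Client;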
@@ -9,7 +9,7 @@ const {
   removeAgentFromAllProjects,
 } = require('./Project');
 const getLogStores = require('~/cache/getLogStores');
-const agentSchema = require('./schema/agent');
+const { agentSchema } = require('@librechat/data-schemas');

 const Agent = mongoose.model('agent', agentSchema);
@@ -1,5 +1,5 @@
 const mongoose = require('mongoose');
-const assistantSchema = require('./schema/assistant');
+const { assistantSchema } = require('@librechat/data-schemas');

 const Assistant = mongoose.model('assistant', assistantSchema);
@@ -1,5 +1,5 @@
 const mongoose = require('mongoose');
-const balanceSchema = require('./schema/balance');
+const { balanceSchema } = require('@librechat/data-schemas');
 const { getMultiplier } = require('./tx');
 const { logger } = require('~/config');
@@ -1,5 +1,9 @@
-const Banner = require('./schema/banner');
+const mongoose = require('mongoose');
 const logger = require('~/config/winston');
+const { bannerSchema } = require('@librechat/data-schemas');
+
+const Banner = mongoose.model('Banner', bannerSchema);

 /**
  * Retrieves the current active banner.
  * @returns {Promise<Object|null>} The active banner object or null if no active banner is found.
@@ -1,5 +1,4 @@
 const { logger } = require('~/config');
-// const { Categories } = require('./schema/categories');

 const options = [
   {
@@ -104,10 +104,16 @@ module.exports = {
       update.expiredAt = null;
     }

+    /** @type {{ $set: Partial<TConversation>; $unset?: Record<keyof TConversation, number> }} */
+    const updateOperation = { $set: update };
+    if (metadata && metadata.unsetFields && Object.keys(metadata.unsetFields).length > 0) {
+      updateOperation.$unset = metadata.unsetFields;
+    }
+
     /** Note: the resulting Model object is necessary for Meilisearch operations */
     const conversation = await Conversation.findOneAndUpdate(
       { conversationId, user: req.user.id },
-      update,
+      updateOperation,
       {
         new: true,
         upsert: true,
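The hunk above switches the conversation upsert from a bare `update` payload to a combined `$set`/`$unset` operation. A minimal, self-contained sketch of the same idea, with a hypothetical `Demo` model and field names:

const mongoose = require('mongoose');

// Hypothetical model, for illustration only.
const Demo = mongoose.model(
  'Demo',
  new mongoose.Schema({ conversationId: String, title: String, spec: String }),
);

async function upsertDemo(conversationId, update, unsetFields) {
  // $set writes the provided fields; $unset removes the fields named in unsetFields (e.g. { spec: 1 }).
  const updateOperation = { $set: update };
  if (unsetFields && Object.keys(unsetFields).length > 0) {
    updateOperation.$unset = unsetFields;
  }
  return Demo.findOneAndUpdate({ conversationId }, updateOperation, {
    new: true,
    upsert: true,
  }).lean();
}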
@@ -1,7 +1,11 @@
-const ConversationTag = require('./schema/conversationTagSchema');
+const mongoose = require('mongoose');
 const Conversation = require('./schema/convoSchema');
 const logger = require('~/config/winston');
+const { conversationTagSchema } = require('@librechat/data-schemas');
+
+const ConversationTag = mongoose.model('ConversationTag', conversationTagSchema);

 /**
  * Retrieves all conversation tags for a user.
  * @param {string} user - The user ID.
@@ -1,5 +1,5 @@
 const mongoose = require('mongoose');
-const fileSchema = require('./schema/fileSchema');
+const { fileSchema } = require('@librechat/data-schemas');

 const File = mongoose.model('File', fileSchema);
@@ -7,7 +7,7 @@ const File = mongoose.model('File', fileSchema);
  * Finds a file by its file_id with additional query options.
  * @param {string} file_id - The unique identifier of the file.
  * @param {object} options - Query options for filtering, projection, etc.
- * @returns {Promise<MongoFile>} A promise that resolves to the file document or null.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the file document or null.
  */
 const findFileById = async (file_id, options = {}) => {
   return await File.findOne({ file_id, ...options }).lean();
@@ -17,7 +17,7 @@ const findFileById = async (file_id, options = {}) => {
  * Retrieves files matching a given filter, sorted by the most recently updated.
  * @param {Object} filter - The filter criteria to apply.
  * @param {Object} [_sortOptions] - Optional sort parameters.
- * @returns {Promise<Array<MongoFile>>} A promise that resolves to an array of file documents.
+ * @returns {Promise<Array<IMongoFile>>} A promise that resolves to an array of file documents.
  */
 const getFiles = async (filter, _sortOptions) => {
   const sortOptions = { updatedAt: -1, ..._sortOptions };
@@ -26,9 +26,9 @@ const getFiles = async (filter, _sortOptions) => {

 /**
  * Creates a new file with a TTL of 1 hour.
- * @param {MongoFile} data - The file data to be created, must contain file_id.
+ * @param {IMongoFile} data - The file data to be created, must contain file_id.
  * @param {boolean} disableTTL - Whether to disable the TTL.
- * @returns {Promise<MongoFile>} A promise that resolves to the created file document.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the created file document.
  */
 const createFile = async (data, disableTTL) => {
   const fileData = {
@@ -48,8 +48,8 @@ const createFile = async (data, disableTTL) => {

 /**
  * Updates a file identified by file_id with new data and removes the TTL.
- * @param {MongoFile} data - The data to update, must contain file_id.
- * @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
+ * @param {IMongoFile} data - The data to update, must contain file_id.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
  */
 const updateFile = async (data) => {
   const { file_id, ...update } = data;
@@ -62,8 +62,8 @@ const updateFile = async (data) => {

 /**
  * Increments the usage of a file identified by file_id.
- * @param {MongoFile} data - The data to update, must contain file_id and the increment value for usage.
- * @returns {Promise<MongoFile>} A promise that resolves to the updated file document.
+ * @param {IMongoFile} data - The data to update, must contain file_id and the increment value for usage.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the updated file document.
  */
 const updateFileUsage = async (data) => {
   const { file_id, inc = 1 } = data;
@@ -77,7 +77,7 @@ const updateFileUsage = async (data) => {
 /**
  * Deletes a file identified by file_id.
  * @param {string} file_id - The unique identifier of the file to delete.
- * @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
  */
 const deleteFile = async (file_id) => {
   return await File.findOneAndDelete({ file_id }).lean();
@@ -86,7 +86,7 @@ const deleteFile = async (file_id) => {
 /**
  * Deletes a file identified by a filter.
  * @param {object} filter - The filter criteria to apply.
- * @returns {Promise<MongoFile>} A promise that resolves to the deleted file document or null.
+ * @returns {Promise<IMongoFile>} A promise that resolves to the deleted file document or null.
  */
 const deleteFileByFilter = async (filter) => {
   return await File.findOneAndDelete(filter).lean();
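A hypothetical usage sketch of the helpers above (`createFile`, `updateFileUsage`, `deleteFile`); the field values are illustrative and assume an open mongoose connection:

const mongoose = require('mongoose');
const { randomUUID } = require('crypto');

async function fileLifecycleExample() {
  const userId = new mongoose.Types.ObjectId(); // stand-in for a real user _id

  // Create a file record; passing `true` disables the default 1-hour TTL.
  const file = await createFile(
    {
      user: userId,
      file_id: randomUUID(),
      bytes: 1024,
      filename: 'notes.txt',
      filepath: '/uploads/notes.txt',
      type: 'text/plain',
    },
    true,
  );

  // Count one more use of the file, then remove it.
  await updateFileUsage({ file_id: file.file_id, inc: 1 });
  await deleteFile(file.file_id);
}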
@@ -1,4 +1,4 @@
 const mongoose = require('mongoose');
-const keySchema = require('./schema/key');
+const { keySchema } = require('@librechat/data-schemas');

 module.exports = mongoose.model('Key', keySchema);
@@ -1,6 +1,6 @@
 const { model } = require('mongoose');
 const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
-const projectSchema = require('~/models/schema/projectSchema');
+const { projectSchema } = require('@librechat/data-schemas');

 const Project = model('Project', projectSchema);
@@ -9,7 +9,7 @@ const Project = model('Project', projectSchema);
  *
  * @param {string} projectId - The ID of the project to find and return as a plain object.
  * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
- * @returns {Promise<MongoProject>} A plain object representing the project document, or `null` if no project is found.
+ * @returns {Promise<IMongoProject>} A plain object representing the project document, or `null` if no project is found.
  */
 const getProjectById = async function (projectId, fieldsToSelect = null) {
   const query = Project.findById(projectId);
@@ -27,7 +27,7 @@ const getProjectById = async function (projectId, fieldsToSelect = null) {
  *
  * @param {string} projectName - The name of the project to find or create.
  * @param {string|string[]} [fieldsToSelect] - The fields to include or exclude in the returned document.
- * @returns {Promise<MongoProject>} A plain object representing the project document.
+ * @returns {Promise<IMongoProject>} A plain object representing the project document.
  */
 const getProjectByName = async function (projectName, fieldsToSelect = null) {
   const query = { name: projectName };
@@ -47,7 +47,7 @@ const getProjectByName = async function (projectName, fieldsToSelect = null) {
  *
  * @param {string} projectId - The ID of the project to update.
  * @param {string[]} promptGroupIds - The array of prompt group IDs to add to the project.
- * @returns {Promise<MongoProject>} The updated project document.
+ * @returns {Promise<IMongoProject>} The updated project document.
  */
 const addGroupIdsToProject = async function (projectId, promptGroupIds) {
   return await Project.findByIdAndUpdate(
@@ -62,7 +62,7 @@ const addGroupIdsToProject = async function (projectId, promptGroupIds) {
  *
  * @param {string} projectId - The ID of the project to update.
  * @param {string[]} promptGroupIds - The array of prompt group IDs to remove from the project.
- * @returns {Promise<MongoProject>} The updated project document.
+ * @returns {Promise<IMongoProject>} The updated project document.
  */
 const removeGroupIdsFromProject = async function (projectId, promptGroupIds) {
   return await Project.findByIdAndUpdate(
@@ -87,7 +87,7 @@ const removeGroupFromAllProjects = async (promptGroupId) => {
  *
  * @param {string} projectId - The ID of the project to update.
  * @param {string[]} agentIds - The array of agent IDs to add to the project.
- * @returns {Promise<MongoProject>} The updated project document.
+ * @returns {Promise<IMongoProject>} The updated project document.
  */
 const addAgentIdsToProject = async function (projectId, agentIds) {
   return await Project.findByIdAndUpdate(
@@ -102,7 +102,7 @@ const addAgentIdsToProject = async function (projectId, agentIds) {
  *
  * @param {string} projectId - The ID of the project to update.
  * @param {string[]} agentIds - The array of agent IDs to remove from the project.
- * @returns {Promise<MongoProject>} The updated project document.
+ * @returns {Promise<IMongoProject>} The updated project document.
  */
 const removeAgentIdsFromProject = async function (projectId, agentIds) {
   return await Project.findByIdAndUpdate(
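A hypothetical usage sketch of the project helpers documented above; the `agentIds` field name on the project document is an assumption, not something this hunk shows:

const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;

// Share an agent globally by attaching it to the global project.
async function shareAgentGlobally(agentId) {
  // Only select the fields needed from the project document.
  const globalProject = await getProjectByName(GLOBAL_PROJECT_NAME, ['_id', 'agentIds']);
  return addAgentIdsToProject(globalProject._id, [agentId]);
}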
@@ -1,3 +1,4 @@
+const mongoose = require('mongoose');
 const { ObjectId } = require('mongodb');
 const { SystemRoles, SystemCategories, Constants } = require('librechat-data-provider');
 const {
@@ -6,10 +7,13 @@ const {
   removeGroupIdsFromProject,
   removeGroupFromAllProjects,
 } = require('./Project');
-const { Prompt, PromptGroup } = require('./schema/promptSchema');
+const { promptGroupSchema, promptSchema } = require('@librechat/data-schemas');
 const { escapeRegExp } = require('~/server/utils');
 const { logger } = require('~/config');

+const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);
+const Prompt = mongoose.model('Prompt', promptSchema);
+
 /**
  * Create a pipeline for the aggregation to get prompt groups
  * @param {Object} query
@@ -1,3 +1,4 @@
+const mongoose = require('mongoose');
 const {
   CacheKeys,
   SystemRoles,
@@ -6,13 +7,17 @@ const {
   removeNullishValues,
   agentPermissionsSchema,
   promptPermissionsSchema,
+  runCodePermissionsSchema,
   bookmarkPermissionsSchema,
   multiConvoPermissionsSchema,
   temporaryChatPermissionsSchema,
 } = require('librechat-data-provider');
 const getLogStores = require('~/cache/getLogStores');
-const Role = require('~/models/schema/roleSchema');
+const { roleSchema } = require('@librechat/data-schemas');
 const { logger } = require('~/config');

+const Role = mongoose.model('Role', roleSchema);
+
 /**
  * Retrieve a role by name and convert the found role document to a plain object.
  * If the role with the given name doesn't exist and the name is a system defined role, create it and return the lean version.
@@ -77,6 +82,8 @@ const permissionSchemas = {
   [PermissionTypes.PROMPTS]: promptPermissionsSchema,
   [PermissionTypes.BOOKMARKS]: bookmarkPermissionsSchema,
   [PermissionTypes.MULTI_CONVO]: multiConvoPermissionsSchema,
   [PermissionTypes.TEMPORARY_CHAT]: temporaryChatPermissionsSchema,
+  [PermissionTypes.RUN_CODE]: runCodePermissionsSchema,
 };

 /**
@@ -164,6 +171,7 @@ const initializeRoles = async function () {
   }
 };
 module.exports = {
+  Role,
   getRoleByName,
   initializeRoles,
   updateRoleByName,
@@ -8,7 +8,7 @@ const {
 } = require('librechat-data-provider');
 const { updateAccessPermissions, initializeRoles } = require('~/models/Role');
 const getLogStores = require('~/cache/getLogStores');
-const Role = require('~/models/schema/roleSchema');
+const { Role } = require('~/models/Role');

 // Mock the cache
 jest.mock('~/cache/getLogStores', () => {
@@ -1,7 +1,7 @@
 const mongoose = require('mongoose');
 const signPayload = require('~/server/services/signPayload');
 const { hashToken } = require('~/server/utils/crypto');
-const sessionSchema = require('./schema/session');
+const { sessionSchema } = require('@librechat/data-schemas');
 const { logger } = require('~/config');

 const Session = mongoose.model('Session', sessionSchema);
@@ -1,7 +1,9 @@
 const mongoose = require('mongoose');
 const { nanoid } = require('nanoid');
 const { Constants } = require('librechat-data-provider');
 const { Conversation } = require('~/models/Conversation');
-const SharedLink = require('./schema/shareSchema');
+const { shareSchema } = require('@librechat/data-schemas');
+const SharedLink = mongoose.model('SharedLink', shareSchema);
 const { getMessages } = require('./Message');
 const logger = require('~/config/winston');
@@ -1,6 +1,6 @@
 const mongoose = require('mongoose');
 const { encryptV2 } = require('~/server/utils/crypto');
-const tokenSchema = require('./schema/tokenSchema');
+const { tokenSchema } = require('@librechat/data-schemas');
 const { logger } = require('~/config');

 /**
@@ -13,6 +13,13 @@ const Token = mongoose.model('Token', tokenSchema);
  */
 async function fixIndexes() {
   try {
+    if (
+      process.env.NODE_ENV === 'CI' ||
+      process.env.NODE_ENV === 'development' ||
+      process.env.NODE_ENV === 'test'
+    ) {
+      return;
+    }
     const indexes = await Token.collection.indexes();
     logger.debug('Existing Token Indexes:', JSON.stringify(indexes, null, 2));
     const unwantedTTLIndexes = indexes.filter(
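The filter and drop logic is truncated in this hunk; a minimal sketch of how unwanted TTL indexes could be detected and removed with the driver calls used above (the kept index name `expiresAt_1` is an assumption):

async function dropUnwantedTTLIndexes(Token, logger) {
  const indexes = await Token.collection.indexes();
  // Keep only the intended TTL index; anything else carrying expireAfterSeconds is unwanted.
  const unwantedTTLIndexes = indexes.filter(
    (index) => index.expireAfterSeconds !== undefined && index.name !== 'expiresAt_1',
  );
  for (const index of unwantedTTLIndexes) {
    logger.debug(`Dropping unwanted TTL index: ${index.name}`);
    await Token.collection.dropIndex(index.name);
  }
}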
@@ -1,9 +1,11 @@
-const ToolCall = require('./schema/toolCallSchema');
+const mongoose = require('mongoose');
+const { toolCallSchema } = require('@librechat/data-schemas');
+const ToolCall = mongoose.model('ToolCall', toolCallSchema);

 /**
  * Create a new tool call
- * @param {ToolCallData} toolCallData - The tool call data
- * @returns {Promise<ToolCallData>} The created tool call document
+ * @param {IToolCallData} toolCallData - The tool call data
+ * @returns {Promise<IToolCallData>} The created tool call document
  */
 async function createToolCall(toolCallData) {
   try {
@@ -16,7 +18,7 @@ async function createToolCall(toolCallData) {
 /**
  * Get a tool call by ID
  * @param {string} id - The tool call document ID
- * @returns {Promise<ToolCallData|null>} The tool call document or null if not found
+ * @returns {Promise<IToolCallData|null>} The tool call document or null if not found
  */
 async function getToolCallById(id) {
   try {
@@ -44,7 +46,7 @@ async function getToolCallsByMessage(messageId, userId) {
  * Get tool calls by conversation ID and user
  * @param {string} conversationId - The conversation ID
  * @param {string} userId - The user's ObjectId
- * @returns {Promise<ToolCallData[]>} Array of tool call documents
+ * @returns {Promise<IToolCallData[]>} Array of tool call documents
  */
 async function getToolCallsByConvo(conversationId, userId) {
   try {
@@ -57,8 +59,8 @@ async function getToolCallsByConvo(conversationId, userId) {
 /**
  * Update a tool call
  * @param {string} id - The tool call document ID
- * @param {Partial<ToolCallData>} updateData - The data to update
- * @returns {Promise<ToolCallData|null>} The updated tool call document or null if not found
+ * @param {Partial<IToolCallData>} updateData - The data to update
+ * @returns {Promise<IToolCallData|null>} The updated tool call document or null if not found
  */
 async function updateToolCall(id, updateData) {
   try {
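A hypothetical usage sketch of `createToolCall` and `updateToolCall`; the tool-call fields shown are assumptions, since the schema itself is not part of this hunk:

async function recordToolResult({ conversationId, messageId, userId, result }) {
  // Assumed fields: the actual IToolCallData shape lives in @librechat/data-schemas.
  const toolCall = await createToolCall({
    conversationId,
    messageId,
    user: userId,
    toolId: 'calculator',
    result: null,
  });
  return updateToolCall(toolCall._id, { result });
}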
@@ -1,6 +1,6 @@
 const mongoose = require('mongoose');
 const { isEnabled } = require('~/server/utils/handleText');
-const transactionSchema = require('./schema/transaction');
+const { transactionSchema } = require('@librechat/data-schemas');
 const { getMultiplier, getCacheMultiplier } = require('./tx');
 const { logger } = require('~/config');
 const Balance = require('./Balance');
@@ -1,5 +1,5 @@
 const mongoose = require('mongoose');
-const userSchema = require('~/models/schema/userSchema');
+const { userSchema } = require('@librechat/data-schemas');

 const User = mongoose.model('User', userSchema);
@@ -4,9 +4,28 @@ const { MeiliSearch } = require('meilisearch');
 const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
 const logger = require('~/config/meiliLogger');

-// Environment flags
+/**
+ * Flag to indicate if search is enabled based on environment variables.
+ * @type {boolean}
+ */
 const searchEnabled = process.env.SEARCH && process.env.SEARCH.toLowerCase() === 'true';
+
+/**
+ * Flag to indicate if MeiliSearch is enabled based on required environment variables.
+ * @type {boolean}
+ */
 const meiliEnabled = process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY && searchEnabled;

+/**
+ * Validates the required options for configuring the mongoMeili plugin.
+ *
+ * @param {Object} options - The configuration options.
+ * @param {string} options.host - The MeiliSearch host.
+ * @param {string} options.apiKey - The MeiliSearch API key.
+ * @param {string} options.indexName - The name of the index.
+ * @throws {Error} Throws an error if any required option is missing.
+ */
 const validateOptions = function (options) {
   const requiredKeys = ['host', 'apiKey', 'indexName'];
   requiredKeys.forEach((key) => {
|
|
@@ -16,53 +35,64 @@ const validateOptions = function (options) {
   });
 };

-// const createMeiliMongooseModel = function ({ index, indexName, client, attributesToIndex }) {
+/**
+ * Factory function to create a MeiliMongooseModel class which extends a Mongoose model.
+ * This class contains static and instance methods to synchronize and manage the MeiliSearch index
+ * corresponding to the MongoDB collection.
+ *
+ * @param {Object} config - Configuration object.
+ * @param {Object} config.index - The MeiliSearch index object.
+ * @param {Array<string>} config.attributesToIndex - List of attributes to index.
+ * @returns {Function} A class definition that will be loaded into the Mongoose schema.
+ */
 const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
+  // The primary key is assumed to be the first attribute in the attributesToIndex array.
   const primaryKey = attributesToIndex[0];
-  // MeiliMongooseModel is of type Mongoose.Model

   class MeiliMongooseModel {
     /**
-     * `syncWithMeili`: synchronizes the data between a MongoDB collection and a MeiliSearch index,
-     * only triggered if there's ever a discrepancy determined by `api\lib\db\indexSync.js`.
+     * Synchronizes the data between the MongoDB collection and the MeiliSearch index.
      *
-     * 1. Fetches all documents from the MongoDB collection and the MeiliSearch index.
-     * 2. Compares the documents from both sources.
-     * 3. If a document exists in MeiliSearch but not in MongoDB, it's deleted from MeiliSearch.
-     * 4. If a document exists in MongoDB but not in MeiliSearch, it's added to MeiliSearch.
-     * 5. If a document exists in both but has different `text` or `title` fields (depending on the `primaryKey`), it's updated in MeiliSearch.
-     * 6. After all operations, it updates the `_meiliIndex` field in MongoDB to indicate whether the document is indexed in MeiliSearch.
+     * The synchronization process involves:
+     * 1. Fetching all documents from the MongoDB collection and MeiliSearch index.
+     * 2. Comparing documents from both sources.
+     * 3. Deleting documents from MeiliSearch that no longer exist in MongoDB.
+     * 4. Adding documents to MeiliSearch that exist in MongoDB but not in the index.
+     * 5. Updating documents in MeiliSearch if key fields (such as `text` or `title`) differ.
+     * 6. Updating the `_meiliIndex` field in MongoDB to indicate the indexing status.
      *
-     * Note: This strategy does not use batch operations for Meilisearch as the `index.addDocuments` will discard
-     * the entire batch if there's an error with one document, and will not throw an error if there's an issue.
-     * Also, `index.getDocuments` needs an exact limit on the amount of documents to return, so we build the map in batches.
+     * Note: The function processes documents in batches because MeiliSearch's
+     * `index.getDocuments` requires an exact limit and `index.addDocuments` does not handle
+     * partial failures in a batch.
      *
-     * @returns {Promise} A promise that resolves when the synchronization is complete.
-     *
-     * @throws {Error} Throws an error if there's an issue with adding a document to MeiliSearch.
+     * @returns {Promise<void>} Resolves when the synchronization is complete.
      */
     static async syncWithMeili() {
       try {
         let moreDocuments = true;
+        // Retrieve all MongoDB documents from the collection as plain JavaScript objects.
         const mongoDocuments = await this.find().lean();
-        const format = (doc) => _.pick(doc, attributesToIndex);

-        // Prepare for comparison
+        // Helper function to format a document by selecting only the attributes to index
+        // and omitting keys starting with '$'.
+        const format = (doc) =>
+          _.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));

+        // Build a map of MongoDB documents for quick lookup based on the primary key.
         const mongoMap = new Map(mongoDocuments.map((doc) => [doc[primaryKey], format(doc)]));
         const indexMap = new Map();
         let offset = 0;
         const batchSize = 1000;

+        // Fetch documents from the MeiliSearch index in batches.
         while (moreDocuments) {
           const batch = await index.getDocuments({ limit: batchSize, offset });

           if (batch.results.length === 0) {
             moreDocuments = false;
           }

           for (const doc of batch.results) {
             indexMap.set(doc[primaryKey], format(doc));
           }

           offset += batchSize;
         }
@@ -70,13 +100,12 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {

         const updateOps = [];

-        // Iterate over Meili index documents
+        // Process documents present in the MeiliSearch index.
         for (const [id, doc] of indexMap) {
           const update = {};
           update[primaryKey] = id;
           if (mongoMap.has(id)) {
-            // Case: Update
-            // If document also exists in MongoDB, would be update case
+            // If document exists in MongoDB, check for discrepancies in key fields.
             if (
               (doc.text && doc.text !== mongoMap.get(id).text) ||
               (doc.title && doc.title !== mongoMap.get(id).title)
@@ -92,8 +121,7 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
               await index.addDocuments([doc]);
             }
           } else {
-            // Case: Delete
-            // If document does not exist in MongoDB, its a delete case from meili index
+            // If the document does not exist in MongoDB, delete it from MeiliSearch.
             await index.deleteDocument(id);
             updateOps.push({
               updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
@@ -101,24 +129,25 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
           }
         }

-        // Iterate over MongoDB documents
+        // Process documents present in MongoDB.
         for (const [id, doc] of mongoMap) {
           const update = {};
           update[primaryKey] = id;
-          // Case: Insert
-          // If document does not exist in Meili Index, Its an insert case
+          // If the document is missing in the Meili index, add it.
           if (!indexMap.has(id)) {
             await index.addDocuments([doc]);
             updateOps.push({
               updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
             });
           } else if (doc._meiliIndex === false) {
+            // If the document exists but is marked as not indexed, update the flag.
             updateOps.push({
               updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
             });
           }
         }

+        // Execute bulk update operations in MongoDB to update the _meiliIndex flags.
         if (updateOps.length > 0) {
           await this.collection.bulkWrite(updateOps);
           logger.debug(
|
|
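A standalone sketch of the batched `index.getDocuments({ limit, offset })` pattern used in `syncWithMeili` above, assuming a configured MeiliSearch index object and a `primaryKey` string:

// Builds a Map of primaryKey -> document for everything currently in the Meili index.
async function buildIndexMap(index, primaryKey, batchSize = 1000) {
  const indexMap = new Map();
  let offset = 0;
  let moreDocuments = true;
  while (moreDocuments) {
    const batch = await index.getDocuments({ limit: batchSize, offset });
    if (batch.results.length === 0) {
      moreDocuments = false;
    }
    for (const doc of batch.results) {
      indexMap.set(doc[primaryKey], doc);
    }
    offset += batchSize;
  }
  return indexMap;
}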
@@ -132,34 +161,47 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
       }
     }

-    // Set one or more settings of the meili index
+    /**
+     * Updates settings for the MeiliSearch index.
+     *
+     * @param {Object} settings - The settings to update on the MeiliSearch index.
+     * @returns {Promise<Object>} Promise resolving to the update result.
+     */
     static async setMeiliIndexSettings(settings) {
       return await index.updateSettings(settings);
     }

-    // Search the index
+    /**
+     * Searches the MeiliSearch index and optionally populates the results with data from MongoDB.
+     *
+     * @param {string} q - The search query.
+     * @param {Object} params - Additional search parameters for MeiliSearch.
+     * @param {boolean} populate - Whether to populate search hits with full MongoDB documents.
+     * @returns {Promise<Object>} The search results with populated hits if requested.
+     */
     static async meiliSearch(q, params, populate) {
       const data = await index.search(q, params);

-      // Populate hits with content from mongodb
       if (populate) {
-        // Find objects into mongodb matching `objectID` from Meili search
+        // Build a query using the primary key values from the search hits.
         const query = {};
-        // query[primaryKey] = { $in: _.map(data.hits, primaryKey) };
         query[primaryKey] = _.map(data.hits, (hit) => cleanUpPrimaryKeyValue(hit[primaryKey]));
-        // logger.debug('query', query);
-        const hitsFromMongoose = await this.find(
-          query,
-          _.reduce(
-            this.schema.obj,
-            function (results, value, key) {
-              return { ...results, [key]: 1 };
-            },
-            { _id: 1, __v: 1 },
-          ),
-        ).lean();

-        // Add additional data from mongodb into Meili search hits
+        // Build a projection object, including only keys that do not start with '$'.
+        const projection = Object.keys(this.schema.obj).reduce(
+          (results, key) => {
+            if (!key.startsWith('$')) {
+              results[key] = 1;
+            }
+            return results;
+          },
+          { _id: 1, __v: 1 },
+        );

+        // Retrieve the full documents from MongoDB.
+        const hitsFromMongoose = await this.find(query, projection).lean();

+        // Merge the MongoDB documents with the search hits.
         const populatedHits = data.hits.map(function (hit) {
           const query = {};
           query[primaryKey] = hit[primaryKey];
@@ -176,10 +218,21 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
       return data;
     }

+    /**
+     * Preprocesses the current document for indexing.
+     *
+     * This method:
+     * - Picks only the defined attributes to index.
+     * - Omits any keys starting with '$'.
+     * - Replaces pipe characters ('|') in `conversationId` with '--'.
+     * - Extracts and concatenates text from an array of content items.
+     *
+     * @returns {Object} The preprocessed object ready for indexing.
+     */
     preprocessObjectForIndex() {
-      const object = _.pick(this.toJSON(), attributesToIndex);
-      // NOTE: MeiliSearch does not allow | in primary key, so we replace it with - for Bing convoIds
-      // object.conversationId = object.conversationId.replace(/\|/g, '-');
+      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
+        k.startsWith('$'),
+      );
       if (object.conversationId && object.conversationId.includes('|')) {
         object.conversationId = object.conversationId.replace(/\|/g, '--');
       }
|
|
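A standalone sketch of the projection-building pattern introduced above, which includes every schema key except Mongoose's internal `$`-prefixed ones:

// Given a mongoose schema definition object, build a find() projection such as
// { _id: 1, __v: 1, title: 1, text: 1 } while skipping internal '$'-prefixed keys.
function buildProjection(schemaObj) {
  return Object.keys(schemaObj).reduce(
    (results, key) => {
      if (!key.startsWith('$')) {
        results[key] = 1;
      }
      return results;
    },
    { _id: 1, __v: 1 },
  );
}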
@@ -195,32 +248,53 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
       return object;
     }

-    // Push new document to Meili
+    /**
+     * Adds the current document to the MeiliSearch index.
+     *
+     * The method preprocesses the document, adds it to MeiliSearch, and then updates
+     * the MongoDB document's `_meiliIndex` flag to true.
+     *
+     * @returns {Promise<void>}
+     */
     async addObjectToMeili() {
       const object = this.preprocessObjectForIndex();
       try {
-        // logger.debug('Adding document to Meili', object);
         await index.addDocuments([object]);
       } catch (error) {
-        // logger.debug('Error adding document to Meili');
-        // logger.error(error);
+        // Error handling can be enhanced as needed.
+        logger.error('[addObjectToMeili] Error adding document to Meili', error);
       }

       await this.collection.updateMany({ _id: this._id }, { $set: { _meiliIndex: true } });
     }

-    // Update an existing document in Meili
+    /**
+     * Updates the current document in the MeiliSearch index.
+     *
+     * @returns {Promise<void>}
+     */
     async updateObjectToMeili() {
-      const object = _.pick(this.toJSON(), attributesToIndex);
+      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
+        k.startsWith('$'),
+      );
       await index.updateDocuments([object]);
     }

-    // Delete a document from Meili
+    /**
+     * Deletes the current document from the MeiliSearch index.
+     *
+     * @returns {Promise<void>}
+     */
     async deleteObjectFromMeili() {
       await index.deleteDocument(this._id);
     }

-    // * schema.post('save')
+    /**
+     * Post-save hook to synchronize the document with MeiliSearch.
+     *
+     * If the document is already indexed (i.e. `_meiliIndex` is true), it updates it;
+     * otherwise, it adds the document to the index.
+     */
     postSaveHook() {
       if (this._meiliIndex) {
         this.updateObjectToMeili();
@@ -229,14 +303,24 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
       }
     }

-    // * schema.post('update')
+    /**
+     * Post-update hook to update the document in MeiliSearch.
+     *
+     * This hook is triggered after a document update, ensuring that changes are
+     * propagated to the MeiliSearch index if the document is indexed.
+     */
     postUpdateHook() {
       if (this._meiliIndex) {
         this.updateObjectToMeili();
       }
     }

-    // * schema.post('remove')
+    /**
+     * Post-remove hook to delete the document from MeiliSearch.
+     *
+     * This hook is triggered after a document is removed, ensuring that the document
+     * is also removed from the MeiliSearch index if it was previously indexed.
+     */
     postRemoveHook() {
       if (this._meiliIndex) {
         this.deleteObjectFromMeili();
@@ -247,11 +331,27 @@ const createMeiliMongooseModel = function ({ index, attributesToIndex }) {
   return MeiliMongooseModel;
 };

+/**
+ * Mongoose plugin to synchronize MongoDB collections with a MeiliSearch index.
+ *
+ * This plugin:
+ * - Validates the provided options.
+ * - Adds a `_meiliIndex` field to the schema to track indexing status.
+ * - Sets up a MeiliSearch client and creates an index if it doesn't already exist.
+ * - Loads class methods for syncing, searching, and managing documents in MeiliSearch.
+ * - Registers Mongoose hooks (post-save, post-update, post-remove, etc.) to maintain index consistency.
+ *
+ * @param {mongoose.Schema} schema - The Mongoose schema to which the plugin is applied.
+ * @param {Object} options - Configuration options.
+ * @param {string} options.host - The MeiliSearch host.
+ * @param {string} options.apiKey - The MeiliSearch API key.
+ * @param {string} options.indexName - The name of the MeiliSearch index.
+ * @param {string} options.primaryKey - The primary key field for indexing.
+ */
 module.exports = function mongoMeili(schema, options) {
-  // Vaidate Options for mongoMeili
   validateOptions(options);

-  // Add meiliIndex to schema
+  // Add _meiliIndex field to the schema to track if a document has been indexed in MeiliSearch.
   schema.add({
     _meiliIndex: {
       type: Boolean,
|
|
@@ -263,69 +363,77 @@ module.exports = function mongoMeili(schema, options) {

   const { host, apiKey, indexName, primaryKey } = options;

-  // Setup MeiliSearch Client
+  // Setup the MeiliSearch client.
   const client = new MeiliSearch({ host, apiKey });

-  // Asynchronously create the index
+  // Create the index asynchronously if it doesn't exist.
   client.createIndex(indexName, { primaryKey });

-  // Setup the index to search for this schema
+  // Setup the MeiliSearch index for this schema.
   const index = client.index(indexName);

+  // Collect attributes from the schema that should be indexed.
   const attributesToIndex = [
     ..._.reduce(
       schema.obj,
       function (results, value, key) {
         return value.meiliIndex ? [...results, key] : results;
-        // }, []), '_id'];
       },
       [],
     ),
   ];

+  // Load the class methods into the schema.
   schema.loadClass(createMeiliMongooseModel({ index, indexName, client, attributesToIndex }));

-  // Register hooks
+  // Register Mongoose hooks to synchronize with MeiliSearch.

+  // Post-save: synchronize after a document is saved.
   schema.post('save', function (doc) {
     doc.postSaveHook();
   });

+  // Post-update: synchronize after a document is updated.
   schema.post('update', function (doc) {
     doc.postUpdateHook();
   });

+  // Post-remove: synchronize after a document is removed.
   schema.post('remove', function (doc) {
     doc.postRemoveHook();
   });

+  // Pre-deleteMany hook: remove corresponding documents from MeiliSearch when multiple documents are deleted.
   schema.pre('deleteMany', async function (next) {
     if (!meiliEnabled) {
-      next();
+      return next();
     }

     try {
+      // Check if the schema has a "messages" field to determine if it's a conversation schema.
       if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) {
         const convoIndex = client.index('convos');
         const deletedConvos = await mongoose.model('Conversation').find(this._conditions).lean();
-        let promises = [];
-        for (const convo of deletedConvos) {
-          promises.push(convoIndex.deleteDocument(convo.conversationId));
-        }
+        const promises = deletedConvos.map((convo) =>
+          convoIndex.deleteDocument(convo.conversationId),
+        );
         await Promise.all(promises);
       }

+      // Check if the schema has a "messageId" field to determine if it's a message schema.
       if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) {
         const messageIndex = client.index('messages');
         const deletedMessages = await mongoose.model('Message').find(this._conditions).lean();
-        let promises = [];
-        for (const message of deletedMessages) {
-          promises.push(messageIndex.deleteDocument(message.messageId));
-        }
+        const promises = deletedMessages.map((message) =>
+          messageIndex.deleteDocument(message.messageId),
+        );
         await Promise.all(promises);
       }
       return next();
     } catch (error) {
       if (meiliEnabled) {
         logger.error(
-          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion, next startup may be slow due to syncing',
+          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion. Next startup may be slow due to syncing.',
           error,
         );
       }
|
|
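A standalone sketch of the `deleteMany` cleanup pattern above: map the deleted documents to MeiliSearch delete calls and await them together (assumes a configured MeiliSearch client):

// Delete the Meili documents that correspond to conversations removed from MongoDB.
async function removeDeletedConvosFromMeili(client, deletedConvos) {
  const convoIndex = client.index('convos');
  const promises = deletedConvos.map((convo) =>
    convoIndex.deleteDocument(convo.conversationId),
  );
  await Promise.all(promises);
}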
@@ -333,17 +441,19 @@ module.exports = function mongoMeili(schema, options) {
     }
   });

+  // Post-findOneAndUpdate hook: update MeiliSearch index after a document is updated via findOneAndUpdate.
   schema.post('findOneAndUpdate', async function (doc) {
     if (!meiliEnabled) {
       return;
     }

+    // If the document is unfinished, do not update the index.
    if (doc.unfinished) {
       return;
     }

     let meiliDoc;
-    // Doc is a Conversation
+    // For conversation documents, try to fetch the document from the "convos" index.
     if (doc.messages) {
       try {
         meiliDoc = await client.index('convos').getDocument(doc.conversationId);
@@ -356,10 +466,12 @@ module.exports = function mongoMeili(schema, options) {
       }
     }

+    // If the MeiliSearch document exists and the title is unchanged, do nothing.
     if (meiliDoc && meiliDoc.title === doc.title) {
       return;
     }

+    // Otherwise, trigger a post-save hook to synchronize the document.
     doc.postSaveHook();
   });
 };
|
|
|
|||
|
|
@ -1,60 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
|
||||
const { Schema } = mongoose;
|
||||
|
||||
const AuthSchema = new Schema(
|
||||
{
|
||||
authorization_type: String,
|
||||
custom_auth_header: String,
|
||||
type: {
|
||||
type: String,
|
||||
enum: ['service_http', 'oauth', 'none'],
|
||||
},
|
||||
authorization_content_type: String,
|
||||
authorization_url: String,
|
||||
client_url: String,
|
||||
scope: String,
|
||||
token_exchange_method: {
|
||||
type: String,
|
||||
enum: ['default_post', 'basic_auth_header', null],
|
||||
},
|
||||
},
|
||||
{ _id: false },
|
||||
);
|
||||
|
||||
const actionSchema = new Schema({
|
||||
user: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User',
|
||||
index: true,
|
||||
required: true,
|
||||
},
|
||||
action_id: {
|
||||
type: String,
|
||||
index: true,
|
||||
required: true,
|
||||
},
|
||||
type: {
|
||||
type: String,
|
||||
default: 'action_prototype',
|
||||
},
|
||||
settings: Schema.Types.Mixed,
|
||||
agent_id: String,
|
||||
assistant_id: String,
|
||||
metadata: {
|
||||
api_key: String, // private, encrypted
|
||||
auth: AuthSchema,
|
||||
domain: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
// json_schema: Schema.Types.Mixed,
|
||||
privacy_policy_url: String,
|
||||
raw_spec: String,
|
||||
oauth_client_id: String, // private, encrypted
|
||||
oauth_client_secret: String, // private, encrypted
|
||||
},
|
||||
});
|
||||
// }, { minimize: false }); // Prevent removal of empty objects
|
||||
|
||||
module.exports = actionSchema;
|
||||
|
|
@ -1,96 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
|
||||
const agentSchema = mongoose.Schema(
|
||||
{
|
||||
id: {
|
||||
type: String,
|
||||
index: true,
|
||||
unique: true,
|
||||
required: true,
|
||||
},
|
||||
name: {
|
||||
type: String,
|
||||
},
|
||||
description: {
|
||||
type: String,
|
||||
},
|
||||
instructions: {
|
||||
type: String,
|
||||
},
|
||||
avatar: {
|
||||
type: {
|
||||
filepath: String,
|
||||
source: String,
|
||||
},
|
||||
default: undefined,
|
||||
},
|
||||
provider: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
model: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
model_parameters: {
|
||||
type: Object,
|
||||
},
|
||||
artifacts: {
|
||||
type: String,
|
||||
},
|
||||
access_level: {
|
||||
type: Number,
|
||||
},
|
||||
tools: {
|
||||
type: [String],
|
||||
default: undefined,
|
||||
},
|
||||
tool_kwargs: {
|
||||
type: [{ type: mongoose.Schema.Types.Mixed }],
|
||||
},
|
||||
actions: {
|
||||
type: [String],
|
||||
default: undefined,
|
||||
},
|
||||
author: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User',
|
||||
required: true,
|
||||
},
|
||||
authorName: {
|
||||
type: String,
|
||||
default: undefined,
|
||||
},
|
||||
hide_sequential_outputs: {
|
||||
type: Boolean,
|
||||
},
|
||||
end_after_tools: {
|
||||
type: Boolean,
|
||||
},
|
||||
agent_ids: {
|
||||
type: [String],
|
||||
},
|
||||
isCollaborative: {
|
||||
type: Boolean,
|
||||
default: undefined,
|
||||
},
|
||||
conversation_starters: {
|
||||
type: [String],
|
||||
default: [],
|
||||
},
|
||||
tool_resources: {
|
||||
type: mongoose.Schema.Types.Mixed,
|
||||
default: {},
|
||||
},
|
||||
projectIds: {
|
||||
type: [mongoose.Schema.Types.ObjectId],
|
||||
ref: 'Project',
|
||||
index: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
},
|
||||
);
|
||||
|
||||
module.exports = agentSchema;
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
|
||||
const assistantSchema = mongoose.Schema(
|
||||
{
|
||||
user: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User',
|
||||
required: true,
|
||||
},
|
||||
assistant_id: {
|
||||
type: String,
|
||||
index: true,
|
||||
required: true,
|
||||
},
|
||||
avatar: {
|
||||
type: {
|
||||
filepath: String,
|
||||
source: String,
|
||||
},
|
||||
default: undefined,
|
||||
},
|
||||
conversation_starters: {
|
||||
type: [String],
|
||||
default: [],
|
||||
},
|
||||
access_level: {
|
||||
type: Number,
|
||||
},
|
||||
file_ids: { type: [String], default: undefined },
|
||||
actions: { type: [String], default: undefined },
|
||||
append_current_datetime: {
|
||||
type: Boolean,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
timestamps: true,
|
||||
},
|
||||
);
|
||||
|
||||
module.exports = assistantSchema;
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
|
||||
const balanceSchema = mongoose.Schema({
|
||||
user: {
|
||||
type: mongoose.Schema.Types.ObjectId,
|
||||
ref: 'User',
|
||||
index: true,
|
||||
required: true,
|
||||
},
|
||||
// 1000 tokenCredits = 1 mill ($0.001 USD)
|
||||
tokenCredits: {
|
||||
type: Number,
|
||||
default: 0,
|
||||
},
|
||||
});
|
||||
|
||||
module.exports = balanceSchema;
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
|
||||
const bannerSchema = mongoose.Schema(
|
||||
{
|
||||
bannerId: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
message: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
displayFrom: {
|
||||
type: Date,
|
||||
required: true,
|
||||
default: Date.now,
|
||||
},
|
||||
displayTo: {
|
||||
type: Date,
|
||||
},
|
||||
type: {
|
||||
type: String,
|
||||
enum: ['banner', 'popup'],
|
||||
default: 'banner',
|
||||
},
|
||||
isPublic: {
|
||||
type: Boolean,
|
||||
default: false,
|
||||
},
|
||||
},
|
||||
|
||||
{ timestamps: true },
|
||||
);
|
||||
|
||||
const Banner = mongoose.model('Banner', bannerSchema);
|
||||
module.exports = Banner;
|
||||
|
|
@ -1,19 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
const Schema = mongoose.Schema;
|
||||
|
||||
const categoriesSchema = new Schema({
|
||||
label: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
},
|
||||
value: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
},
|
||||
});
|
||||
|
||||
const categories = mongoose.model('categories', categoriesSchema);
|
||||
|
||||
module.exports = { Categories: categories };
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
|
||||
const conversationTagSchema = mongoose.Schema(
|
||||
{
|
||||
tag: {
|
||||
type: String,
|
||||
index: true,
|
||||
},
|
||||
user: {
|
||||
type: String,
|
||||
index: true,
|
||||
},
|
||||
description: {
|
||||
type: String,
|
||||
index: true,
|
||||
},
|
||||
count: {
|
||||
type: Number,
|
||||
default: 0,
|
||||
},
|
||||
position: {
|
||||
type: Number,
|
||||
default: 0,
|
||||
index: true,
|
||||
},
|
||||
},
|
||||
{ timestamps: true },
|
||||
);
|
||||
|
||||
conversationTagSchema.index({ tag: 1, user: 1 }, { unique: true });
|
||||
|
||||
module.exports = mongoose.model('ConversationTag', conversationTagSchema);
|
||||
|
|
@ -1,63 +1,18 @@
|
|||
const mongoose = require('mongoose');
|
||||
const mongoMeili = require('../plugins/mongoMeili');
|
||||
const { conversationPreset } = require('./defaults');
|
||||
const convoSchema = mongoose.Schema(
|
||||
{
|
||||
conversationId: {
|
||||
type: String,
|
||||
unique: true,
|
||||
required: true,
|
||||
index: true,
|
||||
meiliIndex: true,
|
||||
},
|
||||
title: {
|
||||
type: String,
|
||||
default: 'New Chat',
|
||||
meiliIndex: true,
|
||||
},
|
||||
user: {
|
||||
type: String,
|
||||
index: true,
|
||||
},
|
||||
messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }],
|
||||
// google only
|
||||
examples: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
|
||||
agentOptions: {
|
||||
type: mongoose.Schema.Types.Mixed,
|
||||
},
|
||||
...conversationPreset,
|
||||
agent_id: {
|
||||
type: String,
|
||||
},
|
||||
tags: {
|
||||
type: [String],
|
||||
default: [],
|
||||
meiliIndex: true,
|
||||
},
|
||||
files: {
|
||||
type: [String],
|
||||
},
|
||||
expiredAt: {
|
||||
type: Date,
|
||||
},
|
||||
},
|
||||
{ timestamps: true },
|
||||
);
|
||||
|
||||
const { convoSchema } = require('@librechat/data-schemas');
|
||||
|
||||
if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
|
||||
convoSchema.plugin(mongoMeili, {
|
||||
host: process.env.MEILI_HOST,
|
||||
apiKey: process.env.MEILI_MASTER_KEY,
|
||||
indexName: 'convos', // Will get created automatically if it doesn't exist already
|
||||
/** Note: Will get created automatically if it doesn't exist already */
|
||||
indexName: 'convos',
|
||||
primaryKey: 'conversationId',
|
||||
});
|
||||
}
|
||||
|
||||
// Create TTL index
|
||||
convoSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
|
||||
convoSchema.index({ createdAt: 1, updatedAt: 1 });
|
||||
convoSchema.index({ conversationId: 1, user: 1 }, { unique: true });
|
||||
|
||||
const Conversation = mongoose.models.Conversation || mongoose.model('Conversation', convoSchema);
|
||||
|
||||
module.exports = Conversation;
|
||||
|
|
|
|||
|
|
@ -1,178 +0,0 @@
|
|||
const conversationPreset = {
|
||||
// endpoint: [azureOpenAI, openAI, anthropic, chatGPTBrowser]
|
||||
endpoint: {
|
||||
type: String,
|
||||
default: null,
|
||||
required: true,
|
||||
},
|
||||
endpointType: {
|
||||
type: String,
|
||||
},
|
||||
// for azureOpenAI, openAI, chatGPTBrowser only
|
||||
model: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
// for bedrock only
|
||||
region: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
// for azureOpenAI, openAI only
|
||||
chatGptLabel: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
// for google only
|
||||
modelLabel: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
promptPrefix: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
temperature: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
top_p: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
// for google only
|
||||
topP: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
topK: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
maxOutputTokens: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
presence_penalty: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
frequency_penalty: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
file_ids: { type: [{ type: String }], default: undefined },
|
||||
// deprecated
|
||||
resendImages: {
|
||||
type: Boolean,
|
||||
},
|
||||
/* Anthropic only */
|
||||
promptCache: {
|
||||
type: Boolean,
|
||||
},
|
||||
system: {
|
||||
type: String,
|
||||
},
|
||||
// files
|
||||
resendFiles: {
|
||||
type: Boolean,
|
||||
},
|
||||
imageDetail: {
|
||||
type: String,
|
||||
},
|
||||
/* agents */
|
||||
agent_id: {
|
||||
type: String,
|
||||
},
|
||||
/* assistants */
|
||||
assistant_id: {
|
||||
type: String,
|
||||
},
|
||||
instructions: {
|
||||
type: String,
|
||||
},
|
||||
stop: { type: [{ type: String }], default: undefined },
|
||||
isArchived: {
|
||||
type: Boolean,
|
||||
default: false,
|
||||
},
|
||||
/* UI Components */
|
||||
iconURL: {
|
||||
type: String,
|
||||
},
|
||||
greeting: {
|
||||
type: String,
|
||||
},
|
||||
spec: {
|
||||
type: String,
|
||||
},
|
||||
tags: {
|
||||
type: [String],
|
||||
default: [],
|
||||
},
|
||||
tools: { type: [{ type: String }], default: undefined },
|
||||
maxContextTokens: {
|
||||
type: Number,
|
||||
},
|
||||
max_tokens: {
|
||||
type: Number,
|
||||
},
|
||||
/** omni models only */
|
||||
reasoning_effort: {
|
||||
type: String,
|
||||
},
|
||||
};
|
||||
|
||||
const agentOptions = {
|
||||
model: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
// for azureOpenAI, openAI only
|
||||
chatGptLabel: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
modelLabel: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
promptPrefix: {
|
||||
type: String,
|
||||
required: false,
|
||||
},
|
||||
temperature: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
top_p: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
// for google only
|
||||
topP: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
topK: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
maxOutputTokens: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
presence_penalty: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
frequency_penalty: {
|
||||
type: Number,
|
||||
required: false,
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
conversationPreset,
|
||||
agentOptions,
|
||||
};
|
||||
|
|

@@ -1,111 +0,0 @@
const { FileSources } = require('librechat-data-provider');
const mongoose = require('mongoose');

/**
 * @typedef {Object} MongoFile
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {number} [__v] - MongoDB Version Key
 * @property {ObjectId} user - User ID
 * @property {string} [conversationId] - Optional conversation ID
 * @property {string} file_id - File identifier
 * @property {string} [temp_file_id] - Temporary File identifier
 * @property {number} bytes - Size of the file in bytes
 * @property {string} filename - Name of the file
 * @property {string} filepath - Location of the file
 * @property {'file'} object - Type of object, always 'file'
 * @property {string} type - Type of file
 * @property {number} [usage=0] - Number of uses of the file
 * @property {string} [context] - Context of the file origin
 * @property {boolean} [embedded=false] - Whether or not the file is embedded in vector db
 * @property {string} [model] - The model to identify the group region of the file (for Azure OpenAI hosting)
 * @property {string} [source] - The source of the file (e.g., from FileSources)
 * @property {number} [width] - Optional width of the file
 * @property {number} [height] - Optional height of the file
 * @property {Object} [metadata] - Metadata related to the file
 * @property {string} [metadata.fileIdentifier] - Unique identifier for the file in metadata
 * @property {Date} [expiresAt] - Optional expiration date of the file
 * @property {Date} [createdAt] - Date when the file was created
 * @property {Date} [updatedAt] - Date when the file was updated
 */

/** @type {MongooseSchema<MongoFile>} */
const fileSchema = mongoose.Schema(
  {
    user: {
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      index: true,
      required: true,
    },
    conversationId: {
      type: String,
      ref: 'Conversation',
      index: true,
    },
    file_id: {
      type: String,
      // required: true,
      index: true,
    },
    temp_file_id: {
      type: String,
      // required: true,
    },
    bytes: {
      type: Number,
      required: true,
    },
    filename: {
      type: String,
      required: true,
    },
    filepath: {
      type: String,
      required: true,
    },
    object: {
      type: String,
      required: true,
      default: 'file',
    },
    embedded: {
      type: Boolean,
    },
    type: {
      type: String,
      required: true,
    },
    context: {
      type: String,
      // required: true,
    },
    usage: {
      type: Number,
      required: true,
      default: 0,
    },
    source: {
      type: String,
      default: FileSources.local,
    },
    model: {
      type: String,
    },
    width: Number,
    height: Number,
    metadata: {
      fileIdentifier: String,
    },
    expiresAt: {
      type: Date,
      expires: 3600, // 1 hour in seconds
    },
  },
  {
    timestamps: true,
  },
);

fileSchema.index({ createdAt: 1, updatedAt: 1 });

module.exports = fileSchema;
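Note the TTL behavior above: declaring `expiresAt` with `expires: 3600` makes Mongoose build a TTL index, so MongoDB's background monitor deletes a file document roughly one hour after its `expiresAt` value. A small sketch, assuming a `File` model registered from this schema and an open connection (names are illustrative):

const mongoose = require('mongoose');
const File = mongoose.models.File || mongoose.model('File', fileSchema);

async function createTemporaryFile(userId) {
  // Removed by the TTL monitor about one hour after `expiresAt`.
  return File.create({
    user: userId,
    file_id: 'file_abc123', // hypothetical identifier
    bytes: 2048,
    filename: 'notes.txt',
    filepath: '/uploads/notes.txt',
    type: 'text/plain',
    expiresAt: new Date(),
  });
}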

@@ -1,24 +0,0 @@
const mongoose = require('mongoose');

const keySchema = mongoose.Schema({
  userId: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
    required: true,
  },
  name: {
    type: String,
    required: true,
  },
  value: {
    type: String,
    required: true,
  },
  expiresAt: {
    type: Date,
  },
});

keySchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });

module.exports = keySchema;
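In contrast to the file schema's delayed TTL, the explicit index here uses `expireAfterSeconds: 0`, so a key document becomes eligible for deletion as soon as its `expiresAt` date passes (the TTL monitor runs roughly once a minute). A hedged sketch, assuming a `Key` model registered from this schema; parameter names are illustrative:

const mongoose = require('mongoose');
const Key = mongoose.models.Key || mongoose.model('Key', keySchema);

async function saveUserKey({ userId, name, value, ttlMs }) {
  // With expireAfterSeconds: 0, the document expires right at `expiresAt`.
  return Key.create({
    userId,
    name,
    value,
    expiresAt: ttlMs ? new Date(Date.now() + ttlMs) : undefined,
  });
}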

@@ -1,145 +1,6 @@
const mongoose = require('mongoose');
const mongoMeili = require('~/models/plugins/mongoMeili');
const messageSchema = mongoose.Schema(
  {
    messageId: {
      type: String,
      unique: true,
      required: true,
      index: true,
      meiliIndex: true,
    },
    conversationId: {
      type: String,
      index: true,
      required: true,
      meiliIndex: true,
    },
    user: {
      type: String,
      index: true,
      required: true,
      default: null,
    },
    model: {
      type: String,
      default: null,
    },
    endpoint: {
      type: String,
    },
    conversationSignature: {
      type: String,
    },
    clientId: {
      type: String,
    },
    invocationId: {
      type: Number,
    },
    parentMessageId: {
      type: String,
    },
    tokenCount: {
      type: Number,
    },
    summaryTokenCount: {
      type: Number,
    },
    sender: {
      type: String,
      meiliIndex: true,
    },
    text: {
      type: String,
      meiliIndex: true,
    },
    summary: {
      type: String,
    },
    isCreatedByUser: {
      type: Boolean,
      required: true,
      default: false,
    },
    unfinished: {
      type: Boolean,
      default: false,
    },
    error: {
      type: Boolean,
      default: false,
    },
    finish_reason: {
      type: String,
    },
    _meiliIndex: {
      type: Boolean,
      required: false,
      select: false,
      default: false,
    },
    files: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
    plugin: {
      type: {
        latest: {
          type: String,
          required: false,
        },
        inputs: {
          type: [mongoose.Schema.Types.Mixed],
          required: false,
          default: undefined,
        },
        outputs: {
          type: String,
          required: false,
        },
      },
      default: undefined,
    },
    plugins: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
    content: {
      type: [{ type: mongoose.Schema.Types.Mixed }],
      default: undefined,
      meiliIndex: true,
    },
    thread_id: {
      type: String,
    },
    /* frontend components */
    iconURL: {
      type: String,
    },
    attachments: { type: [{ type: mongoose.Schema.Types.Mixed }], default: undefined },
    /*
    attachments: {
      type: [
        {
          file_id: String,
          filename: String,
          filepath: String,
          expiresAt: Date,
          width: Number,
          height: Number,
          type: String,
          conversationId: String,
          messageId: {
            type: String,
            required: true,
          },
          toolCallId: String,
        },
      ],
      default: undefined,
    },
    */
    expiredAt: {
      type: Date,
    },
  },
  { timestamps: true },
);
const { messageSchema } = require('@librechat/data-schemas');

if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  messageSchema.plugin(mongoMeili, {

@@ -149,11 +10,7 @@ if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
    primaryKey: 'messageId',
  });
}
messageSchema.index({ expiredAt: 1 }, { expireAfterSeconds: 0 });
messageSchema.index({ createdAt: 1 });
messageSchema.index({ messageId: 1, user: 1 }, { unique: true });

/** @type {mongoose.Model<TMessage>} */
const Message = mongoose.models.Message || mongoose.model('Message', messageSchema);

module.exports = Message;
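Two details in the block above are easy to miss: the Meilisearch plugin is attached only when both `MEILI_HOST` and `MEILI_MASTER_KEY` are set, and the model is registered through `mongoose.models.Message || mongoose.model(...)`, which avoids an OverwriteModelError if the module is evaluated more than once (for example across test suites). A minimal sketch of the same pattern with a hypothetical schema and plugin:

const mongoose = require('mongoose');

const noteSchema = mongoose.Schema({ text: String }, { timestamps: true }); // hypothetical

// Optional plugin, enabled only when its configuration is present.
if (process.env.SEARCH_HOST && process.env.SEARCH_KEY) {
  // noteSchema.plugin(someSearchPlugin, { host: process.env.SEARCH_HOST });
}

// Reuse the already-compiled model if this file is required twice.
const Note = mongoose.models.Note || mongoose.model('Note', noteSchema);

module.exports = Note;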

@@ -1,25 +1,5 @@
const mongoose = require('mongoose');

const pluginAuthSchema = mongoose.Schema(
  {
    authField: {
      type: String,
      required: true,
    },
    value: {
      type: String,
      required: true,
    },
    userId: {
      type: String,
      required: true,
    },
    pluginKey: {
      type: String,
    },
  },
  { timestamps: true },
);
const { pluginAuthSchema } = require('@librechat/data-schemas');

const PluginAuth = mongoose.models.Plugin || mongoose.model('PluginAuth', pluginAuthSchema);

@@ -1,38 +1,5 @@
const mongoose = require('mongoose');
const { conversationPreset } = require('./defaults');
const presetSchema = mongoose.Schema(
  {
    presetId: {
      type: String,
      unique: true,
      required: true,
      index: true,
    },
    title: {
      type: String,
      default: 'New Chat',
      meiliIndex: true,
    },
    user: {
      type: String,
      default: null,
    },
    defaultPreset: {
      type: Boolean,
    },
    order: {
      type: Number,
    },
    // google only
    examples: [{ type: mongoose.Schema.Types.Mixed }],
    ...conversationPreset,
    agentOptions: {
      type: mongoose.Schema.Types.Mixed,
      default: null,
    },
  },
  { timestamps: true },
);
const { presetSchema } = require('@librechat/data-schemas');

const Preset = mongoose.models.Preset || mongoose.model('Preset', presetSchema);
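Because `agentOptions` is typed as `Schema.Types.Mixed`, Mongoose does not track in-place mutations of it; a caller has to assign a new object or call `markModified('agentOptions')` for a save to persist the change. A hedged sketch, assuming a preset document loaded from the `Preset` model above:

async function updateAgentTemperature(preset, temperature) {
  // Replace the Mixed value so Mongoose sees the path as modified...
  preset.agentOptions = { ...(preset.agentOptions || {}), temperature };
  // ...or mutate in place and mark it explicitly:
  // preset.agentOptions.temperature = temperature;
  // preset.markModified('agentOptions');
  return preset.save();
}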

@@ -1,35 +0,0 @@
const { Schema } = require('mongoose');

/**
 * @typedef {Object} MongoProject
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} name - The name of the project
 * @property {ObjectId[]} promptGroupIds - Array of PromptGroup IDs associated with the project
 * @property {Date} [createdAt] - Date when the project was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the project was last updated (added by timestamps)
 */

const projectSchema = new Schema(
  {
    name: {
      type: String,
      required: true,
      index: true,
    },
    promptGroupIds: {
      type: [Schema.Types.ObjectId],
      ref: 'PromptGroup',
      default: [],
    },
    agentIds: {
      type: [String],
      ref: 'Agent',
      default: [],
    },
  },
  {
    timestamps: true,
  },
);

module.exports = projectSchema;

@@ -1,118 +0,0 @@
const mongoose = require('mongoose');
const { Constants } = require('librechat-data-provider');
const Schema = mongoose.Schema;

/**
 * @typedef {Object} MongoPromptGroup
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} name - The name of the prompt group
 * @property {ObjectId} author - The author of the prompt group
 * @property {ObjectId} [projectId=null] - The project ID of the prompt group
 * @property {ObjectId} [productionId=null] - The production prompt ID of the prompt group
 * @property {string} authorName - The name of the author of the prompt group
 * @property {number} [numberOfGenerations=0] - Number of generations the prompt group has
 * @property {string} [oneliner=''] - Oneliner description of the prompt group
 * @property {string} [category=''] - Category of the prompt group
 * @property {string} [command] - Command for the prompt group
 * @property {Date} [createdAt] - Date when the prompt group was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the prompt group was last updated (added by timestamps)
 */

const promptGroupSchema = new Schema(
  {
    name: {
      type: String,
      required: true,
      index: true,
    },
    numberOfGenerations: {
      type: Number,
      default: 0,
    },
    oneliner: {
      type: String,
      default: '',
    },
    category: {
      type: String,
      default: '',
      index: true,
    },
    projectIds: {
      type: [Schema.Types.ObjectId],
      ref: 'Project',
      index: true,
    },
    productionId: {
      type: Schema.Types.ObjectId,
      ref: 'Prompt',
      required: true,
      index: true,
    },
    author: {
      type: Schema.Types.ObjectId,
      ref: 'User',
      required: true,
      index: true,
    },
    authorName: {
      type: String,
      required: true,
    },
    command: {
      type: String,
      index: true,
      validate: {
        validator: function (v) {
          return v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v);
        },
        message: (props) =>
          `${props.value} is not a valid command. Only lowercase alphanumeric characters and hyphens (-) are allowed.`,
      },
      maxlength: [
        Constants.COMMANDS_MAX_LENGTH,
        `Command cannot be longer than ${Constants.COMMANDS_MAX_LENGTH} characters`,
      ],
    },
  },
  {
    timestamps: true,
  },
);

const PromptGroup = mongoose.model('PromptGroup', promptGroupSchema);

const promptSchema = new Schema(
  {
    groupId: {
      type: Schema.Types.ObjectId,
      ref: 'PromptGroup',
      required: true,
      index: true,
    },
    author: {
      type: Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
    prompt: {
      type: String,
      required: true,
    },
    type: {
      type: String,
      enum: ['text', 'chat'],
      required: true,
    },
  },
  {
    timestamps: true,
  },
);

const Prompt = mongoose.model('Prompt', promptSchema);

promptSchema.index({ createdAt: 1, updatedAt: 1 });
promptGroupSchema.index({ createdAt: 1, updatedAt: 1 });

module.exports = { Prompt, PromptGroup };
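The `command` validator above accepts an unset or empty value, and otherwise only strings matching `/^[a-z0-9-]+$/`, capped at `Constants.COMMANDS_MAX_LENGTH`. A standalone check mirroring that rule (the helper name is illustrative):

// Empty is allowed; otherwise lowercase alphanumerics and hyphens only.
function isValidCommand(value) {
  return value === undefined || value === null || value === '' || /^[a-z0-9-]+$/.test(value);
}

console.log(isValidCommand('my-prompt-2')); // true
console.log(isValidCommand('My Prompt')); // false: uppercase and whitespace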

@@ -1,55 +0,0 @@
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const mongoose = require('mongoose');

const roleSchema = new mongoose.Schema({
  name: {
    type: String,
    required: true,
    unique: true,
    index: true,
  },
  [PermissionTypes.BOOKMARKS]: {
    [Permissions.USE]: {
      type: Boolean,
      default: true,
    },
  },
  [PermissionTypes.PROMPTS]: {
    [Permissions.SHARED_GLOBAL]: {
      type: Boolean,
      default: false,
    },
    [Permissions.USE]: {
      type: Boolean,
      default: true,
    },
    [Permissions.CREATE]: {
      type: Boolean,
      default: true,
    },
  },
  [PermissionTypes.AGENTS]: {
    [Permissions.SHARED_GLOBAL]: {
      type: Boolean,
      default: false,
    },
    [Permissions.USE]: {
      type: Boolean,
      default: true,
    },
    [Permissions.CREATE]: {
      type: Boolean,
      default: true,
    },
  },
  [PermissionTypes.MULTI_CONVO]: {
    [Permissions.USE]: {
      type: Boolean,
      default: true,
    },
  },
});

const Role = mongoose.model('Role', roleSchema);

module.exports = Role;
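Since the permission fields are keyed by the `PermissionTypes` and `Permissions` constants, consumers read them with the same computed keys. A hedged sketch, assuming the `Role` model exported above and an existing role document:

const { PermissionTypes, Permissions } = require('librechat-data-provider');

async function canShareGlobalPrompts(roleName) {
  const role = await Role.findOne({ name: roleName }).lean();
  // e.g. role[PermissionTypes.PROMPTS][Permissions.SHARED_GLOBAL]
  return Boolean(role?.[PermissionTypes.PROMPTS]?.[Permissions.SHARED_GLOBAL]);
}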

@@ -1,20 +0,0 @@
const mongoose = require('mongoose');

const sessionSchema = mongoose.Schema({
  refreshTokenHash: {
    type: String,
    required: true,
  },
  expiration: {
    type: Date,
    required: true,
    expires: 0,
  },
  user: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User',
    required: true,
  },
});

module.exports = sessionSchema;

@@ -1,30 +0,0 @@
const mongoose = require('mongoose');

const shareSchema = mongoose.Schema(
  {
    conversationId: {
      type: String,
      required: true,
    },
    title: {
      type: String,
      index: true,
    },
    user: {
      type: String,
      index: true,
    },
    messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }],
    shareId: {
      type: String,
      index: true,
    },
    isPublic: {
      type: Boolean,
      default: true,
    },
  },
  { timestamps: true },
);

module.exports = mongoose.model('SharedLink', shareSchema);

@@ -1,38 +0,0 @@
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

const tokenSchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    required: true,
    ref: 'user',
  },
  email: {
    type: String,
  },
  type: String,
  identifier: {
    type: String,
  },
  token: {
    type: String,
    required: true,
  },
  createdAt: {
    type: Date,
    required: true,
    default: Date.now,
  },
  expiresAt: {
    type: Date,
    required: true,
  },
  metadata: {
    type: Map,
    of: Schema.Types.Mixed,
  },
});

tokenSchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });

module.exports = tokenSchema;

@@ -1,54 +0,0 @@
const mongoose = require('mongoose');

/**
 * @typedef {Object} ToolCallData
 * @property {string} conversationId - The ID of the conversation
 * @property {string} messageId - The ID of the message
 * @property {string} toolId - The ID of the tool
 * @property {string | ObjectId} user - The user's ObjectId
 * @property {unknown} [result] - Optional result data
 * @property {TAttachment[]} [attachments] - Optional attachments data
 * @property {number} [blockIndex] - Optional code block index
 * @property {number} [partIndex] - Optional part index
 */

/** @type {MongooseSchema<ToolCallData>} */
const toolCallSchema = mongoose.Schema(
  {
    conversationId: {
      type: String,
      required: true,
    },
    messageId: {
      type: String,
      required: true,
    },
    toolId: {
      type: String,
      required: true,
    },
    user: {
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      required: true,
    },
    result: {
      type: mongoose.Schema.Types.Mixed,
    },
    attachments: {
      type: mongoose.Schema.Types.Mixed,
    },
    blockIndex: {
      type: Number,
    },
    partIndex: {
      type: Number,
    },
  },
  { timestamps: true },
);

toolCallSchema.index({ messageId: 1, user: 1 });
toolCallSchema.index({ conversationId: 1, user: 1 });

module.exports = mongoose.model('ToolCall', toolCallSchema);
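Both compound indexes above pair a lookup field with `user`, which fits queries that always scope tool call results to the requesting user. A sketch of the query shapes those indexes serve, assuming the `ToolCall` model exported above:

async function getToolCalls({ messageId, conversationId, userId }) {
  // Each filter pairs with `user`, matching the compound indexes above.
  if (messageId) {
    return ToolCall.find({ messageId, user: userId }).lean();
  }
  return ToolCall.find({ conversationId, user: userId }).lean();
}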

@@ -1,42 +0,0 @@
const mongoose = require('mongoose');

const transactionSchema = mongoose.Schema(
  {
    user: {
      type: mongoose.Schema.Types.ObjectId,
      ref: 'User',
      index: true,
      required: true,
    },
    conversationId: {
      type: String,
      ref: 'Conversation',
      index: true,
    },
    tokenType: {
      type: String,
      enum: ['prompt', 'completion', 'credits'],
      required: true,
    },
    model: {
      type: String,
    },
    context: {
      type: String,
    },
    valueKey: {
      type: String,
    },
    rate: Number,
    rawAmount: Number,
    tokenValue: Number,
    inputTokens: { type: Number },
    writeTokens: { type: Number },
    readTokens: { type: Number },
  },
  {
    timestamps: true,
  },
);

module.exports = transactionSchema;

@@ -1,151 +0,0 @@
const mongoose = require('mongoose');
const { SystemRoles } = require('librechat-data-provider');

/**
 * @typedef {Object} MongoSession
 * @property {string} [refreshToken] - The refresh token
 */

/**
 * @typedef {Object} MongoUser
 * @property {ObjectId} [_id] - MongoDB Document ID
 * @property {string} [name] - The user's name
 * @property {string} [username] - The user's username, in lowercase
 * @property {string} email - The user's email address
 * @property {boolean} emailVerified - Whether the user's email is verified
 * @property {string} [password] - The user's password, trimmed with 8-128 characters
 * @property {string} [avatar] - The URL of the user's avatar
 * @property {string} provider - The provider of the user's account (e.g., 'local', 'google')
 * @property {string} [role='USER'] - The role of the user
 * @property {string} [googleId] - Optional Google ID for the user
 * @property {string} [facebookId] - Optional Facebook ID for the user
 * @property {string} [openidId] - Optional OpenID ID for the user
 * @property {string} [ldapId] - Optional LDAP ID for the user
 * @property {string} [githubId] - Optional GitHub ID for the user
 * @property {string} [discordId] - Optional Discord ID for the user
 * @property {string} [appleId] - Optional Apple ID for the user
 * @property {Array} [plugins=[]] - List of plugins used by the user
 * @property {Array.<MongoSession>} [refreshToken] - List of sessions with refresh tokens
 * @property {Date} [expiresAt] - Optional expiration date of the user
 * @property {Date} [createdAt] - Date when the user was created (added by timestamps)
 * @property {Date} [updatedAt] - Date when the user was last updated (added by timestamps)
 */

/** @type {MongooseSchema<MongoSession>} */
const Session = mongoose.Schema({
  refreshToken: {
    type: String,
    default: '',
  },
});

const backupCodeSchema = mongoose.Schema({
  codeHash: { type: String, required: true },
  used: { type: Boolean, default: false },
  usedAt: { type: Date, default: null },
});

/** @type {MongooseSchema<MongoUser>} */
const userSchema = mongoose.Schema(
  {
    name: {
      type: String,
    },
    username: {
      type: String,
      lowercase: true,
      default: '',
    },
    email: {
      type: String,
      required: [true, 'can\'t be blank'],
      lowercase: true,
      unique: true,
      match: [/\S+@\S+\.\S+/, 'is invalid'],
      index: true,
    },
    emailVerified: {
      type: Boolean,
      required: true,
      default: false,
    },
    password: {
      type: String,
      trim: true,
      minlength: 8,
      maxlength: 128,
    },
    avatar: {
      type: String,
      required: false,
    },
    provider: {
      type: String,
      required: true,
      default: 'local',
    },
    role: {
      type: String,
      default: SystemRoles.USER,
    },
    googleId: {
      type: String,
      unique: true,
      sparse: true,
    },
    facebookId: {
      type: String,
      unique: true,
      sparse: true,
    },
    openidId: {
      type: String,
      unique: true,
      sparse: true,
    },
    ldapId: {
      type: String,
      unique: true,
      sparse: true,
    },
    githubId: {
      type: String,
      unique: true,
      sparse: true,
    },
    discordId: {
      type: String,
      unique: true,
      sparse: true,
    },
    appleId: {
      type: String,
      unique: true,
      sparse: true,
    },
    plugins: {
      type: Array,
    },
    totpSecret: {
      type: String,
    },
    backupCodes: {
      type: [backupCodeSchema],
    },
    refreshToken: {
      type: [Session],
    },
    expiresAt: {
      type: Date,
      expires: 604800, // 7 days in seconds
    },
    termsAccepted: {
      type: Boolean,
      default: false,
    },
  },

  { timestamps: true },
);

module.exports = userSchema;
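Each provider ID field (googleId, githubId, and so on) combines `unique: true` with `sparse: true`: the unique index only covers documents that actually contain the field, so the many accounts without, say, a `googleId` do not collide with one another. Roughly, the field-level options compile to an index like the following (shown for illustration only, not something to declare a second time):

// Equivalent shape of the index produced by `unique: true, sparse: true` on googleId:
userSchema.index({ googleId: 1 }, { unique: true, sparse: true });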

@@ -79,6 +79,7 @@ const tokenValues = Object.assign(
    'o1-mini': { prompt: 1.1, completion: 4.4 },
    'o1-preview': { prompt: 15, completion: 60 },
    o1: { prompt: 15, completion: 60 },
    'gpt-4.5': { prompt: 75, completion: 150 },
    'gpt-4o-mini': { prompt: 0.15, completion: 0.6 },
    'gpt-4o': { prompt: 2.5, completion: 10 },
    'gpt-4o-2024-05-13': { prompt: 5, completion: 15 },

@@ -88,6 +89,8 @@ const tokenValues = Object.assign(
    'claude-3-sonnet': { prompt: 3, completion: 15 },
    'claude-3-5-sonnet': { prompt: 3, completion: 15 },
    'claude-3.5-sonnet': { prompt: 3, completion: 15 },
    'claude-3-7-sonnet': { prompt: 3, completion: 15 },
    'claude-3.7-sonnet': { prompt: 3, completion: 15 },
    'claude-3-5-haiku': { prompt: 0.8, completion: 4 },
    'claude-3.5-haiku': { prompt: 0.8, completion: 4 },
    'claude-3-haiku': { prompt: 0.25, completion: 1.25 },

@@ -110,6 +113,14 @@ const tokenValues = Object.assign(
    'gemini-1.5': { prompt: 2.5, completion: 10 },
    'gemini-pro-vision': { prompt: 0.5, completion: 1.5 },
    gemini: { prompt: 0.5, completion: 1.5 },
    'grok-2-vision-1212': { prompt: 2.0, completion: 10.0 },
    'grok-2-vision-latest': { prompt: 2.0, completion: 10.0 },
    'grok-2-vision': { prompt: 2.0, completion: 10.0 },
    'grok-vision-beta': { prompt: 5.0, completion: 15.0 },
    'grok-2-1212': { prompt: 2.0, completion: 10.0 },
    'grok-2-latest': { prompt: 2.0, completion: 10.0 },
    'grok-2': { prompt: 2.0, completion: 10.0 },
    'grok-beta': { prompt: 5.0, completion: 15.0 },
  },
  bedrockValues,
);

@@ -121,6 +132,8 @@ const tokenValues = Object.assign(
 * @type {Object.<string, {write: number, read: number }>}
 */
const cacheTokenValues = {
  'claude-3.7-sonnet': { write: 3.75, read: 0.3 },
  'claude-3-7-sonnet': { write: 3.75, read: 0.3 },
  'claude-3.5-sonnet': { write: 3.75, read: 0.3 },
  'claude-3-5-sonnet': { write: 3.75, read: 0.3 },
  'claude-3.5-haiku': { write: 1, read: 0.08 },

@@ -155,6 +168,8 @@ const getValueKey = (model, endpoint) => {
    return 'o1-mini';
  } else if (modelName.includes('o1')) {
    return 'o1';
  } else if (modelName.includes('gpt-4.5')) {
    return 'gpt-4.5';
  } else if (modelName.includes('gpt-4o-2024-05-13')) {
    return 'gpt-4o-2024-05-13';
  } else if (modelName.includes('gpt-4o-mini')) {
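The `tokenValues` entries read like USD per one million tokens (for example, `gpt-4o` at 2.5 prompt / 10 completion), and `getMultiplier({ model, tokenType })`, as exercised by the tests below, resolves a model name to one of those rates. A back-of-the-envelope sketch under that per-1M assumption; the require path is assumed for illustration:

const { getMultiplier } = require('./tx'); // path assumed

// Assumption: multipliers are USD per 1,000,000 tokens.
function estimateCostUSD({ model, promptTokens, completionTokens }) {
  const promptRate = getMultiplier({ model, tokenType: 'prompt' });
  const completionRate = getMultiplier({ model, tokenType: 'completion' });
  return (promptTokens * promptRate + completionTokens * completionRate) / 1e6;
}

// e.g. 10,000 prompt + 2,000 completion tokens on gpt-4o:
// (10000 * 2.5 + 2000 * 10) / 1e6 = 0.045 → about $0.045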

@@ -50,6 +50,16 @@ describe('getValueKey', () => {
    expect(getValueKey('gpt-4-0125')).toBe('gpt-4-1106');
  });

  it('should return "gpt-4.5" for model type of "gpt-4.5"', () => {
    expect(getValueKey('gpt-4.5-preview')).toBe('gpt-4.5');
    expect(getValueKey('gpt-4.5-2024-08-06')).toBe('gpt-4.5');
    expect(getValueKey('gpt-4.5-2024-08-06-0718')).toBe('gpt-4.5');
    expect(getValueKey('openai/gpt-4.5')).toBe('gpt-4.5');
    expect(getValueKey('openai/gpt-4.5-2024-08-06')).toBe('gpt-4.5');
    expect(getValueKey('gpt-4.5-turbo')).toBe('gpt-4.5');
    expect(getValueKey('gpt-4.5-0125')).toBe('gpt-4.5');
  });

  it('should return "gpt-4o" for model type of "gpt-4o"', () => {
    expect(getValueKey('gpt-4o-2024-08-06')).toBe('gpt-4o');
    expect(getValueKey('gpt-4o-2024-08-06-0718')).toBe('gpt-4o');

@@ -80,6 +90,20 @@ describe('getValueKey', () => {
    expect(getValueKey('chatgpt-4o-latest-0718')).toBe('gpt-4o');
  });

  it('should return "claude-3-7-sonnet" for model type of "claude-3-7-sonnet-"', () => {
    expect(getValueKey('claude-3-7-sonnet-20240620')).toBe('claude-3-7-sonnet');
    expect(getValueKey('anthropic/claude-3-7-sonnet')).toBe('claude-3-7-sonnet');
    expect(getValueKey('claude-3-7-sonnet-turbo')).toBe('claude-3-7-sonnet');
    expect(getValueKey('claude-3-7-sonnet-0125')).toBe('claude-3-7-sonnet');
  });

  it('should return "claude-3.7-sonnet" for model type of "claude-3.7-sonnet-"', () => {
    expect(getValueKey('claude-3.7-sonnet-20240620')).toBe('claude-3.7-sonnet');
    expect(getValueKey('anthropic/claude-3.7-sonnet')).toBe('claude-3.7-sonnet');
    expect(getValueKey('claude-3.7-sonnet-turbo')).toBe('claude-3.7-sonnet');
    expect(getValueKey('claude-3.7-sonnet-0125')).toBe('claude-3.7-sonnet');
  });

  it('should return "claude-3-5-sonnet" for model type of "claude-3-5-sonnet-"', () => {
    expect(getValueKey('claude-3-5-sonnet-20240620')).toBe('claude-3-5-sonnet');
    expect(getValueKey('anthropic/claude-3-5-sonnet')).toBe('claude-3-5-sonnet');

@@ -458,3 +482,30 @@ describe('Google Model Tests', () => {
    });
  });
});

describe('Grok Model Tests - Pricing', () => {
  describe('getMultiplier', () => {
    test('should return correct prompt and completion rates for Grok vision models', () => {
      const models = ['grok-2-vision-1212', 'grok-2-vision', 'grok-2-vision-latest'];
      models.forEach((model) => {
        expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(2.0);
        expect(getMultiplier({ model, tokenType: 'completion' })).toBe(10.0);
      });
    });

    test('should return correct prompt and completion rates for Grok text models', () => {
      const models = ['grok-2-1212', 'grok-2', 'grok-2-latest'];
      models.forEach((model) => {
        expect(getMultiplier({ model, tokenType: 'prompt' })).toBe(2.0);
        expect(getMultiplier({ model, tokenType: 'completion' })).toBe(10.0);
      });
    });

    test('should return correct prompt and completion rates for Grok beta models', () => {
      expect(getMultiplier({ model: 'grok-vision-beta', tokenType: 'prompt' })).toBe(5.0);
      expect(getMultiplier({ model: 'grok-vision-beta', tokenType: 'completion' })).toBe(15.0);
      expect(getMultiplier({ model: 'grok-beta', tokenType: 'prompt' })).toBe(5.0);
      expect(getMultiplier({ model: 'grok-beta', tokenType: 'completion' })).toBe(15.0);
    });
  });
});