Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-12-23 20:00:15 +01:00
* 🧠 feat: User Memories for Conversational Context
chore: mcp typing, use `t`
WIP: first pass, Memories UI
- Added MemoryViewer component for displaying, editing, and deleting user memories.
- Integrated data provider hooks for fetching, updating, and deleting memories.
- Implemented pagination and loading states for better user experience.
- Created unit tests for MemoryViewer to ensure functionality and interaction with data provider.
- Updated translation files to include new UI strings related to memories.
chore: move mcp-related files to own directory
chore: rename librechat-mcp to librechat-api
WIP: first pass, memory processing and data schemas
chore: linting in fileSearch.js query description
chore: rename librechat-api to @librechat/api across the project
WIP: first pass, functional memory agent
feat: add MemoryEditDialog and MemoryViewer components for managing user memories
- Introduced MemoryEditDialog for editing memory entries with validation and toast notifications.
- Updated MemoryViewer to support editing and deleting memories, including pagination and loading states.
- Enhanced data provider to handle memory updates with optional original key for better management.
- Added new localization strings for memory-related UI elements.
feat: add memory permissions management
- Implemented memory permissions in the backend, allowing roles to have specific permissions for using, creating, updating, and reading memories.
- Added new API endpoints for updating memory permissions associated with roles.
- Created a new AdminSettings component for managing memory permissions in the frontend.
- Integrated memory permissions into the existing roles and permissions schemas.
- Updated the interface to include memory settings and permissions.
- Enhanced the MemoryViewer component to conditionally render admin settings based on user roles.
- Added localization support for memory permissions in the translation files.
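As a rough illustration of the role-scoped permission shape these commits describe, the memory permission group could look something like the sketch below; the field names and structure are assumptions for orientation only, not the project's exact schema.

// Hypothetical sketch: role-level memory permissions (names illustrative)
interface MemoryPermissions {
  USE: boolean; // the agent may consult stored memories during a conversation
  CREATE: boolean; // new memory entries may be created
  UPDATE: boolean; // existing entries may be edited
  READ: boolean; // entries may be listed in the Memory viewer
}

interface RolePermissions {
  MEMORIES: MemoryPermissions;
  // ...other permission groups managed alongside memories
}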
feat: move AdminSettings component to a new position in MemoryViewer for better visibility
refactor: clean up commented code in MemoryViewer component
feat: enhance MemoryViewer with search functionality and improve MemoryEditDialog integration
- Added a search input to filter memories in the MemoryViewer component.
- Refactored MemoryEditDialog to accept children for better customization.
- Updated MemoryViewer to utilize the new EditMemoryButton and DeleteMemoryButton components for editing and deleting memories.
- Improved localization support by adding new strings for memory filtering and deletion confirmation.
refactor: optimize memory filtering in MemoryViewer using match-sorter
- Replaced manual filtering logic with match-sorter for improved search functionality.
- Enhanced performance and readability of the filteredMemories computation.
feat: enhance MemoryEditDialog with triggerRef and improve updateMemory mutation handling
feat: implement access control for MemoryEditDialog and MemoryViewer components
refactor: remove commented out code and create runMemory method
refactor: rename role-based files
feat: implement access control for memory usage in AgentClient
refactor: simplify checkVisionRequest method in AgentClient by removing commented-out code
refactor: make `agents` dir in api package
refactor: migrate Azure utilities to TypeScript and consolidate imports
refactor: move sanitizeFilename function to a new file and update imports, add related tests
refactor: update LLM configuration types and consolidate Azure options in the API package
chore: linting
chore: import order
refactor: replace getLLMConfig with getOpenAIConfig and remove unused LLM configuration file
chore: update winston-daily-rotate-file to version 5.0.0 and add object-hash dependency in package-lock.json
refactor: move primeResources and optionalChainWithEmptyCheck functions to resources.ts and update imports
refactor: move createRun function to a new run.ts file and update related imports
fix: ensure safeAttachments is correctly typed as an array of TFile
chore: add node-fetch dependency and refactor fetch-related functions into packages/api/utils, removing the old generators file
refactor: enhance TEndpointOption type by using Pick to streamline endpoint fields and add new properties for model parameters and client options
feat: implement initializeOpenAIOptions function and update OpenAI types for enhanced configuration handling
fix: update types due to new TEndpointOption typing
fix: ensure safe access to group parameters in initializeOpenAIOptions function
fix: remove redundant API key validation comment in initializeOpenAIOptions function
refactor: rename initializeOpenAIOptions to initializeOpenAI for consistency and update related documentation
refactor: decouple req.body fields and tool loading from initializeAgentOptions
chore: linting
refactor: adjust column widths in MemoryViewer for improved layout
refactor: simplify agent initialization by creating loadAgent function and removing unused code
feat: add memory configuration loading and validation functions
WIP: first pass, memory processing with config
feat: implement memory callback and artifact handling
feat: implement memory artifacts display and processing updates
feat: add memory configuration options and schema validation for validKeys
fix: update MemoryEditDialog and MemoryViewer to handle memory state and display improvements
refactor: remove padding from BookmarkTable and MemoryViewer headers for consistent styling
WIP: initial tokenLimit config and move Tokenizer to @librechat/api
refactor: update mongoMeili plugin methods to use callback for better error handling
feat: enhance memory management with token tracking and usage metrics
- Added token counting for memory entries to enforce limits and provide usage statistics.
- Updated memory retrieval and update routes to include total token usage and limit.
- Enhanced MemoryEditDialog and MemoryViewer components to display memory usage and token information.
- Refactored memory processing functions to handle token limits and provide feedback on memory capacity.
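To make the capacity check concrete, a minimal sketch of the kind of guard these commits describe is shown below; the helper name and signature are assumptions, not the project's actual API. A new entry is accepted only while the user's running token total stays within the configured limit.

// Hypothetical helper: accept a new memory entry only if the user's total
// stored tokens, plus the new entry, stay within the configured token limit.
function canStoreMemory(totalTokens: number, entryTokens: number, tokenLimit: number): boolean {
  return totalTokens + entryTokens <= tokenLimit;
}

// e.g. canStoreMemory(usage.totalTokens, newEntryTokens, config.tokenLimit)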
feat: implement memory artifact handling in attachment handler
- Enhanced useAttachmentHandler to process memory artifacts when receiving updates.
- Introduced handleMemoryArtifact utility to manage memory updates and deletions.
- Updated query client to reflect changes in memory state based on incoming data.
refactor: restructure web search key extraction logic
- Moved the logic for extracting API keys from the webSearchAuth configuration into a dedicated function, getWebSearchKeys.
- Updated webSearchKeys to utilize the new function for improved clarity and maintainability.
- Prevents build-time errors
feat: add personalization settings and memory preferences management
- Introduced a new Personalization tab in settings to manage user memory preferences.
- Implemented API endpoints and client-side logic for updating memory preferences.
- Enhanced user interface components to reflect personalization options and memory usage.
- Updated permissions to allow users to opt out of memory features.
- Added localization support for new settings and messages related to personalization.
style: personalization switch class
feat: add PersonalizationIcon and align Side Panel UI
feat: implement memory creation functionality
- Added a new API endpoint for creating memory entries, including validation for key and value.
- Introduced MemoryCreateDialog component for user interface to facilitate memory creation.
- Integrated token limit checks to prevent exceeding user memory capacity.
- Updated MemoryViewer to include a button for opening the memory creation dialog.
- Enhanced localization support for new messages related to memory creation.
feat: enhance message processing with configurable window size
- Updated AgentClient to use a configurable message window size for processing messages.
- Introduced messageWindowSize option in memory configuration schema with a default value of 5.
- Improved logic for selecting messages to process based on the configured window size.
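For orientation, the memory configuration values referenced across these commits (validKeys, tokenLimit, and messageWindowSize with its default of 5) could be modeled roughly as in the sketch below. This is a hedged sketch using zod for validation; the exact schema and field set in the repository may differ.

import { z } from 'zod';

// Illustrative sketch only: beyond validKeys, tokenLimit, and messageWindowSize
// (default 5), nothing here is confirmed to match the project's schema.
const memoryConfigSchema = z.object({
  validKeys: z.array(z.string()).optional(), // restrict which memory keys may be written
  tokenLimit: z.number().optional(), // cap total tokens stored per user
  messageWindowSize: z.number().default(5), // recent messages scanned for memory updates
});

export type MemoryConfig = z.infer<typeof memoryConfigSchema>;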
chore: update librechat-data-provider version to 0.7.87 in package.json and package-lock.json
chore: remove OpenAPIPlugin and its associated tests
chore: remove MIGRATION_README.md as migration tasks are completed
ci: fix backend tests
chore: remove unused translation keys from localization file
chore: remove problematic test file and unused var in AgentClient
chore: remove unused import and import directly for JSDoc
* feat: add api package build stage in Dockerfile for improved modularity
* docs: reorder build steps in contributing guide for clarity
564 lines
18 KiB
TypeScript
import _ from 'lodash';
import { MeiliSearch, Index } from 'meilisearch';
import type {
  CallbackWithoutResultAndOptionalError,
  FilterQuery,
  Document,
  Schema,
  Query,
  Types,
  Model,
} from 'mongoose';
import logger from '~/config/meiliLogger';

interface MongoMeiliOptions {
  host: string;
  apiKey: string;
  indexName: string;
  primaryKey: string;
  mongoose: typeof import('mongoose');
}

interface MeiliIndexable {
  [key: string]: unknown;
  _meiliIndex?: boolean;
}

interface ContentItem {
  type: string;
  text?: string;
}

interface DocumentWithMeiliIndex extends Document {
  _meiliIndex?: boolean;
  preprocessObjectForIndex?: () => Record<string, unknown>;
  addObjectToMeili?: (next: CallbackWithoutResultAndOptionalError) => Promise<void>;
  updateObjectToMeili?: (next: CallbackWithoutResultAndOptionalError) => Promise<void>;
  deleteObjectFromMeili?: (next: CallbackWithoutResultAndOptionalError) => Promise<void>;
  postSaveHook?: (next: CallbackWithoutResultAndOptionalError) => void;
  postUpdateHook?: (next: CallbackWithoutResultAndOptionalError) => void;
  postRemoveHook?: (next: CallbackWithoutResultAndOptionalError) => void;
  conversationId?: string;
  content?: ContentItem[];
  messageId?: string;
  unfinished?: boolean;
  messages?: unknown[];
  title?: string;
  toJSON(): Record<string, unknown>;
}

interface SchemaWithMeiliMethods extends Model<DocumentWithMeiliIndex> {
  syncWithMeili(): Promise<void>;
  setMeiliIndexSettings(settings: Record<string, unknown>): Promise<unknown>;
  meiliSearch(q: string, params: Record<string, unknown>, populate: boolean): Promise<unknown>;
}

// Environment flags

/**
 * Flag to indicate if search is enabled based on environment variables.
 */
const searchEnabled = process.env.SEARCH != null && process.env.SEARCH.toLowerCase() === 'true';

/**
 * Flag to indicate if MeiliSearch is enabled based on required environment variables.
 */
const meiliEnabled =
  process.env.MEILI_HOST != null && process.env.MEILI_MASTER_KEY != null && searchEnabled;

/**
 * Local implementation of parseTextParts to avoid dependency on librechat-data-provider.
 * Extracts text content from an array of content items.
 */
const parseTextParts = (content: ContentItem[]): string => {
  if (!Array.isArray(content)) {
    return '';
  }

  return content
    .filter((item) => item.type === 'text' && typeof item.text === 'string')
    .map((item) => item.text)
    .join(' ')
    .trim();
};

/**
 * Local implementation to handle Bing convoId conversion.
 */
const cleanUpPrimaryKeyValue = (value: string): string => {
  return value.replace(/--/g, '|');
};

/**
 * Validates the required options for configuring the mongoMeili plugin.
 */
const validateOptions = (options: Partial<MongoMeiliOptions>): void => {
  const requiredKeys: (keyof MongoMeiliOptions)[] = ['host', 'apiKey', 'indexName'];
  requiredKeys.forEach((key) => {
    if (!options[key]) {
      throw new Error(`Missing mongoMeili Option: ${key}`);
    }
  });
};

/**
 * Factory function to create a MeiliMongooseModel class which extends a Mongoose model.
 * This class contains static and instance methods to synchronize and manage the MeiliSearch index
 * corresponding to the MongoDB collection.
 *
 * @param config - Configuration object.
 * @param config.index - The MeiliSearch index object.
 * @param config.attributesToIndex - List of attributes to index.
 * @returns A class definition that will be loaded into the Mongoose schema.
 */
const createMeiliMongooseModel = ({
  index,
  attributesToIndex,
}: {
  index: Index<MeiliIndexable>;
  attributesToIndex: string[];
}) => {
  const primaryKey = attributesToIndex[0];

  class MeiliMongooseModel {
    /**
     * Synchronizes the data between the MongoDB collection and the MeiliSearch index.
     *
     * The synchronization process involves:
     * 1. Fetching all documents from the MongoDB collection and the MeiliSearch index.
     * 2. Comparing documents from both sources.
     * 3. Deleting documents from MeiliSearch that no longer exist in MongoDB.
     * 4. Adding documents to MeiliSearch that exist in MongoDB but not in the index.
     * 5. Updating documents in MeiliSearch if key fields (such as `text` or `title`) differ.
     * 6. Updating the `_meiliIndex` field in MongoDB to indicate the indexing status.
     *
     * Note: The function processes documents in batches because MeiliSearch's
     * `index.getDocuments` requires an exact limit and `index.addDocuments` does not handle
     * partial failures in a batch.
     *
     * @returns {Promise<void>} Resolves when the synchronization is complete.
     */
    static async syncWithMeili(this: SchemaWithMeiliMethods): Promise<void> {
      try {
        let moreDocuments = true;
        const mongoDocuments = await this.find().lean();

        const format = (doc: Record<string, unknown>) =>
          _.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$'));

        const mongoMap = new Map(
          mongoDocuments.map((doc) => {
            const typedDoc = doc as Record<string, unknown>;
            return [typedDoc[primaryKey], format(typedDoc)];
          }),
        );
        const indexMap = new Map<unknown, Record<string, unknown>>();
        let offset = 0;
        const batchSize = 1000;

        while (moreDocuments) {
          const batch = await index.getDocuments({ limit: batchSize, offset });
          if (batch.results.length === 0) {
            moreDocuments = false;
          }
          for (const doc of batch.results) {
            indexMap.set(doc[primaryKey], format(doc));
          }
          offset += batchSize;
        }

        logger.debug('[syncWithMeili]', { indexMap: indexMap.size, mongoMap: mongoMap.size });

        const updateOps: Array<{
          updateOne: {
            filter: Record<string, unknown>;
            update: { $set: { _meiliIndex: boolean } };
          };
        }> = [];

        // Process documents present in the MeiliSearch index
        for (const [id, doc] of indexMap) {
          const update: Record<string, unknown> = {};
          update[primaryKey] = id;
          if (mongoMap.has(id)) {
            const mongoDoc = mongoMap.get(id);
            if (
              (doc.text && doc.text !== mongoDoc?.text) ||
              (doc.title && doc.title !== mongoDoc?.title)
            ) {
              logger.debug(
                `[syncWithMeili] ${id} had document discrepancy in ${
                  doc.text ? 'text' : 'title'
                } field`,
              );
              updateOps.push({
                updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
              });
              await index.addDocuments([doc]);
            }
          } else {
            await index.deleteDocument(id as string);
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: false } } },
            });
          }
        }

        // Process documents present in MongoDB
        for (const [id, doc] of mongoMap) {
          const update: Record<string, unknown> = {};
          update[primaryKey] = id;
          if (!indexMap.has(id)) {
            await index.addDocuments([doc]);
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
            });
          } else if (doc._meiliIndex === false) {
            updateOps.push({
              updateOne: { filter: update, update: { $set: { _meiliIndex: true } } },
            });
          }
        }

        if (updateOps.length > 0) {
          await this.collection.bulkWrite(updateOps);
          logger.debug(
            `[syncWithMeili] Finished indexing ${
              primaryKey === 'messageId' ? 'messages' : 'conversations'
            }`,
          );
        }
      } catch (error) {
        logger.error('[syncWithMeili] Error adding document to Meili:', error);
      }
    }

    /**
     * Updates settings for the MeiliSearch index.
     */
    static async setMeiliIndexSettings(settings: Record<string, unknown>): Promise<unknown> {
      return await index.updateSettings(settings);
    }

    /**
     * Searches the MeiliSearch index and optionally populates results.
     */
    static async meiliSearch(
      this: SchemaWithMeiliMethods,
      q: string,
      params: Record<string, unknown>,
      populate: boolean,
    ): Promise<unknown> {
      const data = await index.search(q, params);

      if (populate) {
        const query: Record<string, unknown> = {};
        query[primaryKey] = _.map(data.hits, (hit) =>
          cleanUpPrimaryKeyValue(hit[primaryKey] as string),
        );

        const projection = Object.keys(this.schema.obj).reduce<Record<string, number>>(
          (results, key) => {
            if (!key.startsWith('$')) {
              results[key] = 1;
            }
            return results;
          },
          { _id: 1, __v: 1 },
        );

        const hitsFromMongoose = await this.find(query, projection).lean();

        const populatedHits = data.hits.map((hit) => {
          const queryObj: Record<string, unknown> = {};
          queryObj[primaryKey] = hit[primaryKey];
          const originalHit = _.find(hitsFromMongoose, (item) => {
            const typedItem = item as Record<string, unknown>;
            return typedItem[primaryKey] === hit[primaryKey];
          });

          return {
            ...(originalHit && typeof originalHit === 'object' ? originalHit : {}),
            ...hit,
          };
        });
        data.hits = populatedHits;
      }

      return data;
    }

    /**
     * Preprocesses the current document for indexing.
     */
    preprocessObjectForIndex(this: DocumentWithMeiliIndex): Record<string, unknown> {
      const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
        k.startsWith('$'),
      );

      if (
        object.conversationId &&
        typeof object.conversationId === 'string' &&
        object.conversationId.includes('|')
      ) {
        object.conversationId = object.conversationId.replace(/\|/g, '--');
      }

      if (object.content && Array.isArray(object.content)) {
        object.text = parseTextParts(object.content);
        delete object.content;
      }

      return object;
    }

    /**
     * Adds the current document to the MeiliSearch index.
     */
    async addObjectToMeili(
      this: DocumentWithMeiliIndex,
      next: CallbackWithoutResultAndOptionalError,
    ): Promise<void> {
      const object = this.preprocessObjectForIndex!();
      try {
        await index.addDocuments([object]);
      } catch (error) {
        logger.error('[addObjectToMeili] Error adding document to Meili:', error);
        return next();
      }

      try {
        await this.collection.updateMany(
          { _id: this._id as Types.ObjectId },
          { $set: { _meiliIndex: true } },
        );
      } catch (error) {
        logger.error('[addObjectToMeili] Error updating _meiliIndex field:', error);
        return next();
      }

      next();
    }

    /**
     * Updates the current document in the MeiliSearch index.
     */
    async updateObjectToMeili(
      this: DocumentWithMeiliIndex,
      next: CallbackWithoutResultAndOptionalError,
    ): Promise<void> {
      try {
        const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
          k.startsWith('$'),
        );
        await index.updateDocuments([object]);
        next();
      } catch (error) {
        logger.error('[updateObjectToMeili] Error updating document in Meili:', error);
        return next();
      }
    }

    /**
     * Deletes the current document from the MeiliSearch index.
     *
     * @returns {Promise<void>}
     */
    async deleteObjectFromMeili(
      this: DocumentWithMeiliIndex,
      next: CallbackWithoutResultAndOptionalError,
    ): Promise<void> {
      try {
        await index.deleteDocument(this._id as string);
        next();
      } catch (error) {
        logger.error('[deleteObjectFromMeili] Error deleting document from Meili:', error);
        return next();
      }
    }

    /**
     * Post-save hook to synchronize the document with MeiliSearch.
     *
     * If the document is already indexed (i.e. `_meiliIndex` is true), it updates it;
     * otherwise, it adds the document to the index.
     */
    postSaveHook(this: DocumentWithMeiliIndex, next: CallbackWithoutResultAndOptionalError): void {
      if (this._meiliIndex) {
        this.updateObjectToMeili!(next);
      } else {
        this.addObjectToMeili!(next);
      }
    }

    /**
     * Post-update hook to update the document in MeiliSearch.
     *
     * This hook is triggered after a document update, ensuring that changes are
     * propagated to the MeiliSearch index if the document is indexed.
     */
    postUpdateHook(
      this: DocumentWithMeiliIndex,
      next: CallbackWithoutResultAndOptionalError,
    ): void {
      if (this._meiliIndex) {
        this.updateObjectToMeili!(next);
      } else {
        next();
      }
    }

    /**
     * Post-remove hook to delete the document from MeiliSearch.
     *
     * This hook is triggered after a document is removed, ensuring that the document
     * is also removed from the MeiliSearch index if it was previously indexed.
     */
    postRemoveHook(
      this: DocumentWithMeiliIndex,
      next: CallbackWithoutResultAndOptionalError,
    ): void {
      if (this._meiliIndex) {
        this.deleteObjectFromMeili!(next);
      } else {
        next();
      }
    }
  }

  return MeiliMongooseModel;
};

/**
 * Mongoose plugin to synchronize MongoDB collections with a MeiliSearch index.
 *
 * This plugin:
 * - Validates the provided options.
 * - Adds a `_meiliIndex` field to the schema to track indexing status.
 * - Sets up a MeiliSearch client and creates an index if it doesn't already exist.
 * - Loads class methods for syncing, searching, and managing documents in MeiliSearch.
 * - Registers Mongoose hooks (post-save, post-update, post-remove, etc.) to maintain index consistency.
 *
 * @param schema - The Mongoose schema to which the plugin is applied.
 * @param options - Configuration options.
 * @param options.host - The MeiliSearch host.
 * @param options.apiKey - The MeiliSearch API key.
 * @param options.indexName - The name of the MeiliSearch index.
 * @param options.primaryKey - The primary key field for indexing.
 */
export default function mongoMeili(schema: Schema, options: MongoMeiliOptions): void {
  const mongoose = options.mongoose;
  validateOptions(options);

  // Add _meiliIndex field to the schema to track if a document has been indexed in MeiliSearch.
  schema.add({
    _meiliIndex: {
      type: Boolean,
      required: false,
      select: false,
      default: false,
    },
  });

  const { host, apiKey, indexName, primaryKey } = options;

  const client = new MeiliSearch({ host, apiKey });
  client.createIndex(indexName, { primaryKey });
  const index = client.index<MeiliIndexable>(indexName);

  // Collect attributes from the schema that should be indexed
  const attributesToIndex: string[] = [
    ...Object.entries(schema.obj).reduce<string[]>((results, [key, value]) => {
      const schemaValue = value as { meiliIndex?: boolean };
      return schemaValue.meiliIndex ? [...results, key] : results;
    }, []),
  ];

  schema.loadClass(createMeiliMongooseModel({ index, attributesToIndex }));

  // Register Mongoose hooks
  schema.post('save', function (doc: DocumentWithMeiliIndex, next) {
    doc.postSaveHook?.(next);
  });

  schema.post('updateOne', function (doc: DocumentWithMeiliIndex, next) {
    doc.postUpdateHook?.(next);
  });

  schema.post('deleteOne', function (doc: DocumentWithMeiliIndex, next) {
    doc.postRemoveHook?.(next);
  });

  // Pre-deleteMany hook: remove corresponding documents from MeiliSearch when multiple documents are deleted.
  schema.pre('deleteMany', async function (next) {
    if (!meiliEnabled) {
      return next();
    }

    try {
      const conditions = (this as Query<unknown, unknown>).getQuery();

      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) {
        const convoIndex = client.index('convos');
        const deletedConvos = await mongoose
          .model('Conversation')
          .find(conditions as FilterQuery<unknown>)
          .lean();
        const promises = deletedConvos.map((convo: Record<string, unknown>) =>
          convoIndex.deleteDocument(convo.conversationId as string),
        );
        await Promise.all(promises);
      }

      if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) {
        const messageIndex = client.index('messages');
        const deletedMessages = await mongoose
          .model('Message')
          .find(conditions as FilterQuery<unknown>)
          .lean();
        const promises = deletedMessages.map((message: Record<string, unknown>) =>
          messageIndex.deleteDocument(message.messageId as string),
        );
        await Promise.all(promises);
      }
      return next();
    } catch (error) {
      if (meiliEnabled) {
        logger.error(
          '[MeiliMongooseModel.deleteMany] There was an issue deleting conversation indexes upon deletion. Next startup may be slow due to syncing.',
          error,
        );
      }
      return next();
    }
  });

  // Post-findOneAndUpdate hook
  schema.post('findOneAndUpdate', async function (doc: DocumentWithMeiliIndex, next) {
    if (!meiliEnabled) {
      return next();
    }

    if (doc.unfinished) {
      return next();
    }

    let meiliDoc: Record<string, unknown> | undefined;
    if (doc.messages) {
      try {
        meiliDoc = await client.index('convos').getDocument(doc.conversationId as string);
      } catch (error: unknown) {
        logger.debug(
          '[MeiliMongooseModel.findOneAndUpdate] Convo not found in MeiliSearch and will index ' +
            doc.conversationId,
          error as Record<string, unknown>,
        );
      }
    }

    if (meiliDoc && meiliDoc.title === doc.title) {
      return next();
    }

    doc.postSaveHook?.(next);
  });
}
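A minimal usage sketch for the plugin above, under the assumption that schema fields opt in to indexing with a `meiliIndex: true` flag (which is what the plugin's attribute collection expects); the model and field names here are illustrative, not the project's actual schemas.

import mongoose, { Schema } from 'mongoose';
import mongoMeili from './plugins/mongoMeili'; // illustrative path

const messageSchema = new Schema({
  messageId: { type: String, required: true, meiliIndex: true },
  conversationId: { type: String, meiliIndex: true },
  text: { type: String, meiliIndex: true },
});

// Apply the plugin only when MeiliSearch is configured; the options mirror MongoMeiliOptions.
if (process.env.MEILI_HOST && process.env.MEILI_MASTER_KEY) {
  messageSchema.plugin(mongoMeili, {
    host: process.env.MEILI_HOST,
    apiKey: process.env.MEILI_MASTER_KEY,
    indexName: 'messages',
    primaryKey: 'messageId',
    mongoose,
  });
}

const Message = mongoose.model('Message', messageSchema);
// Once loaded, the class methods become available on the model, e.g.
// Message.syncWithMeili() at startup or Message.meiliSearch(q, params, true) for search.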