
* Basic implementation of ChatGPT conversation import
* remove debug code
* Handle citations
* Fix updatedAt in import
* update default model
* Use job scheduler to handle import requests
* import job status endpoint
* Add wrapper around Agenda
* Rate limits for import endpoint
* rename import api path
* Batch save import to mongo
* Improve naming
* Add documenting comments
* Test for importers
* Change button for importing conversations
* Frontend changes
* Import job status endpoint
* Import endpoint response
* Add translations to new phrases
* Fix conversations refreshing
* cleanup unused functions
* set timeout for import job status polling
* Add documentation
* get extra spaces back
* Improve error message
* Fix translation files after merge
* fix translation files 2
* Add zh translation for import functionality
* Sync Meilisearch index after import
* chore: add dummy URI for jest tests, as MONGO_URI should only be real for E2E tests
* docs: fix links
* docs: fix conversationsImport section
* fix: user role issue for librechat imports
* refactor: import conversations from json
  - organize imports
  - add additional jsdocs
  - use multer with diskStorage to avoid loading file into memory outside of job
  - use filepath instead of loading data string for imports
  - replace console logs and some logger.info() with logger.debug
  - only use multer for import route
* fix: undefined metadata edge case and replace ChatGtp -> ChatGpt
* Refactor importChatGptConvo function to handle undefined metadata edge case and replace ChatGtp with ChatGpt
* fix: chatgpt importer
* feat: maintain tree relationship for librechat messages
* chore: use enum
* refactor: saveMessage to use single object arg, replace console logs, add userId to log message
* chore: additional comment
* chore: multer edge case
* feat: first pass, maintain tree relationship
* chore: organize
* chore: remove log
* ci: add hierarchy test for chatgpt
* ci: test maintaining of hierarchy for librechat
* wip: allow non-text content type messages
* refactor: import content part object json string
* refactor: more content types to format
* chore: consolidate messageText formatting
* docs: update on changes, bump data-provider/config versions, update readme
* refactor(indexSync): singleton pattern for MeiliSearchClient
* refactor: debug log after batch is done
* chore: add back indexSync error handling

Co-authored-by: jakubmieszczak <jakub.mieszczak@zendesk.com>
Co-authored-by: Danny Avila <danny@librechat.ai>
153 lines · 5.3 KiB · JavaScript
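/**
 * Conversation model helpers: single-conversation lookup, upserts, bulk import
 * saves, pagination, and deletion, built on the Conversation schema.
 */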
const Conversation = require('./schema/convoSchema');
const { getMessages, deleteMessages } = require('./Message');
const logger = require('~/config/winston');
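/**
 * Retrieves a single conversation for a given user and conversation ID.
 *
 * @param {string|ObjectId} user - The user's ID.
 * @param {string} conversationId - The conversation's ID.
 * @returns {Promise<Object|null>} The conversation as a plain object, `null` if not found,
 *          or an error message object on failure.
 */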
const getConvo = async (user, conversationId) => {
  try {
    return await Conversation.findOne({ user, conversationId }).lean();
  } catch (error) {
    logger.error('[getConvo] Error getting single conversation', error);
    return { message: 'Error getting single conversation' };
  }
};
module.exports = {
  Conversation,
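  /**
   * Saves (upserts) a conversation for a user, attaching its current messages.
   * If `newConversationId` is provided, the conversation is renamed to that ID.
   *
   * @param {string|ObjectId} user - The user's ID.
   * @param {Object} convo - Conversation fields; must include `conversationId`,
   *          and may include `newConversationId` to rename the conversation.
   * @returns {Promise<Object>} The updated (or newly created) conversation,
   *          or an error message object on failure.
   */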
  saveConvo: async (user, { conversationId, newConversationId, ...convo }) => {
    try {
      const messages = await getMessages({ conversationId });
      const update = { ...convo, messages, user };
      if (newConversationId) {
        update.conversationId = newConversationId;
      }

      return await Conversation.findOneAndUpdate({ conversationId, user }, update, {
        new: true,
        upsert: true,
      });
    } catch (error) {
      logger.error('[saveConvo] Error saving conversation', error);
      return { message: 'Error saving conversation' };
    }
  },
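  /**
   * Upserts multiple conversations in a single bulk write, matching on
   * `conversationId` and `user`. Timestamps are left untouched so imported
   * conversations keep their original `createdAt`/`updatedAt` values.
   *
   * @param {Object[]} conversations - Conversation objects; each must include `conversationId` and `user`.
   * @returns {Promise<Object>} The result of the bulk write operation.
   * @throws {Error} If the bulk write fails.
   *
   * @example
   * // Hypothetical import payload; fields beyond `conversationId` and `user` depend on the convo schema.
   * await bulkSaveConvos([
   *   { conversationId: 'uuid-1', user: 'someUserId', title: 'Imported chat' },
   * ]);
   */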
  bulkSaveConvos: async (conversations) => {
    try {
      const bulkOps = conversations.map((convo) => ({
        updateOne: {
          filter: { conversationId: convo.conversationId, user: convo.user },
          update: convo,
          upsert: true,
          timestamps: false,
        },
      }));

      const result = await Conversation.bulkWrite(bulkOps);
      return result;
    } catch (error) {
      logger.error('[bulkSaveConvos] Error saving conversations in bulk', error);
      throw new Error('Failed to save conversations in bulk.');
    }
  },
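  /**
   * Retrieves a page of a user's conversations, most recently updated first.
   *
   * @param {string|ObjectId} user - The user's ID.
   * @param {number} [pageNumber=1] - The page to retrieve (1-indexed).
   * @param {number} [pageSize=25] - The number of conversations per page.
   * @returns {Promise<Object>} The page of conversations with `pages`, `pageNumber`,
   *          and `pageSize`, or an error message object on failure.
   */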
  getConvosByPage: async (user, pageNumber = 1, pageSize = 25) => {
    try {
      const totalConvos = (await Conversation.countDocuments({ user })) || 1;
      const totalPages = Math.ceil(totalConvos / pageSize);
      const convos = await Conversation.find({ user })
        .sort({ updatedAt: -1 })
        .skip((pageNumber - 1) * pageSize)
        .limit(pageSize)
        .lean();
      return { conversations: convos, pages: totalPages, pageNumber, pageSize };
    } catch (error) {
      logger.error('[getConvosByPage] Error getting conversations', error);
      return { message: 'Error getting conversations' };
    }
  },
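  /**
   * Retrieves the conversations matching a list of conversation IDs, grouped into
   * pages of `pageSize`, along with a `convoMap` keyed by conversation ID.
   *
   * @param {string|ObjectId} user - The user's ID.
   * @param {Object[]} convoIds - Objects containing the `conversationId`s to fetch.
   * @param {number} [pageNumber=1] - The page to return (1-indexed).
   * @param {number} [pageSize=25] - The number of conversations per page.
   * @returns {Promise<Object>} The requested page plus `cache`, `convoMap`, and paging info,
   *          or an error message object on failure.
   */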
  getConvosQueried: async (user, convoIds, pageNumber = 1, pageSize = 25) => {
    try {
      if (!convoIds || convoIds.length === 0) {
        return { conversations: [], pages: 1, pageNumber, pageSize };
      }

      const cache = {};
      const convoMap = {};
      const promises = [];

      convoIds.forEach((convo) =>
        promises.push(
          Conversation.findOne({
            user,
            conversationId: convo.conversationId,
          }).lean(),
        ),
      );

      const results = (await Promise.all(promises)).filter(Boolean);

      results.forEach((convo, i) => {
        const page = Math.floor(i / pageSize) + 1;
        if (!cache[page]) {
          cache[page] = [];
        }
        cache[page].push(convo);
        convoMap[convo.conversationId] = convo;
      });

      const totalPages = Math.ceil(results.length / pageSize);
      cache.pages = totalPages;
      cache.pageSize = pageSize;
      return {
        cache,
        conversations: cache[pageNumber] || [],
        pages: totalPages || 1,
        pageNumber,
        pageSize,
        convoMap,
      };
    } catch (error) {
      logger.error('[getConvosQueried] Error getting conversations', error);
      return { message: 'Error fetching conversations' };
    }
  },
  getConvo,
  /* chore: this method is not properly error handled */
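  /**
   * Retrieves the title of a conversation, falling back to 'New Chat' when the
   * conversation cannot be found.
   *
   * @param {string|ObjectId} user - The user's ID.
   * @param {string} conversationId - The conversation's ID.
   * @returns {Promise<string|null>} The conversation title, `null` if the conversation exists
   *          but has no title yet, or an error message object on failure.
   */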
  getConvoTitle: async (user, conversationId) => {
    try {
      const convo = await getConvo(user, conversationId);
      /* ChatGPT Browser was triggering an error here due to the convo being saved later */
      if (convo && !convo.title) {
        return null;
      } else {
        // Optional chaining guards against a null convo, which previously threw:
        // TypeError: Cannot read properties of null (reading 'title')
        return convo?.title || 'New Chat';
      }
    } catch (error) {
      logger.error('[getConvoTitle] Error getting conversation title', error);
      return { message: 'Error getting conversation title' };
    }
  },
  /**
   * Asynchronously deletes conversations and associated messages for a given user and filter.
   *
   * @async
   * @function
   * @param {string|ObjectId} user - The user's ID.
   * @param {Object} filter - Additional filter criteria for the conversations to be deleted.
   * @returns {Promise<{ n: number, ok: number, deletedCount: number, messages: { n: number, ok: number, deletedCount: number } }>}
   *          An object containing the count of deleted conversations and associated messages.
   * @throws {Error} Throws an error if there's an issue with the database operations.
   *
   * @example
   * const user = 'someUserId';
   * const filter = { someField: 'someValue' };
   * const result = await deleteConvos(user, filter);
   * logger.debug(result); // { n: 5, ok: 1, deletedCount: 5, messages: { n: 10, ok: 1, deletedCount: 10 } }
   */
  deleteConvos: async (user, filter) => {
    const toRemove = await Conversation.find({ ...filter, user }).select('conversationId');
    const ids = toRemove.map((instance) => instance.conversationId);
    const deleteCount = await Conversation.deleteMany({ ...filter, user });
    deleteCount.messages = await deleteMessages({ conversationId: { $in: ids } });
    return deleteCount;
  },
};