Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-09-22 08:12:00 +02:00

* ✨ feat: improve Nav/Conversations/Convo/NewChat component performance
* ✨ feat: implement cursor-based pagination for conversations API
* 🔧 refactor: remove createdAt from conversation selection in API and type definitions
* 🔧 refactor: include createdAt in conversation selection and update related types
* ✨ fix: search functionality and bugs with loadMoreConversations
* feat: move ArchivedChats to cursor and DataTable standard
* 🔧 refactor: add InfiniteQueryObserverResult type import in Nav component
* feat: enhance conversation listing with pagination, sorting, and search capabilities
* 🔧 refactor: remove unnecessary comment regarding lodash/debounce in ArchivedChatsTable
* 🔧 refactor: remove unused translation keys for archived chats and search results
* 🔧 fix: Archived Chats, Delete Convo, Duplicate Convo
* 🔧 refactor: improve conversation components with layout adjustments and new translations
* 🔧 refactor: simplify archive conversation mutation and improve unarchive handling; fix: update fork mutation
* 🔧 refactor: decode search query parameter in conversation route; improve error handling in unarchive mutation; clean up DataTable component styles
* 🔧 refactor: remove unused translation key for empty archived chats
* 🚀 fix: `archivedConversation` query key not updated correctly while archiving
* 🧠 feat: Bedrock Anthropic Reasoning & Update Endpoint Handling (#6163)
* feat: Add thinking and thinkingBudget parameters for Bedrock Anthropic models
* chore: Update @librechat/agents to version 2.1.8
* refactor: change region order in params
* refactor: Add maxTokens parameter to conversation preset schema
* refactor: Update agent client to use bedrockInputSchema and improve error handling for model parameters
* refactor: streamline/optimize llmConfig initialization and saving for bedrock
* fix: ensure config titleModel is used for all endpoints
* refactor: enhance OpenAIClient and agent initialization to support endpoint checks for OpenRouter
* chore: bump @google/generative-ai
* ✨ feat: improve Nav/Conversations/Convo/NewChat component performance
* 🔧 refactor: remove unnecessary comment regarding lodash/debounce in ArchivedChatsTable
* 🔧 refactor: update translation keys for clarity; simplify conversation query parameters and improve sorting functionality in SharedLinks component
* 🔧 refactor: optimize conversation loading logic and improve search handling in Nav component
* fix: package-lock
* fix: package-lock 2
* fix: package lock 3
* refactor: remove unused utility files and exports to clean up the codebase
* refactor: remove i18n and useAuthRedirect modules to streamline codebase
* refactor: optimize Conversations component and remove unused ToggleContext
* refactor(Convo): add RenameForm and ConvoLink components; enhance Conversations component with responsive design
* fix: add missing @azure/storage-blob dependency in package.json
* refactor(Search): add error handling with toast notification for search errors
* refactor: make createdAt and updatedAt fields of tConvoUpdateSchema less restrictive if timestamps are missing
* chore: update @azure/storage-blob dependency to version 12.27.0, ensure package-lock is correct
* refactor(Search): improve conversation handling server side
* fix: eslint warning and errors
* refactor(Search): improved search loading state and overall UX
* Refactors conversation cache management: centralizes conversation mutation logic into dedicated utility functions for adding, updating, and removing conversations from query caches. Improves reliability and maintainability by:
  - Consolidating duplicate cache manipulation code
  - Adding type safety for infinite query data structures
  - Implementing consistent cache update patterns across all conversation operations
  - Removing obsolete conversation helper functions in favor of standardized utilities
* fix: conversation handling and SSE event processing
  - Optimizes conversation state management with useMemo and proper hook ordering
  - Improves SSE event handler documentation and error handling
  - Adds reset guard flag for conversation changes
  - Removes redundant navigation call
  - Cleans up cursor handling logic and document structure
  Improves code maintainability and prevents potential race conditions in conversation state updates
* refactor: add type for SearchBar `onChange`
* fix: type tags
* style: rounded to xl all Header buttons
* fix: activeConvo in Convo not working
* style(Bookmarks): improved UI
* a11y(AccountSettings): fixed hover style not visible when using light theme
* style(SettingsTabs): improved tab switchers and dropdowns
* feat: add translation keys for Speech
* chore: fix package-lock
* fix(mutations): legacy import after rebase
* feat: refactor conversation navigation for accessibility
* fix(search): convo and message create/update date not returned
* fix(search): show correct iconURL and endpoint for searched messages
* fix: small UI improvements
* chore: console.log cleanup
* chore: fix tests
* fix(ChatForm): improve conversation ID handling and clean up useMemo dependencies
* chore: improve typing
* chore: improve typing
* fix(useSSE): clear conversation ID on submission to prevent draft restoration
* refactor(OpenAIClient): clean up abort handler
* refactor(abortMiddleware): change handleAbort to use function expression
* feat: add PENDING_CONVO constant and update conversation ID checks
* fix: final event handling on abort
* fix: improve title sync and query cache sync on final event
* fix: prevent overwriting cached conversation data if it already exists
---------
Co-authored-by: Danny Avila <danny@librechat.ai>
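The headline change above is cursor-based pagination for the conversations API, which corresponds to the `GET /` handler in the file below. The following is a minimal client-side sketch of paging through that endpoint, assuming the router is mounted at /api/convos (implied by the `POST /api/convos/update` context string in the file) and that the JSON response exposes `conversations` and `nextCursor` fields; those two field names are an assumption, not confirmed by this file.

// Hedged sketch: page through the cursor-based conversations endpoint until no cursor remains.
// `conversations` and `nextCursor` are assumed response fields; adjust to the actual response shape.
async function fetchAllConversations(token) {
  const all = [];
  let cursor;
  do {
    const params = new URLSearchParams({ limit: '25', order: 'desc' });
    if (cursor) {
      params.set('cursor', cursor);
    }
    const res = await fetch(`/api/convos?${params.toString()}`, {
      // requireJwtAuth guards the route; this assumes the JWT is sent as a Bearer header.
      headers: { Authorization: `Bearer ${token}` },
    });
    if (!res.ok) {
      throw new Error(`Failed to fetch conversations: ${res.status}`);
    }
    const page = await res.json();
    all.push(...(page.conversations ?? []));
    cursor = page.nextCursor;
  } while (cursor);
  return all;
}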
223 lines
6.9 KiB
JavaScript
const multer = require('multer');
const express = require('express');
const { CacheKeys, EModelEndpoint } = require('librechat-data-provider');
const { getConvosByCursor, deleteConvos, getConvo, saveConvo } = require('~/models/Conversation');
const { forkConversation, duplicateConversation } = require('~/server/utils/import/fork');
const { storage, importFileFilter } = require('~/server/routes/files/multer');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const { importConversations } = require('~/server/utils/import');
const { createImportLimiters } = require('~/server/middleware');
const { deleteToolCalls } = require('~/models/ToolCall');
const { isEnabled, sleep } = require('~/server/utils');
const getLogStores = require('~/cache/getLogStores');
const { logger } = require('~/config');

const assistantClients = {
  [EModelEndpoint.azureAssistants]: require('~/server/services/Endpoints/azureAssistants'),
  [EModelEndpoint.assistants]: require('~/server/services/Endpoints/assistants'),
};

const router = express.Router();
router.use(requireJwtAuth);

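/**
 * Lists the user's conversations with cursor-based pagination.
 * Supports `limit`, `cursor`, `isArchived`, `tags`, `search`, and `order` query parameters.
 * @route GET /
 */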
router.get('/', async (req, res) => {
  const limit = parseInt(req.query.limit, 10) || 25;
  const cursor = req.query.cursor;
  const isArchived = isEnabled(req.query.isArchived);
  const search = req.query.search ? decodeURIComponent(req.query.search) : undefined;
  const order = req.query.order || 'desc';

  let tags;
  if (req.query.tags) {
    tags = Array.isArray(req.query.tags) ? req.query.tags : [req.query.tags];
  }

  try {
    const result = await getConvosByCursor(req.user.id, {
      cursor,
      limit,
      isArchived,
      tags,
      search,
      order,
    });
    res.status(200).json(result);
  } catch (error) {
    res.status(500).json({ error: 'Error fetching conversations' });
  }
});

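/**
 * Returns a single conversation owned by the requesting user, or 404 if not found.
 * @route GET /:conversationId
 */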
router.get('/:conversationId', async (req, res) => {
  const { conversationId } = req.params;
  const convo = await getConvo(req.user.id, conversationId);

  if (convo) {
    res.status(200).json(convo);
  } else {
    res.status(404).end();
  }
});

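/**
 * Retrieves a generated title from the title cache, waiting briefly and retrying once
 * before giving up; the cache entry is cleared once the title is returned.
 * @route POST /gen_title
 */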
router.post('/gen_title', async (req, res) => {
  const { conversationId } = req.body;
  const titleCache = getLogStores(CacheKeys.GEN_TITLE);
  const key = `${req.user.id}-${conversationId}`;
  let title = await titleCache.get(key);

  if (!title) {
    await sleep(2500);
    title = await titleCache.get(key);
  }

  if (title) {
    await titleCache.delete(key);
    res.status(200).json({ title });
  } else {
    res.status(404).json({
      message: 'Title not found or method not implemented for the conversation\'s endpoint',
    });
  }
});

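/**
 * Deletes conversations matching the provided filter, along with their tool calls;
 * when an assistants endpoint and thread_id are given, the remote thread is deleted too.
 * @route DELETE /
 */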
router.delete('/', async (req, res) => {
  let filter = {};
  const { conversationId, source, thread_id, endpoint } = req.body.arg;

  // Prevent deletion of all conversations
  if (!conversationId && !source && !thread_id && !endpoint) {
    return res.status(400).json({
      error: 'no parameters provided',
    });
  }

  if (conversationId) {
    filter = { conversationId };
  } else if (source === 'button') {
    return res.status(200).send('No conversationId provided');
  }

  if (
    typeof endpoint !== 'undefined' &&
    Object.prototype.propertyIsEnumerable.call(assistantClients, endpoint)
  ) {
    /** @type {{ openai: OpenAI }} */
    const { openai } = await assistantClients[endpoint].initializeClient({ req, res });
    try {
      const response = await openai.beta.threads.del(thread_id);
      logger.debug('Deleted OpenAI thread:', response);
    } catch (error) {
      logger.error('Error deleting OpenAI thread:', error);
    }
  }

  try {
    const dbResponse = await deleteConvos(req.user.id, filter);
    await deleteToolCalls(req.user.id, filter.conversationId);
    res.status(201).json(dbResponse);
  } catch (error) {
    logger.error('Error clearing conversations', error);
    res.status(500).send('Error clearing conversations');
  }
});

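/**
 * Deletes all of the user's conversations and their tool calls.
 * @route DELETE /all
 */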
router.delete('/all', async (req, res) => {
  try {
    const dbResponse = await deleteConvos(req.user.id, {});
    await deleteToolCalls(req.user.id);
    res.status(201).json(dbResponse);
  } catch (error) {
    logger.error('Error clearing conversations', error);
    res.status(500).send('Error clearing conversations');
  }
});

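/**
 * Updates an existing conversation from the fields provided in `req.body.arg`;
 * `conversationId` is required.
 * @route POST /update
 */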
router.post('/update', async (req, res) => {
  const update = req.body.arg;

  if (!update.conversationId) {
    return res.status(400).json({ error: 'conversationId is required' });
  }

  try {
    const dbResponse = await saveConvo(req, update, {
      context: `POST /api/convos/update ${update.conversationId}`,
    });
    res.status(201).json(dbResponse);
  } catch (error) {
    logger.error('Error updating conversation', error);
    res.status(500).send('Error updating conversation');
  }
});

const { importIpLimiter, importUserLimiter } = createImportLimiters();
const upload = multer({ storage: storage, fileFilter: importFileFilter });

/**
 * Imports a conversation from a JSON file and saves it to the database.
 * @route POST /import
 * @param {Express.Multer.File} req.file - The JSON file to import.
 * @returns {object} 201 - success response - application/json
 */
router.post(
  '/import',
  importIpLimiter,
  importUserLimiter,
  upload.single('file'),
  async (req, res) => {
    try {
      /* TODO: optimize to return imported conversations and add manually */
      await importConversations({ filepath: req.file.path, requestUserId: req.user.id });
      res.status(201).json({ message: 'Conversation(s) imported successfully' });
    } catch (error) {
      logger.error('Error processing file', error);
      res.status(500).send('Error processing file');
    }
  },
);

/**
 * POST /fork
 * This route handles forking a conversation based on the TForkConvoRequest and responds with TForkConvoResponse.
 * @route POST /fork
 * @param {express.Request<{}, TForkConvoResponse, TForkConvoRequest>} req - Express request object.
 * @param {express.Response<TForkConvoResponse>} res - Express response object.
 * @returns {Promise<void>} - The response after forking the conversation.
 */
router.post('/fork', async (req, res) => {
  try {
    /** @type {TForkConvoRequest} */
    const { conversationId, messageId, option, splitAtTarget, latestMessageId } = req.body;
    const result = await forkConversation({
      requestUserId: req.user.id,
      originalConvoId: conversationId,
      targetMessageId: messageId,
      latestMessageId,
      records: true,
      splitAtTarget,
      option,
    });

    res.json(result);
  } catch (error) {
    logger.error('Error forking conversation:', error);
    res.status(500).send('Error forking conversation');
  }
});

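/**
 * Duplicates a conversation for the requesting user, optionally with a new title.
 * @route POST /duplicate
 */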
router.post('/duplicate', async (req, res) => {
  const { conversationId, title } = req.body;

  try {
    const result = await duplicateConversation({
      userId: req.user.id,
      conversationId,
      title,
    });
    res.status(201).json(result);
  } catch (error) {
    logger.error('Error duplicating conversation:', error);
    res.status(500).send('Error duplicating conversation');
  }
});

module.exports = router;