Mirror of https://github.com/danny-avila/LibreChat.git (synced 2026-01-16 23:45:33 +01:00)

Merge branch 'main' into feature/entra-id-azure-integration

commit a7cf1ae27b
241 changed files with 25653 additions and 3303 deletions
@@ -350,6 +350,9 @@ function disposeClient(client) {
  if (client.agentConfigs) {
    client.agentConfigs = null;
  }
+  if (client.agentIdMap) {
+    client.agentIdMap = null;
+  }
  if (client.artifactPromises) {
    client.artifactPromises = null;
  }

@@ -82,7 +82,15 @@ const refreshController = async (req, res) => {
    if (error || !user) {
      return res.status(401).redirect('/login');
    }
-    const token = setOpenIDAuthTokens(tokenset, res, user._id.toString());
+    const token = setOpenIDAuthTokens(tokenset, res, user._id.toString(), refreshToken);
+
+    user.federatedTokens = {
+      access_token: tokenset.access_token,
+      id_token: tokenset.id_token,
+      refresh_token: refreshToken,
+      expires_at: claims.exp,
+    };
+
    return res.status(200).send({ token, user });
  } catch (error) {
    logger.error('[refreshController] OpenID token refresh error', error);

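What consumes `federatedTokens` is not shown in this hunk; the shape stored above carries the provider's tokens plus the `exp` claim in Unix seconds. A minimal sketch of a downstream freshness check, assuming only that shape (both helpers below are hypothetical, not part of the codebase):

// Hypothetical consumer of the federatedTokens shape populated in refreshController.
// `expires_at` holds the provider's `exp` claim (Unix seconds).
function isExpired(federatedTokens, skewSeconds = 60) {
  if (!federatedTokens?.expires_at) {
    return true;
  }
  // Treat tokens within `skewSeconds` of expiry as expired to absorb clock drift.
  return federatedTokens.expires_at - skewSeconds <= Date.now() / 1000;
}

function getFederatedAccessToken(user) {
  const tokens = user.federatedTokens;
  return isExpired(tokens) ? null : tokens.access_token;
}
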
@@ -3,32 +3,45 @@ const { Tools, CacheKeys, Constants, FileSources } = require('librechat-data-pro
const {
+  MCPOAuthHandler,
+  MCPTokenStorage,
+  mcpServersRegistry,
  normalizeHttpError,
  extractWebSearchEnvVars,
} = require('@librechat/api');
const {
-  getFiles,
-  findToken,
-  updateUser,
-  deleteFiles,
-  deleteConvos,
-  deletePresets,
-  deleteMessages,
-  deleteUserById,
-  deleteAllSharedLinks,
  deleteAllUserSessions,
+  deleteAllSharedLinks,
+  deleteUserById,
+  deleteMessages,
+  deletePresets,
+  deleteConvos,
+  deleteFiles,
+  updateUser,
+  findToken,
+  getFiles,
} = require('~/models');
+const {
+  ConversationTag,
+  Transaction,
+  MemoryEntry,
+  Assistant,
+  AclEntry,
+  Balance,
+  Action,
+  Group,
+  Token,
+  User,
+} = require('~/db/models');
const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService');
const { updateUserPluginsService, deleteUserKey } = require('~/server/services/UserService');
const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService');
const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud');
const { processDeleteRequest } = require('~/server/services/Files/process');
-const { Transaction, Balance, User, Token } = require('~/db/models');
const { getMCPManager, getFlowStateManager } = require('~/config');
const { getAppConfig } = require('~/server/services/Config');
const { deleteToolCalls } = require('~/models/ToolCall');
const { deleteUserPrompts } = require('~/models/Prompt');
const { deleteUserAgents } = require('~/models/Agent');
const { getLogStores } = require('~/cache');
-const { mcpServersRegistry } = require('@librechat/api');

const getUserController = async (req, res) => {
  const appConfig = await getAppConfig({ role: req.user?.role });

@@ -237,7 +250,6 @@ const deleteUserController = async (req, res) => {
    await deleteUserKey({ userId: user.id, all: true }); // delete user keys
-    await Balance.deleteMany({ user: user._id }); // delete user balances
    await deletePresets(user.id); // delete user presets
    /* TODO: Delete Assistant Threads */
    try {
      await deleteConvos(user.id); // delete user convos
    } catch (error) {

@@ -249,7 +261,19 @@ const deleteUserController = async (req, res) => {
    await deleteUserFiles(req); // delete user files
    await deleteFiles(null, user.id); // delete database files in case of orphaned files from previous steps
    await deleteToolCalls(user.id); // delete user tool calls
    /* TODO: queue job for cleaning actions and assistants of non-existant users */
+    await deleteUserAgents(user.id); // delete user agents
+    await Assistant.deleteMany({ user: user.id }); // delete user assistants
+    await ConversationTag.deleteMany({ user: user.id }); // delete user conversation tags
+    await MemoryEntry.deleteMany({ userId: user.id }); // delete user memory entries
    await deleteUserPrompts(req, user.id); // delete user prompts
+    await Action.deleteMany({ user: user.id }); // delete user actions
+    await Token.deleteMany({ userId: user.id }); // delete user OAuth tokens
+    await Group.updateMany(
+      // remove user from all groups
+      { memberIds: user.id },
+      { $pull: { memberIds: user.id } },
+    );
+    await AclEntry.deleteMany({ principalId: user._id }); // delete user ACL entries
    logger.info(`User deleted account. Email: ${user.email} ID: ${user.id}`);
    res.status(200).send({ message: 'User deleted' });
  } catch (err) {

@@ -39,9 +39,9 @@ class ModelEndHandler {
   * @param {ModelEndData | undefined} data
   * @param {Record<string, unknown> | undefined} metadata
   * @param {StandardGraph} graph
-   * @returns
+   * @returns {Promise<void>}
   */
-  handle(event, data, metadata, graph) {
+  async handle(event, data, metadata, graph) {
    if (!graph || !metadata) {
      console.warn(`Graph or metadata not found in ${event} event`);
      return;

@@ -79,7 +79,7 @@ class ModelEndHandler {
      }
    }
    if (isGoogle || streamingDisabled || hasUnprocessedToolCalls) {
-      handleToolCalls(toolCalls, metadata, graph);
+      await handleToolCalls(toolCalls, metadata, graph);
    }

    const usage = data?.output?.usage_metadata;

@@ -101,7 +101,7 @@ class ModelEndHandler {
      const stepKey = graph.getStepKey(metadata);
      const message_id = getMessageId(stepKey, graph) ?? '';
      if (message_id) {
-        graph.dispatchRunStep(stepKey, {
+        await graph.dispatchRunStep(stepKey, {
          type: StepTypes.MESSAGE_CREATION,
          message_creation: {
            message_id,

@@ -111,7 +111,7 @@ class ModelEndHandler {
      const stepId = graph.getStepIdByKey(stepKey);
      const content = data.output.content;
      if (typeof content === 'string') {
-        graph.dispatchMessageDelta(stepId, {
+        await graph.dispatchMessageDelta(stepId, {
          content: [
            {
              type: 'text',

@@ -120,7 +120,7 @@ class ModelEndHandler {
          ],
        });
      } else if (content.every((c) => c.type?.startsWith('text'))) {
-        graph.dispatchMessageDelta(stepId, {
+        await graph.dispatchMessageDelta(stepId, {
          content,
        });
      }

@@ -162,7 +162,7 @@ function getDefaultHandlers({ res, aggregateContent, toolEndCallback, collectedU
  }
  const handlers = {
    [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(collectedUsage),
-    [GraphEvents.TOOL_END]: new ToolEndHandler(toolEndCallback),
+    [GraphEvents.TOOL_END]: new ToolEndHandler(toolEndCallback, logger),
    [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
    [GraphEvents.ON_RUN_STEP]: {
      /**

@@ -9,6 +9,7 @@ const {
  logAxiosError,
  sanitizeTitle,
  resolveHeaders,
+  createSafeUser,
  getBalanceConfig,
  memoryInstructions,
  getTransactionsConfig,

@@ -20,6 +21,7 @@ const {
  Providers,
  TitleMethod,
  formatMessage,
+  labelContentByAgent,
  formatAgentMessages,
  getTokenCountForMessage,
  createMetadataAggregator,

@@ -92,6 +94,61 @@ function logToolError(graph, error, toolId) {
  });
}

+/**
+ * Applies agent labeling to conversation history when multi-agent patterns are detected.
+ * Labels content parts by their originating agent to prevent identity confusion.
+ *
+ * @param {TMessage[]} orderedMessages - The ordered conversation messages
+ * @param {Agent} primaryAgent - The primary agent configuration
+ * @param {Map<string, Agent>} agentConfigs - Map of additional agent configurations
+ * @returns {TMessage[]} Messages with agent labels applied where appropriate
+ */
+function applyAgentLabelsToHistory(orderedMessages, primaryAgent, agentConfigs) {
+  const shouldLabelByAgent = (primaryAgent.edges?.length ?? 0) > 0 || (agentConfigs?.size ?? 0) > 0;
+
+  if (!shouldLabelByAgent) {
+    return orderedMessages;
+  }
+
+  const processedMessages = [];
+
+  for (let i = 0; i < orderedMessages.length; i++) {
+    const message = orderedMessages[i];
+
+    /** @type {Record<string, string>} */
+    const agentNames = { [primaryAgent.id]: primaryAgent.name || 'Assistant' };
+
+    if (agentConfigs) {
+      for (const [agentId, agentConfig] of agentConfigs.entries()) {
+        agentNames[agentId] = agentConfig.name || agentConfig.id;
+      }
+    }
+
+    if (
+      !message.isCreatedByUser &&
+      message.metadata?.agentIdMap &&
+      Array.isArray(message.content)
+    ) {
+      try {
+        const labeledContent = labelContentByAgent(
+          message.content,
+          message.metadata.agentIdMap,
+          agentNames,
+        );
+
+        processedMessages.push({ ...message, content: labeledContent });
+      } catch (error) {
+        logger.error('[AgentClient] Error applying agent labels to message:', error);
+        processedMessages.push(message);
+      }
+    } else {
+      processedMessages.push(message);
+    }
+  }
+
+  return processedMessages;
+}
+
class AgentClient extends BaseClient {
  constructor(options = {}) {
    super(null, options);

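To see what the helper does end to end, here is an illustrative call, assuming the function above is in scope together with a two-agent graph; the ids, names, and `edges` contents below are made up for the example:

// Illustrative only: one assistant message carries an agentIdMap in its
// metadata (content-part index -> agent id), so each part can be attributed.
const primaryAgent = { id: 'agent_A', name: 'Researcher', edges: [{ to: 'agent_B' }] };
const agentConfigs = new Map([['agent_B', { id: 'agent_B', name: 'Writer' }]]);

const history = [
  { isCreatedByUser: true, text: 'Summarize the findings.' },
  {
    isCreatedByUser: false,
    content: [
      { type: 'text', text: 'Findings: ...' },
      { type: 'text', text: 'Summary: ...' },
    ],
    metadata: { agentIdMap: { 0: 'agent_A', 1: 'agent_B' } },
  },
];

// Parts are labeled 'Researcher' and 'Writer' instead of being presented
// as one undifferentiated assistant turn.
const labeled = applyAgentLabelsToHistory(history, primaryAgent, agentConfigs);
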
@@ -141,6 +198,8 @@ class AgentClient extends BaseClient {
    this.indexTokenCountMap = {};
    /** @type {(messages: BaseMessage[]) => Promise<void>} */
    this.processMemory;
+    /** @type {Record<number, string> | null} */
+    this.agentIdMap = null;
  }

  /**

@@ -233,6 +292,12 @@ class AgentClient extends BaseClient {
      summary: this.shouldSummarize,
    });

+    orderedMessages = applyAgentLabelsToHistory(
+      orderedMessages,
+      this.options.agent,
+      this.agentConfigs,
+    );
+
    let payload;
    /** @type {number | undefined} */
    let promptTokens;

@@ -612,7 +677,11 @@ class AgentClient extends BaseClient {
      userMCPAuthMap: opts.userMCPAuthMap,
      abortController: opts.abortController,
    });
-    return filterMalformedContentParts(this.contentParts);
+
+    const completion = filterMalformedContentParts(this.contentParts);
+    const metadata = this.agentIdMap ? { agentIdMap: this.agentIdMap } : undefined;
+
+    return { completion, metadata };
  }

  /**

@@ -788,7 +857,7 @@ class AgentClient extends BaseClient {
        conversationId: this.conversationId,
        parentMessageId: this.parentMessageId,
      },
-      user: this.options.req.user,
+      user: createSafeUser(this.options.req.user),
      },
      recursionLimit: agentsEConfig?.recursionLimit ?? 25,
      signal: abortController.signal,

@@ -864,6 +933,7 @@ class AgentClient extends BaseClient {
      signal: abortController.signal,
      customHandlers: this.options.eventHandlers,
      requestBody: config.configurable.requestBody,
+      user: createSafeUser(this.options.req?.user),
      tokenCounter: createTokenCounter(this.getEncoding()),
    });

@@ -902,6 +972,24 @@ class AgentClient extends BaseClient {
          );
        });
      }
+
+      try {
+        /** Capture agent ID map if we have edges or multiple agents */
+        const shouldStoreAgentMap =
+          (this.options.agent.edges?.length ?? 0) > 0 || (this.agentConfigs?.size ?? 0) > 0;
+        if (shouldStoreAgentMap && run?.Graph) {
+          const contentPartAgentMap = run.Graph.getContentPartAgentMap();
+          if (contentPartAgentMap && contentPartAgentMap.size > 0) {
+            this.agentIdMap = Object.fromEntries(contentPartAgentMap);
+            logger.debug('[AgentClient] Captured agent ID map:', {
+              totalParts: this.contentParts.length,
+              mappedParts: Object.keys(this.agentIdMap).length,
+            });
+          }
+        }
+      } catch (error) {
+        logger.error('[AgentClient] Error capturing agent ID map:', error);
+      }
    } catch (err) {
      logger.error(
        '[api/server/controllers/agents/client.js #sendCompletion] Operation aborted',

@@ -935,6 +1023,9 @@ class AgentClient extends BaseClient {
          err,
        );
      }
+      run = null;
+      config = null;
+      memoryPromise = null;
    }
  }

@@ -1063,6 +1154,7 @@ class AgentClient extends BaseClient {
    if (clientOptions?.configuration?.defaultHeaders != null) {
      clientOptions.configuration.defaultHeaders = resolveHeaders({
        headers: clientOptions.configuration.defaultHeaders,
+        user: createSafeUser(this.options.req?.user),
        body: {
          messageId: this.responseMessageId,
          conversationId: this.conversationId,

@@ -1,6 +1,10 @@
-const { sendEvent } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { Constants } = require('librechat-data-provider');
+const {
+  sendEvent,
+  sanitizeFileForTransmit,
+  sanitizeMessageForTransmit,
+} = require('@librechat/api');
const {
  handleAbortError,
  createAbortController,

@@ -224,13 +228,13 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
    conversation.title =
      conversation && !conversation.title ? null : conversation?.title || 'New Chat';

-    // Process files if needed
+    // Process files if needed (sanitize to remove large text fields before transmission)
    if (req.body.files && client.options?.attachments) {
      userMessage.files = [];
      const messageFiles = new Set(req.body.files.map((file) => file.file_id));
-      for (let attachment of client.options.attachments) {
+      for (const attachment of client.options.attachments) {
        if (messageFiles.has(attachment.file_id)) {
-          userMessage.files.push({ ...attachment });
+          userMessage.files.push(sanitizeFileForTransmit(attachment));
        }
      }
      delete userMessage.image_urls;

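`sanitizeFileForTransmit` and `sanitizeMessageForTransmit` come from `@librechat/api` and their bodies are not part of this diff. A minimal sketch of the intent stated in the comment, dropping bulky fields before the payload goes over the SSE stream; the choice of `text` as the stripped field is an assumption, not the library's actual field list:

// Illustrative sketch only; the real helper lives in @librechat/api.
function sanitizeFileForTransmit(file) {
  // Drop bulky extracted text while keeping identifying and display fields.
  const { text, ...rest } = file;
  return rest;
}
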
@@ -245,7 +249,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
      final: true,
      conversation,
      title: conversation.title,
-      requestMessage: userMessage,
+      requestMessage: sanitizeMessageForTransmit(userMessage),
      responseMessage: finalResponse,
    });
    res.end();

@@ -273,7 +277,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
      final: true,
      conversation,
      title: conversation.title,
-      requestMessage: userMessage,
+      requestMessage: sanitizeMessageForTransmit(userMessage),
      responseMessage: finalResponse,
      error: { message: 'Request was aborted during completion' },
    });

@@ -11,7 +11,6 @@
const {
  Tools,
-  Constants,
  SystemRoles,
  FileSources,
  ResourceType,
  AccessRoleIds,

@@ -20,6 +19,8 @@ const {
  PermissionBits,
  actionDelimiter,
  removeNullishValues,
+  CacheKeys,
+  Time,
} = require('librechat-data-provider');
const {
  getListAgentsByAccess,

@@ -45,6 +46,7 @@ const { updateAction, getActions } = require('~/models/Action');
const { getCachedTools } = require('~/server/services/Config');
const { deleteFileByFilter } = require('~/models/File');
const { getCategoriesWithCounts } = require('~/models');
+const { getLogStores } = require('~/cache');

const systemTools = {
  [Tools.execute_code]: true,

@@ -52,6 +54,49 @@ const systemTools = {
  [Tools.web_search]: true,
};

+const MAX_SEARCH_LEN = 100;
+const escapeRegex = (str = '') => str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
+
+/**
+ * Opportunistically refreshes S3-backed avatars for agent list responses.
+ * Only list responses are refreshed because they're the highest-traffic surface and
+ * the avatar URLs have a short-lived TTL. The refresh is cached per-user for 30 minutes
+ * via {@link CacheKeys.S3_EXPIRY_INTERVAL} so we refresh once per interval at most.
+ * @param {Array} agents - Agents being enriched with S3-backed avatars
+ * @param {string} userId - User identifier used for the cache refresh key
+ */
+const refreshListAvatars = async (agents, userId) => {
+  if (!agents?.length) {
+    return;
+  }
+
+  const cache = getLogStores(CacheKeys.S3_EXPIRY_INTERVAL);
+  const refreshKey = `${userId}:agents_list`;
+  const alreadyChecked = await cache.get(refreshKey);
+  if (alreadyChecked) {
+    return;
+  }
+
+  await Promise.all(
+    agents.map(async (agent) => {
+      if (agent?.avatar?.source !== FileSources.s3 || !agent?.avatar?.filepath) {
+        return;
+      }
+
+      try {
+        const newPath = await refreshS3Url(agent.avatar);
+        if (newPath && newPath !== agent.avatar.filepath) {
+          agent.avatar = { ...agent.avatar, filepath: newPath };
+        }
+      } catch (err) {
+        logger.debug('[/Agents] Avatar refresh error for list item', err);
+      }
+    }),
+  );
+
+  await cache.set(refreshKey, true, Time.THIRTY_MINUTES);
+};
+
/**
 * Creates an Agent.
 * @route POST /Agents

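Because the search string now feeds `new RegExp` directly (see the list-handler hunk further below), escaping metacharacters and capping the length closes off regex injection and pathological patterns. A short demonstration using the two definitions above:

// `escapeRegex` and MAX_SEARCH_LEN as defined in the hunk above.
const MAX_SEARCH_LEN = 100;
const escapeRegex = (str = '') => str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

// Without escaping, a search like "(a+)+$" becomes a pathological pattern;
// escaped, it only matches the literal characters.
const raw = '(a+)+$'.slice(0, MAX_SEARCH_LEN);
const safe = new RegExp(escapeRegex(raw), 'i');
console.log(safe.test('literal (a+)+$ in a name')); // true
console.log(safe.test('aaaa')); // false, no longer interpreted as a quantifier
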
@@ -142,10 +187,13 @@ const getAgentHandler = async (req, res, expandProperties = false) => {
    agent.version = agent.versions ? agent.versions.length : 0;

    if (agent.avatar && agent.avatar?.source === FileSources.s3) {
-      const originalUrl = agent.avatar.filepath;
-      agent.avatar.filepath = await refreshS3Url(agent.avatar);
-      if (originalUrl !== agent.avatar.filepath) {
-        await updateAgent({ id }, { avatar: agent.avatar }, { updatingUserId: req.user.id });
+      try {
+        agent.avatar = {
+          ...agent.avatar,
+          filepath: await refreshS3Url(agent.avatar),
+        };
+      } catch (e) {
+        logger.warn('[/Agents/:id] Failed to refresh S3 URL', e);
      }
    }

@@ -209,7 +257,12 @@ const updateAgentHandler = async (req, res) => {
  try {
    const id = req.params.id;
    const validatedData = agentUpdateSchema.parse(req.body);
-    const { _id, ...updateData } = removeNullishValues(validatedData);
+    // Preserve explicit null for avatar to allow resetting the avatar
+    const { avatar: avatarField, _id, ...rest } = validatedData;
+    const updateData = removeNullishValues(rest);
+    if (avatarField === null) {
+      updateData.avatar = avatarField;
+    }

    // Convert OCR to context in incoming updateData
    convertOcrToContextInPlace(updateData);

@@ -342,21 +395,21 @@ const duplicateAgentHandler = async (req, res) => {
      const [domain] = action.action_id.split(actionDelimiter);
      const fullActionId = `${domain}${actionDelimiter}${newActionId}`;

+      // Sanitize sensitive metadata before persisting
+      const filteredMetadata = { ...(action.metadata || {}) };
+      for (const field of sensitiveFields) {
+        delete filteredMetadata[field];
+      }
+
      const newAction = await updateAction(
        { action_id: newActionId },
        {
-          metadata: action.metadata,
+          metadata: filteredMetadata,
          agent_id: newAgentId,
          user: userId,
        },
      );

-      const filteredMetadata = { ...newAction.metadata };
-      for (const field of sensitiveFields) {
-        delete filteredMetadata[field];
-      }
-
-      newAction.metadata = filteredMetadata;
      newActionsList.push(newAction);
      return fullActionId;
    };

@@ -463,13 +516,13 @@ const getListAgentsHandler = async (req, res) => {
      filter.is_promoted = { $ne: true };
    }

-    // Handle search filter
+    // Handle search filter (escape regex and cap length)
    if (search && search.trim() !== '') {
-      filter.$or = [
-        { name: { $regex: search.trim(), $options: 'i' } },
-        { description: { $regex: search.trim(), $options: 'i' } },
-      ];
+      const safeSearch = escapeRegex(search.trim().slice(0, MAX_SEARCH_LEN));
+      const regex = new RegExp(safeSearch, 'i');
+      filter.$or = [{ name: regex }, { description: regex }];
    }

    // Get agent IDs the user has VIEW access to via ACL
    const accessibleIds = await findAccessibleResources({
      userId,

@@ -477,10 +530,12 @@
      resourceType: ResourceType.AGENT,
      requiredPermissions: requiredPermission,
    });

    const publiclyAccessibleIds = await findPubliclyAccessibleResources({
      resourceType: ResourceType.AGENT,
      requiredPermissions: PermissionBits.VIEW,
    });

    // Use the new ACL-aware function
    const data = await getListAgentsByAccess({
      accessibleIds,

@@ -488,13 +543,31 @@
      limit,
      after: cursor,
    });
-    if (data?.data?.length) {
-      data.data = data.data.map((agent) => {
-        if (publiclyAccessibleIds.some((id) => id.equals(agent._id))) {
+
+    const agents = data?.data ?? [];
+    if (!agents.length) {
+      return res.json(data);
+    }
+
+    const publicSet = new Set(publiclyAccessibleIds.map((oid) => oid.toString()));
+
+    data.data = agents.map((agent) => {
+      try {
+        if (agent?._id && publicSet.has(agent._id.toString())) {
          agent.isPublic = true;
        }
-        return agent;
-      });
-    }
+      } catch (e) {
+        // Silently ignore mapping errors
+        void e;
+      }
+      return agent;
+    });
+
+    // Opportunistically refresh S3 avatar URLs for list results with caching
+    try {
+      await refreshListAvatars(data.data, req.user.id);
+    } catch (err) {
+      logger.debug('[/Agents] Skipping avatar refresh for list', err);
+    }
    return res.json(data);
  } catch (error) {

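Stringifying the public ObjectIds once into a Set replaces the earlier per-agent `publiclyAccessibleIds.some((id) => id.equals(...))` scan: with A agents and P public ids, roughly O(A x P) equals calls become O(P) setup plus O(1) lookups. The pattern in isolation:

// Why the Set: build it once from the ACL query result, then each agent
// check is a constant-time string lookup instead of a linear ObjectId scan.
const publicSet = new Set(publiclyAccessibleIds.map((oid) => oid.toString()));
const isPublic = (agent) => Boolean(agent?._id) && publicSet.has(agent._id.toString());
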
@@ -517,28 +590,21 @@
const uploadAgentAvatarHandler = async (req, res) => {
  try {
    const appConfig = req.config;
    if (!req.file) {
      return res.status(400).json({ message: 'No file uploaded' });
    }
    filterFile({ req, file: req.file, image: true, isAvatar: true });
    const { agent_id } = req.params;
    if (!agent_id) {
      return res.status(400).json({ message: 'Agent ID is required' });
    }

-    const isAdmin = req.user.role === SystemRoles.ADMIN;
    const existingAgent = await getAgent({ id: agent_id });

    if (!existingAgent) {
      return res.status(404).json({ error: 'Agent not found' });
    }

-    const isAuthor = existingAgent.author.toString() === req.user.id.toString();
-    const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;
-
-    if (!hasEditPermission) {
-      return res.status(403).json({
-        error: 'You do not have permission to modify this non-collaborative agent',
-      });
-    }
-
    const buffer = await fs.readFile(req.file.path);
    const fileStrategy = getFileStrategy(appConfig, { isAvatar: true });
    const resizedBuffer = await resizeAvatar({

@@ -571,8 +637,6 @@
      }
    }

-    const promises = [];
-
    const data = {
      avatar: {
        filepath: image.filepath,

@@ -580,17 +644,16 @@
      },
    };

-    promises.push(
-      await updateAgent({ id: agent_id }, data, {
-        updatingUserId: req.user.id,
-      }),
-    );
-
-    const resolved = await Promise.all(promises);
-    res.status(201).json(resolved[0]);
+    const updatedAgent = await updateAgent({ id: agent_id }, data, {
+      updatingUserId: req.user.id,
+    });
+    res.status(201).json(updatedAgent);
  } catch (error) {
    const message = 'An error occurred while updating the Agent Avatar';
-    logger.error(message, error);
+    logger.error(
+      `[/:agent_id/avatar] ${message} (${req.params?.agent_id ?? 'unknown agent'})`,
+      error,
+    );
    res.status(500).json({ message });
  } finally {
    try {

@@ -629,21 +692,13 @@
      return res.status(400).json({ error: 'version_index is required' });
    }

-    const isAdmin = req.user.role === SystemRoles.ADMIN;
    const existingAgent = await getAgent({ id });

    if (!existingAgent) {
      return res.status(404).json({ error: 'Agent not found' });
    }

-    const isAuthor = existingAgent.author.toString() === req.user.id.toString();
-    const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;
-
-    if (!hasEditPermission) {
-      return res.status(403).json({
-        error: 'You do not have permission to modify this non-collaborative agent',
-      });
-    }
+    // Permissions are enforced via route middleware (ACL EDIT)

    const updatedAgent = await revertAgentVersion({ id }, version_index);

@@ -47,6 +47,7 @@ jest.mock('~/server/services/PermissionService', () => ({
  findPubliclyAccessibleResources: jest.fn().mockResolvedValue([]),
  grantPermission: jest.fn(),
  hasPublicPermission: jest.fn().mockResolvedValue(false),
+  checkPermission: jest.fn().mockResolvedValue(true),
}));

jest.mock('~/models', () => ({

@@ -573,6 +574,68 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
    expect(updatedAgent.version).toBe(agentInDb.versions.length);
  });

+  test('should allow resetting avatar when value is explicitly null', async () => {
+    await Agent.updateOne(
+      { id: existingAgentId },
+      {
+        avatar: {
+          filepath: 'https://example.com/avatar.png',
+          source: 's3',
+        },
+      },
+    );
+
+    mockReq.user.id = existingAgentAuthorId.toString();
+    mockReq.params.id = existingAgentId;
+    mockReq.body = {
+      avatar: null,
+    };
+
+    await updateAgentHandler(mockReq, mockRes);
+
+    const updatedAgent = mockRes.json.mock.calls[0][0];
+    expect(updatedAgent.avatar).toBeNull();
+
+    const agentInDb = await Agent.findOne({ id: existingAgentId });
+    expect(agentInDb.avatar).toBeNull();
+  });
+
+  test('should ignore avatar field when value is undefined', async () => {
+    const originalAvatar = {
+      filepath: 'https://example.com/original.png',
+      source: 's3',
+    };
+    await Agent.updateOne({ id: existingAgentId }, { avatar: originalAvatar });
+
+    mockReq.user.id = existingAgentAuthorId.toString();
+    mockReq.params.id = existingAgentId;
+    mockReq.body = {
+      avatar: undefined,
+    };
+
+    await updateAgentHandler(mockReq, mockRes);
+
+    const agentInDb = await Agent.findOne({ id: existingAgentId });
+    expect(agentInDb.avatar.filepath).toBe(originalAvatar.filepath);
+    expect(agentInDb.avatar.source).toBe(originalAvatar.source);
+  });
+
+  test('should not bump version when no mutable fields change', async () => {
+    const existingAgent = await Agent.findOne({ id: existingAgentId });
+    const originalVersionCount = existingAgent.versions.length;
+
+    mockReq.user.id = existingAgentAuthorId.toString();
+    mockReq.params.id = existingAgentId;
+    mockReq.body = {
+      avatar: undefined,
+    };
+
+    await updateAgentHandler(mockReq, mockRes);
+
+    const agentInDb = await Agent.findOne({ id: existingAgentId });
+    expect(agentInDb.versions.length).toBe(originalVersionCount);
+  });
+
  test('should handle validation errors properly', async () => {
    mockReq.user.id = existingAgentAuthorId.toString();
    mockReq.params.id = existingAgentId;

@@ -1,7 +1,7 @@
const { v4 } = require('uuid');
const { sleep } = require('@librechat/agents');
const { logger } = require('@librechat/data-schemas');
-const { sendEvent, getBalanceConfig, getModelMaxTokens } = require('@librechat/api');
+const { sendEvent, getBalanceConfig, getModelMaxTokens, countTokens } = require('@librechat/api');
const {
  Time,
  Constants,

@@ -33,7 +33,6 @@ const { getTransactions } = require('~/models/Transaction');
const { checkBalance } = require('~/models/balanceMethods');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
-const { countTokens } = require('~/server/utils');
const { getOpenAIClient } = require('./helpers');

/**

@@ -1,7 +1,7 @@
const { v4 } = require('uuid');
const { sleep } = require('@librechat/agents');
const { logger } = require('@librechat/data-schemas');
-const { sendEvent, getBalanceConfig, getModelMaxTokens } = require('@librechat/api');
+const { sendEvent, getBalanceConfig, getModelMaxTokens, countTokens } = require('@librechat/api');
const {
  Time,
  Constants,

@@ -30,7 +30,6 @@ const { getTransactions } = require('~/models/Transaction');
const { checkBalance } = require('~/models/balanceMethods');
const { getConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores');
-const { countTokens } = require('~/server/utils');
const { getOpenAIClient } = require('./helpers');

/**

@@ -44,7 +44,13 @@ const getMCPTools = async (req, res) => {
      continue;
    }

-    const serverTools = await mcpManager.getServerToolFunctions(userId, serverName);
+    let serverTools;
+    try {
+      serverTools = await mcpManager.getServerToolFunctions(userId, serverName);
+    } catch (error) {
+      logger.error(`[getMCPTools] Error fetching tools for server ${serverName}:`, error);
+      continue;
+    }
    if (!serverTools) {
      logger.debug(`[getMCPTools] No tools found for server ${serverName}`);
      continue;

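The try/catch around `getServerToolFunctions` means a single unreachable MCP server no longer aborts the whole tools listing; the loop logs the failure and moves on. The same isolation pattern in a self-contained form (`collectServerTools` and its arguments are illustrative, not part of the codebase):

// Generic form of the per-item isolation used above: collect what succeeds,
// log and skip what fails, never let one bad server abort the loop.
async function collectServerTools(mcpManager, userId, serverNames, logger) {
  const tools = {};
  for (const serverName of serverNames) {
    try {
      const serverTools = await mcpManager.getServerToolFunctions(userId, serverName);
      if (serverTools) {
        Object.assign(tools, serverTools);
      }
    } catch (error) {
      logger.error(`Skipping "${serverName}" after tool fetch failure:`, error);
    }
  }
  return tools;
}
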
api/server/experimental.js (new file, 417 lines)
@@ -0,0 +1,417 @@
require('dotenv').config();
const fs = require('fs');
const path = require('path');
require('module-alias')({ base: path.resolve(__dirname, '..') });
const cluster = require('cluster');
const Redis = require('ioredis');
const cors = require('cors');
const axios = require('axios');
const express = require('express');
const passport = require('passport');
const compression = require('compression');
const cookieParser = require('cookie-parser');
const { logger } = require('@librechat/data-schemas');
const mongoSanitize = require('express-mongo-sanitize');
const {
  isEnabled,
  ErrorController,
  performStartupChecks,
  handleJsonParseError,
  initializeFileStorage,
} = require('@librechat/api');
const { connectDb, indexSync } = require('~/db');
const initializeOAuthReconnectManager = require('./services/initializeOAuthReconnectManager');
const createValidateImageRequest = require('./middleware/validateImageRequest');
const { jwtLogin, ldapLogin, passportLogin } = require('~/strategies');
const { updateInterfacePermissions } = require('~/models/interface');
const { checkMigrations } = require('./services/start/migration');
const initializeMCPs = require('./services/initializeMCPs');
const configureSocialLogins = require('./socialLogins');
const { getAppConfig } = require('./services/Config');
const staticCache = require('./utils/staticCache');
const noIndex = require('./middleware/noIndex');
const { seedDatabase } = require('~/models');
const routes = require('./routes');

const { PORT, HOST, ALLOW_SOCIAL_LOGIN, DISABLE_COMPRESSION, TRUST_PROXY } = process.env ?? {};

/** Allow PORT=0 to be used for automatic free port assignment */
const port = isNaN(Number(PORT)) ? 3080 : Number(PORT);
const host = HOST || 'localhost';
const trusted_proxy = Number(TRUST_PROXY) || 1;

/** Number of worker processes to spawn (simulating multiple pods) */
const workers = Number(process.env.CLUSTER_WORKERS) || 4;

/** Helper to wrap log messages for better visibility */
const wrapLogMessage = (msg) => {
  return `\n${'='.repeat(50)}\n${msg}\n${'='.repeat(50)}`;
};

/**
 * Flushes the Redis cache on startup
 * This ensures a clean state for testing multi-pod MCP connection issues
 */
const flushRedisCache = async () => {
  /** Skip cache flush if Redis is not enabled */
  if (!isEnabled(process.env.USE_REDIS)) {
    logger.info('Redis is not enabled, skipping cache flush');
    return;
  }

  const redisConfig = {
    host: process.env.REDIS_HOST || 'localhost',
    port: process.env.REDIS_PORT || 6379,
  };

  if (process.env.REDIS_PASSWORD) {
    redisConfig.password = process.env.REDIS_PASSWORD;
  }

  /** Handle Redis Cluster configuration */
  if (isEnabled(process.env.USE_REDIS_CLUSTER) || process.env.REDIS_URI?.includes(',')) {
    logger.info('Detected Redis Cluster configuration');
    const uris = process.env.REDIS_URI?.split(',').map((uri) => {
      const url = new URL(uri.trim());
      return {
        host: url.hostname,
        port: parseInt(url.port || '6379', 10),
      };
    });
    const redis = new Redis.Cluster(uris, {
      redisOptions: {
        password: process.env.REDIS_PASSWORD,
      },
    });

    try {
      logger.info('Attempting to connect to Redis Cluster...');
      await redis.ping();
      logger.info('Connected to Redis Cluster. Executing flushall...');
      const result = await Promise.race([
        redis.flushall(),
        new Promise((_, reject) => setTimeout(() => reject(new Error('Flush timeout')), 10000)),
      ]);
      logger.info('Redis Cluster cache flushed successfully', { result });
    } catch (err) {
      logger.error('Error while flushing Redis Cluster cache:', err);
      throw err;
    } finally {
      redis.disconnect();
    }
    return;
  }

  /** Handle single Redis instance */
  const redis = new Redis(redisConfig);

  try {
    logger.info('Attempting to connect to Redis...');
    await redis.ping();
    logger.info('Connected to Redis. Executing flushall...');
    const result = await Promise.race([
      redis.flushall(),
      new Promise((_, reject) => setTimeout(() => reject(new Error('Flush timeout')), 5000)),
    ]);
    logger.info('Redis cache flushed successfully', { result });
  } catch (err) {
    logger.error('Error while flushing Redis cache:', err);
    throw err;
  } finally {
    redis.disconnect();
  }
};

/**
 * Master process
 * Manages worker processes and handles graceful shutdowns
 */
if (cluster.isMaster) {
  logger.info(wrapLogMessage(`Master ${process.pid} is starting...`));
  logger.info(`Spawning ${workers} workers to simulate multi-pod environment`);

  let activeWorkers = 0;
  const startTime = Date.now();

  /** Flush Redis cache before starting workers */
  flushRedisCache()
    .then(() => {
      logger.info('Cache flushed, forking workers...');
      for (let i = 0; i < workers; i++) {
        cluster.fork();
      }
    })
    .catch((err) => {
      logger.error('Unable to flush Redis cache, not forking workers:', err);
      process.exit(1);
    });

  /** Track worker lifecycle */
  cluster.on('online', (worker) => {
    activeWorkers++;
    const uptime = ((Date.now() - startTime) / 1000).toFixed(2);
    logger.info(
      `Worker ${worker.process.pid} is online (${activeWorkers}/${workers}) after ${uptime}s`,
    );

    /** Notify the last worker to perform one-time initialization tasks */
    if (activeWorkers === workers) {
      const allWorkers = Object.values(cluster.workers);
      const lastWorker = allWorkers[allWorkers.length - 1];
      if (lastWorker) {
        logger.info(wrapLogMessage(`All ${workers} workers are online`));
        lastWorker.send({ type: 'last-worker' });
      }
    }
  });

  cluster.on('exit', (worker, code, signal) => {
    activeWorkers--;
    logger.error(
      `Worker ${worker.process.pid} died (${activeWorkers}/${workers}). Code: ${code}, Signal: ${signal}`,
    );
    logger.info('Starting a new worker to replace it...');
    cluster.fork();
  });

  /** Graceful shutdown on SIGTERM/SIGINT */
  const shutdown = () => {
    logger.info('Master received shutdown signal, terminating workers...');
    for (const id in cluster.workers) {
      cluster.workers[id].kill();
    }
    setTimeout(() => {
      logger.info('Forcing shutdown after timeout');
      process.exit(0);
    }, 10000);
  };

  process.on('SIGTERM', shutdown);
  process.on('SIGINT', shutdown);
} else {
  /**
   * Worker process
   * Each worker runs a full Express server instance
   */
  const app = express();

  const startServer = async () => {
    logger.info(`Worker ${process.pid} initializing...`);

    if (typeof Bun !== 'undefined') {
      axios.defaults.headers.common['Accept-Encoding'] = 'gzip';
    }

    /** Connect to MongoDB */
    await connectDb();
    logger.info(`Worker ${process.pid}: Connected to MongoDB`);

    /** Background index sync (non-blocking) */
    indexSync().catch((err) => {
      logger.error(`[Worker ${process.pid}][indexSync] Background sync failed:`, err);
    });

    app.disable('x-powered-by');
    app.set('trust proxy', trusted_proxy);

    /** Seed database (idempotent) */
    await seedDatabase();

    /** Initialize app configuration */
    const appConfig = await getAppConfig();
    initializeFileStorage(appConfig);
    await performStartupChecks(appConfig);
    await updateInterfacePermissions(appConfig);

    /** Load index.html for SPA serving */
    const indexPath = path.join(appConfig.paths.dist, 'index.html');
    let indexHTML = fs.readFileSync(indexPath, 'utf8');

    /** Support serving in subdirectory if DOMAIN_CLIENT is set */
    if (process.env.DOMAIN_CLIENT) {
      const clientUrl = new URL(process.env.DOMAIN_CLIENT);
      const baseHref = clientUrl.pathname.endsWith('/')
        ? clientUrl.pathname
        : `${clientUrl.pathname}/`;
      if (baseHref !== '/') {
        logger.info(`Setting base href to ${baseHref}`);
        indexHTML = indexHTML.replace(/base href="\/"/, `base href="${baseHref}"`);
      }
    }

    /** Health check endpoint */
    app.get('/health', (_req, res) => res.status(200).send('OK'));

    /** Middleware */
    app.use(noIndex);
    app.use(express.json({ limit: '3mb' }));
    app.use(express.urlencoded({ extended: true, limit: '3mb' }));
    app.use(handleJsonParseError);
    app.use(mongoSanitize());
    app.use(cors());
    app.use(cookieParser());

    if (!isEnabled(DISABLE_COMPRESSION)) {
      app.use(compression());
    } else {
      logger.warn('Response compression has been disabled via DISABLE_COMPRESSION.');
    }

    app.use(staticCache(appConfig.paths.dist));
    app.use(staticCache(appConfig.paths.fonts));
    app.use(staticCache(appConfig.paths.assets));

    if (!ALLOW_SOCIAL_LOGIN) {
      logger.warn('Social logins are disabled. Set ALLOW_SOCIAL_LOGIN=true to enable them.');
    }

    /** OAUTH */
    app.use(passport.initialize());
    passport.use(jwtLogin());
    passport.use(passportLogin());

    /** LDAP Auth */
    if (process.env.LDAP_URL && process.env.LDAP_USER_SEARCH_BASE) {
      passport.use(ldapLogin);
    }

    if (isEnabled(ALLOW_SOCIAL_LOGIN)) {
      await configureSocialLogins(app);
    }

    /** Routes */
    app.use('/oauth', routes.oauth);
    app.use('/api/auth', routes.auth);
    app.use('/api/actions', routes.actions);
    app.use('/api/keys', routes.keys);
    app.use('/api/user', routes.user);
    app.use('/api/search', routes.search);
    app.use('/api/edit', routes.edit);
    app.use('/api/messages', routes.messages);
    app.use('/api/convos', routes.convos);
    app.use('/api/presets', routes.presets);
    app.use('/api/prompts', routes.prompts);
    app.use('/api/categories', routes.categories);
    app.use('/api/endpoints', routes.endpoints);
    app.use('/api/balance', routes.balance);
    app.use('/api/models', routes.models);
    app.use('/api/plugins', routes.plugins);
    app.use('/api/config', routes.config);
    app.use('/api/assistants', routes.assistants);
    app.use('/api/files', await routes.files.initialize());
    app.use('/images/', createValidateImageRequest(appConfig.secureImageLinks), routes.staticRoute);
    app.use('/api/share', routes.share);
    app.use('/api/roles', routes.roles);
    app.use('/api/agents', routes.agents);
    app.use('/api/banner', routes.banner);
    app.use('/api/memories', routes.memories);
    app.use('/api/permissions', routes.accessPermissions);
    app.use('/api/tags', routes.tags);
    app.use('/api/mcp', routes.mcp);

    /** Error handler */
    app.use(ErrorController);

    /** SPA fallback - serve index.html for all unmatched routes */
    app.use((req, res) => {
      res.set({
        'Cache-Control': process.env.INDEX_CACHE_CONTROL || 'no-cache, no-store, must-revalidate',
        Pragma: process.env.INDEX_PRAGMA || 'no-cache',
        Expires: process.env.INDEX_EXPIRES || '0',
      });

      const lang = req.cookies.lang || req.headers['accept-language']?.split(',')[0] || 'en-US';
      const saneLang = lang.replace(/"/g, '&quot;');
      let updatedIndexHtml = indexHTML.replace(/lang="en-US"/g, `lang="${saneLang}"`);

      res.type('html');
      res.send(updatedIndexHtml);
    });

    /** Start listening on shared port (cluster will distribute connections) */
    app.listen(port, host, async () => {
      logger.info(
        `Worker ${process.pid} started: Server listening at http://${
          host == '0.0.0.0' ? 'localhost' : host
        }:${port}`,
      );

      /** Initialize MCP servers and OAuth reconnection for this worker */
      await initializeMCPs();
      await initializeOAuthReconnectManager();
      await checkMigrations();
    });

    /** Handle inter-process messages from master */
    process.on('message', async (msg) => {
      if (msg.type === 'last-worker') {
        logger.info(
          wrapLogMessage(
            `Worker ${process.pid} is the last worker and can perform special initialization tasks`,
          ),
        );
        /** Add any one-time initialization tasks here */
        /** For example: scheduled jobs, cleanup tasks, etc. */
      }
    });
  };

  startServer().catch((err) => {
    logger.error(`Failed to start worker ${process.pid}:`, err);
    process.exit(1);
  });

  /** Export app for testing purposes (only available in worker processes) */
  module.exports = app;
}

/**
 * Uncaught exception handler
 * Filters out known non-critical errors
 */
let messageCount = 0;
process.on('uncaughtException', (err) => {
  if (!err.message.includes('fetch failed')) {
    logger.error('There was an uncaught error:', err);
  }

  if (err.message && err.message?.toLowerCase()?.includes('abort')) {
    logger.warn('There was an uncatchable abort error.');
    return;
  }

  if (err.message.includes('GoogleGenerativeAI')) {
    logger.warn(
      '\n\n`GoogleGenerativeAI` errors cannot be caught due to an upstream issue, see: https://github.com/google-gemini/generative-ai-js/issues/303',
    );
    return;
  }

  if (err.message.includes('fetch failed')) {
    if (messageCount === 0) {
      logger.warn('Meilisearch error, search will be disabled');
      messageCount++;
    }
    return;
  }

  if (err.message.includes('OpenAIError') || err.message.includes('ChatCompletionMessage')) {
    logger.error(
      '\n\nAn Uncaught `OpenAIError` error may be due to your reverse-proxy setup or stream configuration, or a bug in the `openai` node package.',
    );
    return;
  }

  if (err.stack && err.stack.includes('@librechat/agents')) {
    logger.error(
      '\n\nAn error occurred in the agents system. The error has been logged and the app will continue running.',
      {
        message: err.message,
        stack: err.stack,
      },
    );
    return;
  }

  process.exit(1);
});

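This file is a test harness rather than a production entry point: the master flushes Redis, forks CLUSTER_WORKERS workers, and each worker boots the full Express app on a shared port. A hypothetical way to exercise it locally, assuming the defaults above and Node 18+ for the global fetch:

// Hypothetical smoke test: spawn the cluster entry point with two workers,
// then poll /health until a worker answers.
const { spawn } = require('child_process');

const server = spawn('node', ['api/server/experimental.js'], {
  env: { ...process.env, CLUSTER_WORKERS: '2', PORT: '3080' },
  stdio: 'inherit',
});

const poll = setInterval(async () => {
  try {
    const res = await fetch('http://localhost:3080/health');
    if (res.ok) {
      console.log('cluster is serving requests');
      clearInterval(poll);
      server.kill();
    }
  } catch {
    /* not up yet; keep polling */
  }
}, 1000);
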
@@ -14,6 +14,7 @@ const {
  isEnabled,
  ErrorController,
  performStartupChecks,
+  handleJsonParseError,
  initializeFileStorage,
} = require('@librechat/api');
const { connectDb, indexSync } = require('~/db');

@@ -81,6 +82,7 @@ const startServer = async () => {
  app.use(noIndex);
  app.use(express.json({ limit: '3mb' }));
  app.use(express.urlencoded({ extended: true, limit: '3mb' }));
+  app.use(handleJsonParseError);
  app.use(mongoSanitize());
  app.use(cors());
  app.use(cookieParser());

@@ -126,7 +128,6 @@ const startServer = async () => {
  app.use('/api/presets', routes.presets);
  app.use('/api/prompts', routes.prompts);
  app.use('/api/categories', routes.categories);
-  app.use('/api/tokenizer', routes.tokenizer);
  app.use('/api/endpoints', routes.endpoints);
  app.use('/api/balance', routes.balance);
  app.use('/api/models', routes.models);

@@ -213,6 +214,17 @@ process.on('uncaughtException', (err) => {
    return;
  }

+  if (err.stack && err.stack.includes('@librechat/agents')) {
+    logger.error(
+      '\n\nAn error occurred in the agents system. The error has been logged and the app will continue running.',
+      {
+        message: err.message,
+        stack: err.stack,
+      },
+    );
+    return;
+  }
+
  process.exit(1);
});

@@ -1,5 +1,5 @@
const { logger } = require('@librechat/data-schemas');
-const { countTokens, isEnabled, sendEvent } = require('@librechat/api');
+const { countTokens, isEnabled, sendEvent, sanitizeMessageForTransmit } = require('@librechat/api');
const { isAssistantsEndpoint, ErrorTypes, Constants } = require('librechat-data-provider');
const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
const clearPendingReq = require('~/cache/clearPendingReq');

@@ -290,7 +290,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
      title: conversation && !conversation.title ? null : conversation?.title || 'New Chat',
      final: true,
      conversation,
-      requestMessage: userMessage,
+      requestMessage: sanitizeMessageForTransmit(userMessage),
      responseMessage: responseMessage,
    };
  };

@@ -61,18 +61,24 @@ async function buildEndpointOption(req, res, next)

    try {
      currentModelSpec.preset.spec = spec;
-      if (currentModelSpec.iconURL != null && currentModelSpec.iconURL !== '') {
-        currentModelSpec.preset.iconURL = currentModelSpec.iconURL;
-      }
      parsedBody = parseCompactConvo({
        endpoint,
        endpointType,
        conversation: currentModelSpec.preset,
      });
+      if (currentModelSpec.iconURL != null && currentModelSpec.iconURL !== '') {
+        parsedBody.iconURL = currentModelSpec.iconURL;
+      }
    } catch (error) {
      logger.error(`Error parsing model spec for endpoint ${endpoint}`, error);
      return handleError(res, { text: 'Error parsing model spec' });
    }
+  } else if (parsedBody.spec && appConfig.modelSpecs?.list) {
+    // Non-enforced mode: if spec is selected, derive iconURL from model spec
+    const modelSpec = appConfig.modelSpecs.list.find((s) => s.name === parsedBody.spec);
+    if (modelSpec?.iconURL) {
+      parsedBody.iconURL = modelSpec.iconURL;
+    }
  }

  try {

@@ -1,7 +1,7 @@
const crypto = require('crypto');
const { logger } = require('@librechat/data-schemas');
const { parseConvo } = require('librechat-data-provider');
-const { sendEvent, handleError } = require('@librechat/api');
+const { sendEvent, handleError, sanitizeMessageForTransmit } = require('@librechat/api');
const { saveMessage, getMessages } = require('~/models/Message');
const { getConvo } = require('~/models/Conversation');

@@ -71,7 +71,7 @@ const sendError = async (req, res, options, callback) => {

  return sendEvent(res, {
    final: true,
-    requestMessage: query?.[0] ? query[0] : requestMessage,
+    requestMessage: sanitizeMessageForTransmit(query?.[0] ?? requestMessage),
    responseMessage: errorMessage,
    conversation: convo,
  });

@@ -1,11 +1,14 @@
const jwt = require('jsonwebtoken');
-const { isEnabled } = require('@librechat/api');
const createValidateImageRequest = require('~/server/middleware/validateImageRequest');

+// Mock only isEnabled, keep getBasePath real so it reads process.env.DOMAIN_CLIENT
jest.mock('@librechat/api', () => ({
+  ...jest.requireActual('@librechat/api'),
  isEnabled: jest.fn(),
}));

+const { isEnabled } = require('@librechat/api');
+
describe('validateImageRequest middleware', () => {
  let req, res, next, validateImageRequest;
  const validObjectId = '65cfb246f7ecadb8b1e8036b';

@@ -23,6 +26,7 @@ describe('validateImageRequest middleware', () => {
    next = jest.fn();
    process.env.JWT_REFRESH_SECRET = 'test-secret';
    process.env.OPENID_REUSE_TOKENS = 'false';
+    delete process.env.DOMAIN_CLIENT; // Clear for tests without basePath

    // Default: OpenID token reuse disabled
    isEnabled.mockReturnValue(false);

@@ -296,4 +300,175 @@ describe('validateImageRequest middleware', () => {
      expect(res.send).toHaveBeenCalledWith('Access Denied');
    });
  });

+  describe('basePath functionality', () => {
+    let originalDomainClient;
+
+    beforeEach(() => {
+      originalDomainClient = process.env.DOMAIN_CLIENT;
+    });
+
+    afterEach(() => {
+      process.env.DOMAIN_CLIENT = originalDomainClient;
+    });
+
+    test('should validate image paths with base path', async () => {
+      process.env.DOMAIN_CLIENT = 'http://localhost:3080/librechat';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/librechat/images/${validObjectId}/test.jpg`;
+
+      await validateImageRequest(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should validate agent avatar paths with base path', async () => {
+      process.env.DOMAIN_CLIENT = 'http://localhost:3080/librechat';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/librechat/images/${validObjectId}/agent-avatar.png`;
+
+      await validateImageRequest(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should reject image paths without base path when DOMAIN_CLIENT is set', async () => {
+      process.env.DOMAIN_CLIENT = 'http://localhost:3080/librechat';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/images/${validObjectId}/test.jpg`;
+
+      await validateImageRequest(req, res, next);
+      expect(res.status).toHaveBeenCalledWith(403);
+      expect(res.send).toHaveBeenCalledWith('Access Denied');
+    });
+
+    test('should handle empty base path (root deployment)', async () => {
+      process.env.DOMAIN_CLIENT = 'http://localhost:3080/';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/images/${validObjectId}/test.jpg`;
+
+      await validateImageRequest(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should handle missing DOMAIN_CLIENT', async () => {
+      delete process.env.DOMAIN_CLIENT;
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/images/${validObjectId}/test.jpg`;
+
+      await validateImageRequest(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should handle nested subdirectories in base path', async () => {
+      process.env.DOMAIN_CLIENT = 'http://localhost:3080/apps/librechat';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/apps/librechat/images/${validObjectId}/test.jpg`;
+
+      await validateImageRequest(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should prevent path traversal with base path', async () => {
+      process.env.DOMAIN_CLIENT = 'http://localhost:3080/librechat';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/librechat/images/${validObjectId}/../../../etc/passwd`;
+
+      await validateImageRequest(req, res, next);
+      expect(res.status).toHaveBeenCalledWith(403);
+      expect(res.send).toHaveBeenCalledWith('Access Denied');
+    });
+
+    test('should handle URLs with query parameters and base path', async () => {
+      process.env.DOMAIN_CLIENT = 'http://localhost:3080/librechat';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/librechat/images/${validObjectId}/test.jpg?version=1`;
+
+      await validateImageRequest(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should handle URLs with fragments and base path', async () => {
+      process.env.DOMAIN_CLIENT = 'http://localhost:3080/librechat';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/librechat/images/${validObjectId}/test.jpg#section`;
+
+      await validateImageRequest(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should handle HTTPS URLs with base path', async () => {
+      process.env.DOMAIN_CLIENT = 'https://example.com/librechat';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/librechat/images/${validObjectId}/test.jpg`;
+
+      await validateImageRequest(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should handle invalid DOMAIN_CLIENT gracefully', async () => {
+      process.env.DOMAIN_CLIENT = 'not-a-valid-url';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}`;
+      req.originalUrl = `/images/${validObjectId}/test.jpg`;
+
+      await validateImageRequest(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+
+    test('should handle OpenID flow with base path', async () => {
+      process.env.DOMAIN_CLIENT = 'http://localhost:3080/librechat';
+      process.env.OPENID_REUSE_TOKENS = 'true';
+      const validToken = jwt.sign(
+        { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+        process.env.JWT_REFRESH_SECRET,
+      );
+      req.headers.cookie = `refreshToken=${validToken}; token_provider=openid; openid_user_id=${validToken}`;
+      req.originalUrl = `/librechat/images/${validObjectId}/test.jpg`;
+
+      await validateImageRequest(req, res, next);
+      expect(next).toHaveBeenCalled();
+    });
+  });
});

@ -1,7 +1,7 @@
|
|||
const cookies = require('cookie');
|
||||
const jwt = require('jsonwebtoken');
|
||||
const { isEnabled } = require('@librechat/api');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { isEnabled, getBasePath } = require('@librechat/api');
|
||||
|
||||
const OBJECT_ID_LENGTH = 24;
|
||||
const OBJECT_ID_PATTERN = /^[0-9a-f]{24}$/i;
|
||||
|
|
@ -124,14 +124,21 @@ function createValidateImageRequest(secureImageLinks) {
|
|||
return res.status(403).send('Access Denied');
|
||||
}
|
||||
|
||||
const agentAvatarPattern = /^\/images\/[a-f0-9]{24}\/agent-[^/]*$/;
|
||||
const basePath = getBasePath();
|
||||
const imagesPath = `${basePath}/images`;
|
||||
|
||||
const agentAvatarPattern = new RegExp(
|
||||
`^${imagesPath.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}/[a-f0-9]{24}/agent-[^/]*$`,
|
||||
);
|
||||
if (agentAvatarPattern.test(fullPath)) {
|
||||
logger.debug('[validateImageRequest] Image request validated');
|
||||
return next();
|
||||
}
|
||||
|
||||
const escapedUserId = userIdForPath.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
const pathPattern = new RegExp(`^/images/${escapedUserId}/[^/]+$`);
|
||||
const pathPattern = new RegExp(
|
||||
`^${imagesPath.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}/${escapedUserId}/[^/]+$`,
|
||||
);
|
||||
|
||||
if (pathPattern.test(fullPath)) {
|
||||
logger.debug('[validateImageRequest] Image request validated');
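
For illustration, both regexes above follow the same construction: escape the deployment base path for use in a RegExp, then anchor the image route to it. A minimal standalone sketch of that pattern (the helper name and sample paths are illustrative, not part of the codebase):

const escapeForRegExp = (str) => str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

// Builds e.g. /^\/librechat\/images\/abc123\/[^/]+$/ for basePath '/librechat'
function buildImagePattern(basePath, userId) {
  const imagesPath = `${basePath}/images`;
  return new RegExp(`^${escapeForRegExp(imagesPath)}/${escapeForRegExp(userId)}/[^/]+$`);
}

buildImagePattern('/librechat', 'abc123').test('/librechat/images/abc123/test.jpg'); // true
buildImagePattern('', 'abc123').test('/images/abc123/test.jpg'); // true (root deployment)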

@@ -146,7 +146,15 @@ router.delete(
 * @param {number} req.body.version_index - Index of the version to revert to.
 * @returns {Agent} 200 - success response - application/json
 */
router.post('/:id/revert', checkGlobalAgentShare, v1.revertAgentVersion);
router.post(
  '/:id/revert',
  checkGlobalAgentShare,
  canAccessAgentResource({
    requiredPermission: PermissionBits.EDIT,
    resourceIdParam: 'id',
  }),
  v1.revertAgentVersion,
);

/**
 * Returns a list of agents.

@@ -30,11 +30,46 @@ const publicSharedLinksEnabled =
const sharePointFilePickerEnabled = isEnabled(process.env.ENABLE_SHAREPOINT_FILEPICKER);
const openidReuseTokens = isEnabled(process.env.OPENID_REUSE_TOKENS);

/**
 * Fetches MCP servers from registry and adds them to the payload.
 * Registry now includes all configured servers (from YAML) plus inspection data when available.
 * Always fetches fresh to avoid caching incomplete initialization state.
 */
const getMCPServers = async (payload, appConfig) => {
  try {
    if (appConfig?.mcpConfig == null) {
      return;
    }
    const mcpManager = getMCPManager();
    if (!mcpManager) {
      return;
    }
    const mcpServers = await mcpServersRegistry.getAllServerConfigs();
    if (!mcpServers) return;
    for (const serverName in mcpServers) {
      if (!payload.mcpServers) {
        payload.mcpServers = {};
      }
      const serverConfig = mcpServers[serverName];
      payload.mcpServers[serverName] = removeNullishValues({
        startup: serverConfig?.startup,
        chatMenu: serverConfig?.chatMenu,
        isOAuth: serverConfig.requiresOAuth,
        customUserVars: serverConfig?.customUserVars,
      });
    }
  } catch (error) {
    logger.error('Error loading MCP servers', error);
  }
};

router.get('/', async function (req, res) {
  const cache = getLogStores(CacheKeys.CONFIG_STORE);

  const cachedStartupConfig = await cache.get(CacheKeys.STARTUP_CONFIG);
  if (cachedStartupConfig) {
    const appConfig = await getAppConfig({ role: req.user?.role });
    await getMCPServers(cachedStartupConfig, appConfig);
    res.send(cachedStartupConfig);
    return;
  }

@@ -126,35 +161,6 @@ router.get('/', async function (req, res) {
    payload.minPasswordLength = minPasswordLength;
  }

  const getMCPServers = async () => {
    try {
      if (appConfig?.mcpConfig == null) {
        return;
      }
      const mcpManager = getMCPManager();
      if (!mcpManager) {
        return;
      }
      const mcpServers = await mcpServersRegistry.getAllServerConfigs();
      if (!mcpServers) return;
      for (const serverName in mcpServers) {
        if (!payload.mcpServers) {
          payload.mcpServers = {};
        }
        const serverConfig = mcpServers[serverName];
        payload.mcpServers[serverName] = removeNullishValues({
          startup: serverConfig?.startup,
          chatMenu: serverConfig?.chatMenu,
          isOAuth: serverConfig.requiresOAuth,
          customUserVars: serverConfig?.customUserVars,
        });
      }
    } catch (error) {
      logger.error('Error loading MCP servers', error);
    }
  };

  await getMCPServers();
  const webSearchConfig = appConfig?.webSearch;
  if (
    webSearchConfig != null &&

@@ -184,6 +190,7 @@ router.get('/', async function (req, res) {
    }

    await cache.set(CacheKeys.STARTUP_CONFIG, payload);
    await getMCPServers(payload, appConfig);
    return res.status(200).send(payload);
  } catch (err) {
    logger.error('Error in startup config', err);
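
The ordering in this route is deliberate: the payload is cached first and the MCP server data is merged in afterwards, on both the cached path and the cold path, so registry state is never frozen into the cached startup config. A generic sketch of that cache-then-decorate shape (names are illustrative, not the route's actual API):

async function getConfigPayload(cache, buildPayload, decorate) {
  let payload = await cache.get('config');
  if (!payload) {
    payload = await buildPayload();
    await cache.set('config', payload); // cache the stable part only
  }
  await decorate(payload); // fresh per-request data, never cached
  return payload;
}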

@@ -1,7 +1,6 @@
const accessPermissions = require('./accessPermissions');
const assistants = require('./assistants');
const categories = require('./categories');
const tokenizer = require('./tokenizer');
const endpoints = require('./endpoints');
const staticRoute = require('./static');
const messages = require('./messages');

@@ -53,7 +52,6 @@ module.exports = {
  messages,
  memories,
  endpoints,
  tokenizer,
  assistants,
  categories,
  staticRoute,

@@ -429,13 +429,23 @@ router.get('/connection/status', requireJwtAuth, async (req, res) => {
    const connectionStatus = {};

    for (const [serverName] of Object.entries(mcpConfig)) {
      connectionStatus[serverName] = await getServerConnectionStatus(
        user.id,
        serverName,
        appConnections,
        userConnections,
        oauthServers,
      );
      try {
        connectionStatus[serverName] = await getServerConnectionStatus(
          user.id,
          serverName,
          appConnections,
          userConnections,
          oauthServers,
        );
      } catch (error) {
        const message = `Failed to get status for server "${serverName}"`;
        logger.error(`[MCP Connection Status] ${message},`, error);
        connectionStatus[serverName] = {
          connectionState: 'error',
          requiresOAuth: oauthServers.has(serverName),
          error: message,
        };
      }
    }

    res.json({

@@ -1,7 +1,7 @@
const express = require('express');
const { unescapeLaTeX } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { ContentTypes } = require('librechat-data-provider');
const { unescapeLaTeX, countTokens } = require('@librechat/api');
const {
  saveConvo,
  getMessage,

@@ -14,7 +14,6 @@ const { findAllArtifacts, replaceArtifactContent } = require('~/server/services/
const { requireJwtAuth, validateMessageReq } = require('~/server/middleware');
const { cleanUpPrimaryKeyValue } = require('~/lib/utils/misc');
const { getConvosQueried } = require('~/models/Conversation');
const { countTokens } = require('~/server/utils');
const { Message } = require('~/db/models');

const router = express.Router();

@@ -5,6 +5,7 @@ const {
  markPublicPromptGroups,
  buildPromptGroupFilter,
  formatPromptGroupsResponse,
  safeValidatePromptGroupUpdate,
  createEmptyPromptGroupsResponse,
  filterAccessibleIdsBySharedLogic,
} = require('@librechat/api');

@@ -344,7 +345,16 @@ const patchPromptGroup = async (req, res) => {
    if (req.user.role === SystemRoles.ADMIN) {
      delete filter.author;
    }
    const promptGroup = await updatePromptGroup(filter, req.body);

    const validationResult = safeValidatePromptGroupUpdate(req.body);
    if (!validationResult.success) {
      return res.status(400).send({
        error: 'Invalid request body',
        details: validationResult.error.errors,
      });
    }

    const promptGroup = await updatePromptGroup(filter, validationResult.data);
    res.status(200).send(promptGroup);
  } catch (error) {
    logger.error(error);
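
`safeValidatePromptGroupUpdate` itself lives in `@librechat/api` and is not shown in this diff; judging from the security tests below, it whitelists the mutable fields and rejects everything else. A hypothetical sketch of such a validator, assuming Zod (the field list is inferred from the tests, not the actual implementation):

const { z } = require('zod');

// `.strict()` rejects unknown keys such as author, _id, productionId, __v
const promptGroupUpdateSchema = z
  .object({
    name: z.string().min(1).optional(),
    category: z.string().optional(),
    oneliner: z.string().optional(),
  })
  .strict();

const safeValidatePromptGroupUpdateSketch = (body) => promptGroupUpdateSchema.safeParse(body);
// safeValidatePromptGroupUpdateSketch({ name: 'ok', author: 'x' }).success === false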

@@ -544,6 +544,169 @@ describe('Prompt Routes - ACL Permissions', () => {
    });
  });

  describe('PATCH /api/prompts/groups/:groupId - Update Prompt Group Security', () => {
    let testGroup;

    beforeEach(async () => {
      // Create a prompt group
      testGroup = await PromptGroup.create({
        name: 'Security Test Group',
        category: 'security-test',
        author: testUsers.owner._id,
        authorName: testUsers.owner.name,
        productionId: new ObjectId(),
      });

      // Grant owner permissions
      await grantPermission({
        principalType: PrincipalType.USER,
        principalId: testUsers.owner._id,
        resourceType: ResourceType.PROMPTGROUP,
        resourceId: testGroup._id,
        accessRoleId: AccessRoleIds.PROMPTGROUP_OWNER,
        grantedBy: testUsers.owner._id,
      });
    });

    afterEach(async () => {
      await PromptGroup.deleteMany({});
      await AclEntry.deleteMany({});
    });

    it('should allow updating allowed fields (name, category, oneliner)', async () => {
      const updateData = {
        name: 'Updated Group Name',
        category: 'updated-category',
        oneliner: 'Updated description',
      };

      const response = await request(app)
        .patch(`/api/prompts/groups/${testGroup._id}`)
        .send(updateData)
        .expect(200);

      expect(response.body.name).toBe(updateData.name);
      expect(response.body.category).toBe(updateData.category);
      expect(response.body.oneliner).toBe(updateData.oneliner);
    });

    it('should reject request with author field (400 Bad Request)', async () => {
      const maliciousUpdate = {
        name: 'Legit Update',
        author: testUsers.noAccess._id.toString(), // Try to change ownership
      };

      const response = await request(app)
        .patch(`/api/prompts/groups/${testGroup._id}`)
        .send(maliciousUpdate)
        .expect(400);

      // Verify the request was rejected
      expect(response.body.error).toBe('Invalid request body');
      expect(response.body.details).toBeDefined();
    });

    it('should reject request with authorName field (400 Bad Request)', async () => {
      const maliciousUpdate = {
        name: 'Legit Update',
        authorName: 'Malicious Author Name',
      };

      const response = await request(app)
        .patch(`/api/prompts/groups/${testGroup._id}`)
        .send(maliciousUpdate)
        .expect(400);

      // Verify the request was rejected
      expect(response.body.error).toBe('Invalid request body');
    });

    it('should reject request with _id field (400 Bad Request)', async () => {
      const newId = new ObjectId();
      const maliciousUpdate = {
        name: 'Legit Update',
        _id: newId.toString(),
      };

      const response = await request(app)
        .patch(`/api/prompts/groups/${testGroup._id}`)
        .send(maliciousUpdate)
        .expect(400);

      // Verify the request was rejected
      expect(response.body.error).toBe('Invalid request body');
    });

    it('should reject request with productionId field (400 Bad Request)', async () => {
      const newProductionId = new ObjectId();
      const maliciousUpdate = {
        name: 'Legit Update',
        productionId: newProductionId.toString(),
      };

      const response = await request(app)
        .patch(`/api/prompts/groups/${testGroup._id}`)
        .send(maliciousUpdate)
        .expect(400);

      // Verify the request was rejected
      expect(response.body.error).toBe('Invalid request body');
    });

    it('should reject request with createdAt field (400 Bad Request)', async () => {
      const maliciousDate = new Date('2020-01-01');
      const maliciousUpdate = {
        name: 'Legit Update',
        createdAt: maliciousDate.toISOString(),
      };

      const response = await request(app)
        .patch(`/api/prompts/groups/${testGroup._id}`)
        .send(maliciousUpdate)
        .expect(400);

      // Verify the request was rejected
      expect(response.body.error).toBe('Invalid request body');
    });

    it('should reject request with __v field (400 Bad Request)', async () => {
      const maliciousUpdate = {
        name: 'Legit Update',
        __v: 999,
      };

      const response = await request(app)
        .patch(`/api/prompts/groups/${testGroup._id}`)
        .send(maliciousUpdate)
        .expect(400);

      // Verify the request was rejected
      expect(response.body.error).toBe('Invalid request body');
    });

    it('should reject request with multiple sensitive fields (400 Bad Request)', async () => {
      const maliciousUpdate = {
        name: 'Legit Update',
        author: testUsers.noAccess._id.toString(),
        authorName: 'Hacker',
        _id: new ObjectId().toString(),
        productionId: new ObjectId().toString(),
        createdAt: new Date('2020-01-01').toISOString(),
        __v: 999,
      };

      const response = await request(app)
        .patch(`/api/prompts/groups/${testGroup._id}`)
        .send(maliciousUpdate)
        .expect(400);

      // Verify the request was rejected with validation errors
      expect(response.body.error).toBe('Invalid request body');
      expect(response.body.details).toBeDefined();
      expect(Array.isArray(response.body.details)).toBe(true);
    });
  });

  describe('Pagination', () => {
    beforeEach(async () => {
      // Create multiple prompt groups for pagination testing

@@ -1,19 +0,0 @@
const express = require('express');
const { logger } = require('@librechat/data-schemas');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const { countTokens } = require('~/server/utils');

const router = express.Router();

router.post('/', requireJwtAuth, async (req, res) => {
  try {
    const { arg } = req.body;
    const count = await countTokens(arg?.text ?? arg);
    res.send({ count });
  } catch (e) {
    logger.error('[/tokenizer] Error counting tokens', e);
    res.status(500).json('Error counting tokens');
  }
});

module.exports = router;

@@ -8,7 +8,12 @@ const {
  deleteUserController,
  getUserController,
} = require('~/server/controllers/UserController');
const { requireJwtAuth, canDeleteAccount, verifyEmailLimiter } = require('~/server/middleware');
const {
  verifyEmailLimiter,
  configMiddleware,
  canDeleteAccount,
  requireJwtAuth,
} = require('~/server/middleware');

const router = express.Router();

@@ -16,7 +21,7 @@ router.get('/', requireJwtAuth, getUserController);
router.get('/terms', requireJwtAuth, getTermsStatusController);
router.post('/terms/accept', requireJwtAuth, acceptTermsController);
router.post('/plugins', requireJwtAuth, updateUserPluginsController);
router.delete('/delete', requireJwtAuth, canDeleteAccount, deleteUserController);
router.delete('/delete', requireJwtAuth, canDeleteAccount, configMiddleware, deleteUserController);
router.post('/verify', verifyEmailController);
router.post('/verify/resend', verifyEmailLimiter, resendVerificationController);

@@ -176,7 +176,7 @@ const registerUser = async (user, additionalData = {}) => {
    return { status: 404, message: errorMessage };
  }

  const { email, password, name, username } = user;
  const { email, password, name, username, provider } = user;

  let newUserId;
  try {

@@ -207,7 +207,7 @@ const registerUser = async (user, additionalData = {}) => {

    const salt = bcrypt.genSaltSync(10);
    const newUserData = {
      provider: 'local',
      provider: provider ?? 'local',
      email,
      username,
      name,

@@ -412,7 +412,7 @@ const setAuthTokens = async (userId, res, _session = null) => {
 * @param {string} [userId] - Optional MongoDB user ID for image path validation
 * @returns {String} - access token
 */
const setOpenIDAuthTokens = (tokenset, res, userId) => {
const setOpenIDAuthTokens = (tokenset, res, userId, existingRefreshToken) => {
  try {
    if (!tokenset) {
      logger.error('[setOpenIDAuthTokens] No tokenset found in request');

@@ -427,11 +427,25 @@ const setOpenIDAuthTokens = (tokenset, res, userId) => {
      logger.error('[setOpenIDAuthTokens] No tokenset found in request');
      return;
    }
    if (!tokenset.access_token || !tokenset.refresh_token) {
      logger.error('[setOpenIDAuthTokens] No access or refresh token found in tokenset');
    if (!tokenset.access_token) {
      logger.error('[setOpenIDAuthTokens] No access token found in tokenset');
      return;
    }
    res.cookie('refreshToken', tokenset.refresh_token, {

    const refreshToken = tokenset.refresh_token || existingRefreshToken;

    if (!refreshToken) {
      logger.error('[setOpenIDAuthTokens] No refresh token available');
      return;
    }

    res.cookie('refreshToken', refreshToken, {
      expires: expirationDate,
      httpOnly: true,
      secure: isProduction,
      sameSite: 'strict',
    });
    res.cookie('openid_access_token', tokenset.access_token, {
      expires: expirationDate,
      httpOnly: true,
      secure: isProduction,
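
The `tokenset.refresh_token || existingRefreshToken` fallback matters because, per RFC 6749, an authorization server may omit the refresh_token from a refresh-grant response; without the fallback, the cookie would be dropped even though the previously issued refresh token is still valid. A condensed illustration (hypothetical helper, not part of the diff):

// Prefer a newly rotated refresh token; otherwise keep the one already held
const pickRefreshToken = (tokenset, existing) => tokenset.refresh_token || existing;

pickRefreshToken({ access_token: 'at', refresh_token: 'rt2' }, 'rt1'); // 'rt2'
pickRefreshToken({ access_token: 'at' }, 'rt1'); // 'rt1' (IdP omitted rotation)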

@@ -16,6 +16,11 @@ async function updateMCPServerTools({ userId, serverName, tools }) {
  const serverTools = {};
  const mcpDelimiter = Constants.mcp_delimiter;

  if (tools == null || tools.length === 0) {
    logger.debug(`[MCP Cache] No tools to update for server ${serverName} (user: ${userId})`);
    return serverTools;
  }

  for (const tool of tools) {
    const name = `${tool.name}${mcpDelimiter}${serverName}`;
    serverTools[name] = {

@@ -1,5 +1,6 @@
const OpenAI = require('openai');
const { ProxyAgent } = require('undici');
const { isUserProvided } = require('@librechat/api');
const { ErrorTypes, EModelEndpoint } = require('librechat-data-provider');
const {
  getUserKeyValues,

@@ -7,7 +8,6 @@ const {
  checkUserKeyExpiry,
} = require('~/server/services/UserService');
const OAIClient = require('~/app/clients/OpenAIClient');
const { isUserProvided } = require('~/server/utils');

const initializeClient = async ({ req, res, endpointOption, version, initAppClient = false }) => {
  const { PROXY, OPENAI_ORGANIZATION, ASSISTANTS_API_KEY, ASSISTANTS_BASE_URL } = process.env;

@@ -1,9 +1,4 @@
const {
  resolveHeaders,
  isUserProvided,
  getOpenAIConfig,
  getCustomEndpointConfig,
} = require('@librechat/api');
const { isUserProvided, getOpenAIConfig, getCustomEndpointConfig } = require('@librechat/api');
const {
  CacheKeys,
  ErrorTypes,

@@ -34,14 +29,6 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
  const CUSTOM_API_KEY = extractEnvVariable(endpointConfig.apiKey);
  const CUSTOM_BASE_URL = extractEnvVariable(endpointConfig.baseURL);

  /** Intentionally excludes passing `body`, i.e. `req.body`, as
   * values may not be accurate until `AgentClient` is initialized
   */
  let resolvedHeaders = resolveHeaders({
    headers: endpointConfig.headers,
    user: req.user,
  });

  if (CUSTOM_API_KEY.match(envVarRegex)) {
    throw new Error(`Missing API Key for ${endpoint}.`);
  }

@@ -108,7 +95,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
  }

  const customOptions = {
    headers: resolvedHeaders,
    headers: endpointConfig.headers,
    addParams: endpointConfig.addParams,
    dropParams: endpointConfig.dropParams,
    customParams: endpointConfig.customParams,

@@ -69,17 +69,21 @@ describe('custom/initializeClient', () => {
    });
  });

  it('calls resolveHeaders with headers, user, and body for body placeholder support', async () => {
    const { resolveHeaders } = require('@librechat/api');
    await initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true });
    expect(resolveHeaders).toHaveBeenCalledWith({
      headers: { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
      user: { id: 'user-123', email: 'test@example.com', role: 'user' },
      body: { endpoint: 'test-endpoint' }, // body - supports {{LIBRECHAT_BODY_*}} placeholders
  /**
   * Note: Request-based Header Resolution is deferred until right before LLM request is made
   * in the OpenAIClient or AgentClient, not during initialization.
   * This test verifies that the initialize function completes successfully with optionsOnly flag,
   * and that headers are passed through to be resolved later during the actual LLM request.
   */
  it('stores original template headers for deferred resolution', async () => {
    const result = await initializeClient({
      req: mockRequest,
      res: mockResponse,
      optionsOnly: true,
    });
    // Verify that options are returned for later use
    expect(result).toBeDefined();
    expect(result).toHaveProperty('useLegacyContent', true);
  });

  it('throws if endpoint config is missing', async () => {

@@ -12,14 +12,13 @@ const initGoogle = require('~/server/services/Endpoints/google/initialize');
 * @returns {boolean} - True if the provider is a known custom provider, false otherwise
 */
function isKnownCustomProvider(provider) {
  return [Providers.XAI, Providers.OLLAMA, Providers.DEEPSEEK, Providers.OPENROUTER].includes(
  return [Providers.XAI, Providers.DEEPSEEK, Providers.OPENROUTER].includes(
    provider?.toLowerCase() || '',
  );
}

const providerConfigMap = {
  [Providers.XAI]: initCustom,
  [Providers.OLLAMA]: initCustom,
  [Providers.DEEPSEEK]: initCustom,
  [Providers.OPENROUTER]: initCustom,
  [EModelEndpoint.openAI]: initOpenAI,

@@ -3,7 +3,8 @@ const fs = require('fs').promises;
const FormData = require('form-data');
const { Readable } = require('stream');
const { logger } = require('@librechat/data-schemas');
const { genAzureEndpoint } = require('@librechat/api');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { genAzureEndpoint, logAxiosError } = require('@librechat/api');
const { extractEnvVariable, STTProviders } = require('librechat-data-provider');
const { getAppConfig } = require('~/server/services/Config');

@@ -34,6 +35,34 @@ const MIME_TO_EXTENSION_MAP = {
  'audio/x-flac': 'flac',
};

/**
 * Validates and extracts ISO-639-1 language code from a locale string.
 * @param {string} language - The language/locale string (e.g., "en-US", "en", "zh-CN")
 * @returns {string|null} The ISO-639-1 language code (e.g., "en") or null if invalid
 */
function getValidatedLanguageCode(language) {
  try {
    if (!language) {
      return null;
    }

    const normalizedLanguage = language.toLowerCase();
    const isValidLocaleCode = /^[a-z]{2}(-[a-z]{2})?$/.test(normalizedLanguage);

    if (isValidLocaleCode) {
      return normalizedLanguage.split('-')[0];
    }

    logger.warn(
      `[STT] Invalid language format "${language}". Expected ISO-639-1 locale code like "en-US" or "en". Skipping language parameter.`,
    );
    return null;
  } catch (error) {
    logger.error(`[STT] Error validating language code "${language}":`, error);
    return null;
  }
}

/**
 * Gets the file extension from the MIME type.
 * @param {string} mimeType - The MIME type.

@@ -172,10 +201,9 @@ class STTService {
      model: sttSchema.model,
    };

    if (language) {
      /** Converted locale code (e.g., "en-US") to ISO-639-1 format (e.g., "en") */
      const isoLanguage = language.split('-')[0];
      data.language = isoLanguage;
    const validLanguage = getValidatedLanguageCode(language);
    if (validLanguage) {
      data.language = validLanguage;
    }

    const headers = {

@@ -220,10 +248,9 @@ class STTService {
      contentType: audioFile.mimetype,
    });

    if (language) {
      /** Converted locale code (e.g., "en-US") to ISO-639-1 format (e.g., "en") */
      const isoLanguage = language.split('-')[0];
      formData.append('language', isoLanguage);
    const validLanguage = getValidatedLanguageCode(language);
    if (validLanguage) {
      formData.append('language', validLanguage);
    }

    const headers = {
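
Both call sites now reduce to the same mapping through getValidatedLanguageCode; expected behavior, following directly from the regex above:

getValidatedLanguageCode('en-US');   // 'en'
getValidatedLanguageCode('zh-CN');   // 'zh'
getValidatedLanguageCode('en');      // 'en'
getValidatedLanguageCode('english'); // null (logs a warning; the parameter is skipped)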

@@ -266,8 +293,14 @@ class STTService {
      language,
    );

    const options = { headers };

    if (process.env.PROXY) {
      options.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
    }

    try {
      const response = await axios.post(url, data, { headers });
      const response = await axios.post(url, data, options);

      if (response.status !== 200) {
        throw new Error('Invalid response from the STT API');

@@ -279,7 +312,7 @@

      return response.data.text.trim();
    } catch (error) {
      logger.error(`STT request failed for provider ${provider}:`, error);
      logAxiosError({ message: `STT request failed for provider ${provider}:`, error });
      throw error;
    }
  }
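
The same PROXY wiring recurs in the TTS service below: build the axios options first, attach an HttpsProxyAgent only when the environment variable is set, then pass the options to axios.post. A condensed sketch of the shared shape (the helper name is illustrative; the services inline this logic rather than sharing a helper):

const { HttpsProxyAgent } = require('https-proxy-agent');

// Returns axios options with an optional proxy agent attached
function withProxy(options = {}) {
  if (process.env.PROXY) {
    options.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
  }
  return options;
}

// usage: await axios.post(url, data, withProxy({ headers }));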

@@ -309,7 +342,7 @@
      const text = await this.sttRequest(provider, sttSchema, { audioBuffer, audioFile, language });
      res.json({ text });
    } catch (error) {
      logger.error('An error occurred while processing the audio:', error);
      logAxiosError({ message: 'An error occurred while processing the audio:', error });
      res.sendStatus(500);
    } finally {
      try {

@@ -1,6 +1,7 @@
const axios = require('axios');
const { logger } = require('@librechat/data-schemas');
const { genAzureEndpoint } = require('@librechat/api');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { genAzureEndpoint, logAxiosError } = require('@librechat/api');
const { extractEnvVariable, TTSProviders } = require('librechat-data-provider');
const { getRandomVoiceId, createChunkProcessor, splitTextIntoChunks } = require('./streamAudio');
const { getAppConfig } = require('~/server/services/Config');

@@ -266,10 +267,14 @@ class TTSService {

    const options = { headers, responseType: stream ? 'stream' : 'arraybuffer' };

    if (process.env.PROXY) {
      options.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
    }

    try {
      return await axios.post(url, data, options);
    } catch (error) {
      logger.error(`TTS request failed for provider ${provider}:`, error);
      logAxiosError({ message: `TTS request failed for provider ${provider}:`, error });
      throw error;
    }
  }

@@ -325,7 +330,10 @@ class TTSService {
          break;
        }
      } catch (innerError) {
        logger.error('Error processing manual update:', chunk, innerError);
        logAxiosError({
          message: `[TTS] Error processing manual update for chunk: ${chunk?.text?.substring(0, 50)}...`,
          error: innerError,
        });
        if (!res.headersSent) {
          return res.status(500).end();
        }

@@ -337,7 +345,7 @@
        res.end();
      }
    } catch (error) {
      logger.error('Error creating the audio stream:', error);
      logAxiosError({ message: '[TTS] Error creating the audio stream:', error });
      if (!res.headersSent) {
        return res.status(500).send('An error occurred');
      }

@@ -407,7 +415,10 @@
          break;
        }
      } catch (innerError) {
        logger.error('Error processing audio stream update:', update, innerError);
        logAxiosError({
          message: `[TTS] Error processing audio stream update: ${update?.text?.substring(0, 50)}...`,
          error: innerError,
        });
        if (!res.headersSent) {
          return res.status(500).end();
        }

@@ -424,7 +435,7 @@
        res.end();
      }
    } catch (error) {
      logger.error('Failed to fetch audio:', error);
      logAxiosError({ message: '[TTS] Failed to fetch audio:', error });
      if (!res.headersSent) {
        res.status(500).end();
      }

@@ -1,9 +1,9 @@
const path = require('path');
const { v4 } = require('uuid');
const axios = require('axios');
const { logAxiosError } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { getCodeBaseURL } = require('@librechat/agents');
const { logAxiosError, getBasePath } = require('@librechat/api');
const {
  Tools,
  FileContext,

@@ -41,11 +41,12 @@ const processCodeOutput = async ({
  const appConfig = req.config;
  const currentDate = new Date();
  const baseURL = getCodeBaseURL();
  const basePath = getBasePath();
  const fileExt = path.extname(name);
  if (!fileExt || !imageExtRegex.test(name)) {
    return {
      filename: name,
      filepath: `/api/files/code/download/${session_id}/${id}`,
      filepath: `${basePath}/api/files/code/download/${session_id}/${id}`,
      /** Note: expires 24 hours after creation */
      expiresAt: currentDate.getTime() + 86400000,
      conversationId,

@@ -169,14 +169,24 @@ function extractFirebaseFilePath(urlString) {
const deleteFirebaseFile = async (req, file) => {
  if (file.embedded && process.env.RAG_API_URL) {
    const jwtToken = req.headers.authorization.split(' ')[1];
    axios.delete(`${process.env.RAG_API_URL}/documents`, {
      headers: {
        Authorization: `Bearer ${jwtToken}`,
        'Content-Type': 'application/json',
        accept: 'application/json',
      },
      data: [file.file_id],
    });
    try {
      await axios.delete(`${process.env.RAG_API_URL}/documents`, {
        headers: {
          Authorization: `Bearer ${jwtToken}`,
          'Content-Type': 'application/json',
          accept: 'application/json',
        },
        data: [file.file_id],
      });
    } catch (error) {
      if (error.response?.status === 404) {
        logger.warn(
          `[deleteFirebaseFile] Document ${file.file_id} not found in RAG API, may have been deleted already`,
        );
      } else {
        logger.error('[deleteFirebaseFile] Error deleting document from RAG API:', error);
      }
    }
  }

  const fileName = extractFirebaseFilePath(file.filepath);

@@ -210,14 +210,24 @@ const deleteLocalFile = async (req, file) => {

  if (file.embedded && process.env.RAG_API_URL) {
    const jwtToken = generateShortLivedToken(req.user.id);
    axios.delete(`${process.env.RAG_API_URL}/documents`, {
      headers: {
        Authorization: `Bearer ${jwtToken}`,
        'Content-Type': 'application/json',
        accept: 'application/json',
      },
      data: [file.file_id],
    });
    try {
      await axios.delete(`${process.env.RAG_API_URL}/documents`, {
        headers: {
          Authorization: `Bearer ${jwtToken}`,
          'Content-Type': 'application/json',
          accept: 'application/json',
        },
        data: [file.file_id],
      });
    } catch (error) {
      if (error.response?.status === 404) {
        logger.warn(
          `[deleteLocalFile] Document ${file.file_id} not found in RAG API, may have been deleted already`,
        );
      } else {
        logger.error('[deleteLocalFile] Error deleting document from RAG API:', error);
      }
    }
  }

  if (cleanFilepath.startsWith(`/uploads/${req.user.id}`)) {

@@ -1,11 +1,14 @@
const axios = require('axios');
const { Providers } = require('@librechat/agents');
const { logger } = require('@librechat/data-schemas');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { logAxiosError, inputSchema, processModelData } = require('@librechat/api');
const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
const { logAxiosError, inputSchema, processModelData, isUserProvided } = require('@librechat/api');
const {
  CacheKeys,
  defaultModels,
  KnownEndpoints,
  EModelEndpoint,
} = require('librechat-data-provider');
const { OllamaClient } = require('~/app/clients/OllamaClient');
const { isUserProvided } = require('~/server/utils');
const getLogStores = require('~/cache/getLogStores');
const { extractBaseURL } = require('~/utils');

@@ -68,7 +71,7 @@ const fetchModels = async ({
    return models;
  }

  if (name && name.toLowerCase().startsWith(Providers.OLLAMA)) {
  if (name && name.toLowerCase().startsWith(KnownEndpoints.ollama)) {
    try {
      return await OllamaClient.fetchModels(baseURL, { headers, user: userObject });
    } catch (ollamaError) {

@@ -80,7 +83,9 @@ const fetchModels = async ({

  try {
    const options = {
      headers: {},
      headers: {
        ...(headers ?? {}),
      },
      timeout: 5000,
    };

@@ -101,7 +106,7 @@ const fetchModels = async ({
      options.headers['OpenAI-Organization'] = process.env.OPENAI_ORGANIZATION;
    }

    const url = new URL(`${baseURL}${azure ? '' : '/models'}`);
    const url = new URL(`${baseURL.replace(/\/+$/, '')}${azure ? '' : '/models'}`);
    if (user && userIdQuery) {
      url.searchParams.append('user', user);
    }
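
The `baseURL.replace(/\/+$/, '')` call strips any run of trailing slashes before '/models' is appended, which is exactly what the new URL-construction tests below pin down; for example:

const modelsUrl = (baseURL) => `${baseURL.replace(/\/+$/, '')}/models`;

modelsUrl('https://api.test.com/v1');    // 'https://api.test.com/v1/models'
modelsUrl('https://api.test.com/v1/');   // 'https://api.test.com/v1/models'
modelsUrl('https://api.test.com/v1///'); // 'https://api.test.com/v1/models'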

@@ -81,6 +81,70 @@ describe('fetchModels', () => {
    );
  });

  it('should pass custom headers to the API request', async () => {
    const customHeaders = {
      'X-Custom-Header': 'custom-value',
      'X-API-Version': 'v2',
    };

    await fetchModels({
      user: 'user123',
      apiKey: 'testApiKey',
      baseURL: 'https://api.test.com',
      name: 'TestAPI',
      headers: customHeaders,
    });

    expect(axios.get).toHaveBeenCalledWith(
      expect.stringContaining('https://api.test.com/models'),
      expect.objectContaining({
        headers: expect.objectContaining({
          'X-Custom-Header': 'custom-value',
          'X-API-Version': 'v2',
          Authorization: 'Bearer testApiKey',
        }),
      }),
    );
  });

  it('should handle null headers gracefully', async () => {
    await fetchModels({
      user: 'user123',
      apiKey: 'testApiKey',
      baseURL: 'https://api.test.com',
      name: 'TestAPI',
      headers: null,
    });

    expect(axios.get).toHaveBeenCalledWith(
      expect.stringContaining('https://api.test.com/models'),
      expect.objectContaining({
        headers: expect.objectContaining({
          Authorization: 'Bearer testApiKey',
        }),
      }),
    );
  });

  it('should handle undefined headers gracefully', async () => {
    await fetchModels({
      user: 'user123',
      apiKey: 'testApiKey',
      baseURL: 'https://api.test.com',
      name: 'TestAPI',
      headers: undefined,
    });

    expect(axios.get).toHaveBeenCalledWith(
      expect.stringContaining('https://api.test.com/models'),
      expect.objectContaining({
        headers: expect.objectContaining({
          Authorization: 'Bearer testApiKey',
        }),
      }),
    );
  });

  afterEach(() => {
    jest.clearAllMocks();
  });

@@ -372,6 +436,68 @@ describe('fetchModels with Ollama specific logic', () => {
  });
});

describe('fetchModels URL construction with trailing slashes', () => {
  beforeEach(() => {
    axios.get.mockResolvedValue({
      data: {
        data: [{ id: 'model-1' }, { id: 'model-2' }],
      },
    });
  });

  afterEach(() => {
    jest.clearAllMocks();
  });

  it('should not create double slashes when baseURL has a trailing slash', async () => {
    await fetchModels({
      user: 'user123',
      apiKey: 'testApiKey',
      baseURL: 'https://api.test.com/v1/',
      name: 'TestAPI',
    });

    expect(axios.get).toHaveBeenCalledWith('https://api.test.com/v1/models', expect.any(Object));
  });

  it('should handle baseURL without trailing slash normally', async () => {
    await fetchModels({
      user: 'user123',
      apiKey: 'testApiKey',
      baseURL: 'https://api.test.com/v1',
      name: 'TestAPI',
    });

    expect(axios.get).toHaveBeenCalledWith('https://api.test.com/v1/models', expect.any(Object));
  });

  it('should handle baseURL with multiple trailing slashes', async () => {
    await fetchModels({
      user: 'user123',
      apiKey: 'testApiKey',
      baseURL: 'https://api.test.com/v1///',
      name: 'TestAPI',
    });

    expect(axios.get).toHaveBeenCalledWith('https://api.test.com/v1/models', expect.any(Object));
  });

  it('should correctly append query params after stripping trailing slashes', async () => {
    await fetchModels({
      user: 'user123',
      apiKey: 'testApiKey',
      baseURL: 'https://api.test.com/v1/',
      name: 'TestAPI',
      userIdQuery: true,
    });

    expect(axios.get).toHaveBeenCalledWith(
      'https://api.test.com/v1/models?user=user123',
      expect.any(Object),
    );
  });
});

describe('splitAndTrim', () => {
  it('should split a string by commas and trim each value', () => {
    const input = ' model1, model2 , model3,model4 ';

@@ -410,6 +536,64 @@ describe('getAnthropicModels', () => {
    const models = await getAnthropicModels();
    expect(models).toEqual(['claude-1', 'claude-2']);
  });

  it('should use Anthropic-specific headers when fetching models', async () => {
    delete process.env.ANTHROPIC_MODELS;
    process.env.ANTHROPIC_API_KEY = 'test-anthropic-key';

    axios.get.mockResolvedValue({
      data: {
        data: [{ id: 'claude-3' }, { id: 'claude-4' }],
      },
    });

    await fetchModels({
      user: 'user123',
      apiKey: 'test-anthropic-key',
      baseURL: 'https://api.anthropic.com/v1',
      name: EModelEndpoint.anthropic,
    });

    expect(axios.get).toHaveBeenCalledWith(
      expect.any(String),
      expect.objectContaining({
        headers: {
          'x-api-key': 'test-anthropic-key',
          'anthropic-version': expect.any(String),
        },
      }),
    );
  });

  it('should pass custom headers for Anthropic endpoint', async () => {
    const customHeaders = {
      'X-Custom-Header': 'custom-value',
    };

    axios.get.mockResolvedValue({
      data: {
        data: [{ id: 'claude-3' }],
      },
    });

    await fetchModels({
      user: 'user123',
      apiKey: 'test-anthropic-key',
      baseURL: 'https://api.anthropic.com/v1',
      name: EModelEndpoint.anthropic,
      headers: customHeaders,
    });

    expect(axios.get).toHaveBeenCalledWith(
      expect.any(String),
      expect.objectContaining({
        headers: {
          'x-api-key': 'test-anthropic-key',
          'anthropic-version': expect.any(String),
        },
      }),
    );
  });
});

describe('getGoogleModels', () => {

@@ -292,7 +292,7 @@ const ensurePrincipalExists = async function (principal) {
  let existingUser = await findUser({ idOnTheSource: principal.idOnTheSource });

  if (!existingUser) {
    existingUser = await findUser({ email: principal.email.toLowerCase() });
    existingUser = await findUser({ email: principal.email });
  }

  if (existingUser) {

@@ -1,5 +1,6 @@
const path = require('path');
const { v4 } = require('uuid');
const { countTokens, escapeRegExp } = require('@librechat/api');
const {
  Constants,
  ContentTypes,

@@ -8,7 +9,6 @@ const {
} = require('librechat-data-provider');
const { retrieveAndProcessFile } = require('~/server/services/Files/process');
const { recordMessage, getMessages } = require('~/models/Message');
const { countTokens, escapeRegExp } = require('~/server/utils');
const { spendTokens } = require('~/models/spendTokens');
const { saveConvo } = require('~/models/Conversation');

@@ -1,37 +0,0 @@
const { Tiktoken } = require('tiktoken/lite');
const { logger } = require('@librechat/data-schemas');
const p50k_base = require('tiktoken/encoders/p50k_base.json');
const cl100k_base = require('tiktoken/encoders/cl100k_base.json');

/**
 * Counts the number of tokens in a given text using a specified encoding model.
 *
 * This function utilizes the 'Tiktoken' library to encode text based on the selected model.
 * It supports two models, 'text-davinci-003' and 'gpt-3.5-turbo', each with its own encoding strategy.
 * For 'text-davinci-003', the 'p50k_base' encoder is used, whereas for other models, the 'cl100k_base' encoder is applied.
 * In case of an error during encoding, the error is logged, and the function returns 0.
 *
 * @async
 * @param {string} text - The text to be tokenized. Defaults to an empty string if not provided.
 * @param {string} modelName - The name of the model used for tokenizing. Defaults to 'gpt-3.5-turbo'.
 * @returns {Promise<number>} The number of tokens in the provided text. Returns 0 if an error occurs.
 * @throws Logs the error to a logger and rethrows if any error occurs during tokenization.
 */
const countTokens = async (text = '', modelName = 'gpt-3.5-turbo') => {
  let encoder = null;
  try {
    const model = modelName.includes('text-davinci-003') ? p50k_base : cl100k_base;
    encoder = new Tiktoken(model.bpe_ranks, model.special_tokens, model.pat_str);
    const tokens = encoder.encode(text);
    encoder.free();
    return tokens.length;
  } catch (e) {
    logger.error('[countTokens]', e);
    if (encoder) {
      encoder.free();
    }
    return 0;
  }
};

module.exports = countTokens;
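
With this file deleted, the tokenizer route removed, and the imports rewritten earlier in the diff, `countTokens` has a single home in `@librechat/api`. The drop-in import swaps suggest the package export keeps the same `(text, modelName)` signature; an illustrative consumer under that assumption:

const { countTokens } = require('@librechat/api');

(async () => {
  // same call shape as the old ~/server/utils helper
  const count = await countTokens('hello world', 'gpt-3.5-turbo');
  console.log(count);
})();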

@@ -10,14 +10,6 @@ const {
const { sendEvent } = require('@librechat/api');
const partialRight = require('lodash/partialRight');

/** Helper function to escape special characters in regex
 * @param {string} string - The string to escape.
 * @returns {string} The escaped string.
 */
function escapeRegExp(string) {
  return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}

const addSpaceIfNeeded = (text) => (text.length > 0 && !text.endsWith(' ') ? text + ' ' : text);

const base = { message: true, initial: true };

@@ -181,7 +173,6 @@ function generateConfig(key, baseURL, endpoint) {
module.exports = {
  handleText,
  formatSteps,
  escapeRegExp,
  formatAction,
  isUserProvided,
  generateConfig,

@@ -1,5 +1,4 @@
const removePorts = require('./removePorts');
const countTokens = require('./countTokens');
const handleText = require('./handleText');
const sendEmail = require('./sendEmail');
const queue = require('./queue');

@@ -7,7 +6,6 @@ const files = require('./files');

module.exports = {
  ...handleText,
  countTokens,
  removePorts,
  sendEmail,
  ...files,