Merge branch 'main' into feat/E2EE

Ruben Talstra 2025-03-05 10:50:49 +01:00 committed by GitHub
commit 40e59bc55c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
206 changed files with 14792 additions and 3465 deletions

View file

@ -61,7 +61,7 @@ const refreshController = async (req, res) => {
try {
const payload = jwt.verify(refreshToken, process.env.JWT_REFRESH_SECRET);
const user = await getUserById(payload.id, '-password -__v');
const user = await getUserById(payload.id, '-password -__v -totpSecret');
if (!user) {
return res.status(401).redirect('/login');
}

View file

@ -0,0 +1,119 @@
const {
verifyTOTP,
verifyBackupCode,
generateTOTPSecret,
generateBackupCodes,
getTOTPSecret,
} = require('~/server/services/twoFactorService');
const { updateUser, getUserById } = require('~/models');
const { logger } = require('~/config');
const { encryptV2 } = require('~/server/utils/crypto');
const enable2FAController = async (req, res) => {
const safeAppTitle = (process.env.APP_TITLE || 'LibreChat').replace(/\s+/g, '');
try {
const userId = req.user.id;
const secret = generateTOTPSecret();
const { plainCodes, codeObjects } = await generateBackupCodes();
const encryptedSecret = await encryptV2(secret);
const user = await updateUser(userId, { totpSecret: encryptedSecret, backupCodes: codeObjects });
const otpauthUrl = `otpauth://totp/${safeAppTitle}:${user.email}?secret=${secret}&issuer=${safeAppTitle}`;
res.status(200).json({
otpauthUrl,
backupCodes: plainCodes,
});
} catch (err) {
logger.error('[enable2FAController]', err);
res.status(500).json({ message: err.message });
}
};
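// Illustrative result (hypothetical values, not part of the diff): with APP_TITLE unset and a
// user whose email is alice@example.com, the response would resemble:
// otpauthUrl: 'otpauth://totp/LibreChat:alice@example.com?secret=JBSWY3DPEHPK3PXP&issuer=LibreChat'
// backupCodes: ['1f3c9a2b', ...] // plain 8-character hex codes shown once; only their hashes are stored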
const verify2FAController = async (req, res) => {
try {
const userId = req.user.id;
const { token, backupCode } = req.body;
const user = await getUserById(userId);
if (!user || !user.totpSecret) {
return res.status(400).json({ message: '2FA not initiated' });
}
// Retrieve the plain TOTP secret using getTOTPSecret.
const secret = await getTOTPSecret(user.totpSecret);
if (token && (await verifyTOTP(secret, token))) {
return res.status(200).json();
} else if (backupCode) {
const verified = await verifyBackupCode({ user, backupCode });
if (verified) {
return res.status(200).json();
}
}
return res.status(400).json({ message: 'Invalid token.' });
} catch (err) {
logger.error('[verify2FAController]', err);
res.status(500).json({ message: err.message });
}
};
const confirm2FAController = async (req, res) => {
try {
const userId = req.user.id;
const { token } = req.body;
const user = await getUserById(userId);
if (!user || !user.totpSecret) {
return res.status(400).json({ message: '2FA not initiated' });
}
// Retrieve the plain TOTP secret using getTOTPSecret.
const secret = await getTOTPSecret(user.totpSecret);
if (await verifyTOTP(secret, token)) {
return res.status(200).json();
}
return res.status(400).json({ message: 'Invalid token.' });
} catch (err) {
logger.error('[confirm2FAController]', err);
res.status(500).json({ message: err.message });
}
};
const disable2FAController = async (req, res) => {
try {
const userId = req.user.id;
await updateUser(userId, { totpSecret: null, backupCodes: [] });
res.status(200).json();
} catch (err) {
logger.error('[disable2FAController]', err);
res.status(500).json({ message: err.message });
}
};
const regenerateBackupCodesController = async (req, res) => {
try {
const userId = req.user.id;
const { plainCodes, codeObjects } = await generateBackupCodes();
await updateUser(userId, { backupCodes: codeObjects });
res.status(200).json({
backupCodes: plainCodes,
backupCodesHash: codeObjects,
});
} catch (err) {
logger.error('[regenerateBackupCodesController]', err);
res.status(500).json({ message: err.message });
}
};
module.exports = {
enable2FAController,
verify2FAController,
confirm2FAController,
disable2FAController,
regenerateBackupCodesController,
};
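A minimal client-side sketch of the enable → confirm handshake these controllers implement (editorial example; it assumes the auth router added later in this commit is mounted at /api/auth, and that authFetch, renderQrCode, and offerBackupCodeDownload are hypothetical helpers that attach the user's JWT and drive the UI):
async function setUpTwoFactor(authFetch) {
  // enable2FAController returns the otpauth URL (rendered as a QR code) and one-time backup codes
  const enableRes = await authFetch('/api/auth/2fa/enable');
  const { otpauthUrl, backupCodes } = await enableRes.json();
  renderQrCode(otpauthUrl);
  offerBackupCodeDownload(backupCodes);
  // confirm2FAController checks the first 6-digit code from the user's authenticator app
  const confirmRes = await authFetch('/api/auth/2fa/confirm', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ token: '123456' }),
  });
  return confirmRes.ok;
}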

View file

@ -20,7 +20,9 @@ const { Transaction } = require('~/models/Transaction');
const { logger } = require('~/config');
const getUserController = async (req, res) => {
res.status(200).send(req.user);
const userData = req.user.toObject != null ? req.user.toObject() : { ...req.user };
delete userData.totpSecret;
res.status(200).send(userData);
};
const getTermsStatusController = async (req, res) => {

View file

@ -1,4 +1,5 @@
const { Tools, StepTypes, imageGenTools, FileContext } = require('librechat-data-provider');
const { nanoid } = require('nanoid');
const { Tools, StepTypes, FileContext } = require('librechat-data-provider');
const {
EnvVar,
Providers,
@ -242,32 +243,6 @@ function createToolEndCallback({ req, res, artifactPromises }) {
return;
}
if (imageGenTools.has(output.name)) {
artifactPromises.push(
(async () => {
const fileMetadata = Object.assign(output.artifact, {
messageId: metadata.run_id,
toolCallId: output.tool_call_id,
conversationId: metadata.thread_id,
});
if (!res.headersSent) {
return fileMetadata;
}
if (!fileMetadata) {
return null;
}
res.write(`event: attachment\ndata: ${JSON.stringify(fileMetadata)}\n\n`);
return fileMetadata;
})().catch((error) => {
logger.error('Error processing code output:', error);
return null;
}),
);
return;
}
if (output.artifact.content) {
/** @type {FormattedContent[]} */
const content = output.artifact.content;
@ -278,7 +253,7 @@ function createToolEndCallback({ req, res, artifactPromises }) {
const { url } = part.image_url;
artifactPromises.push(
(async () => {
const filename = `${output.tool_call_id}-image-${new Date().getTime()}`;
const filename = `${output.name}_${output.tool_call_id}_img_${nanoid()}`;
const file = await saveBase64Image(url, {
req,
filename,

View file

@ -17,19 +17,21 @@ const {
KnownEndpoints,
anthropicSchema,
isAgentsEndpoint,
bedrockOutputParser,
bedrockInputSchema,
removeNullishValues,
} = require('librechat-data-provider');
const {
formatMessage,
addCacheControl,
formatAgentMessages,
formatContentStrings,
createContextHandlers,
} = require('~/app/clients/prompts');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { getBufferString, HumanMessage } = require('@langchain/core/messages');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const Tokenizer = require('~/server/services/Tokenizer');
const { spendTokens } = require('~/models/spendTokens');
const BaseClient = require('~/app/clients/BaseClient');
const { createRun } = require('./run');
const { logger } = require('~/config');
@ -38,10 +40,10 @@ const { logger } = require('~/config');
/** @typedef {import('@langchain/core/runnables').RunnableConfig} RunnableConfig */
const providerParsers = {
[EModelEndpoint.openAI]: openAISchema,
[EModelEndpoint.azureOpenAI]: openAISchema,
[EModelEndpoint.anthropic]: anthropicSchema,
[EModelEndpoint.bedrock]: bedrockOutputParser,
[EModelEndpoint.openAI]: openAISchema.parse,
[EModelEndpoint.azureOpenAI]: openAISchema.parse,
[EModelEndpoint.anthropic]: anthropicSchema.parse,
[EModelEndpoint.bedrock]: bedrockInputSchema.parse,
};
const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deepseek]);
@ -186,7 +188,14 @@ class AgentClient extends BaseClient {
: {};
if (parseOptions) {
runOptions = parseOptions(this.options.agent.model_parameters);
try {
runOptions = parseOptions(this.options.agent.model_parameters);
} catch (error) {
logger.error(
'[api/server/controllers/agents/client.js #getSaveOptions] Error parsing options',
error,
);
}
}
return removeNullishValues(
@ -379,15 +388,34 @@ class AgentClient extends BaseClient {
if (!collectedUsage || !collectedUsage.length) {
return;
}
const input_tokens = collectedUsage[0]?.input_tokens || 0;
const input_tokens =
(collectedUsage[0]?.input_tokens || 0) +
(Number(collectedUsage[0]?.input_token_details?.cache_creation) || 0) +
(Number(collectedUsage[0]?.input_token_details?.cache_read) || 0);
let output_tokens = 0;
let previousTokens = input_tokens; // Start with original input
for (let i = 0; i < collectedUsage.length; i++) {
const usage = collectedUsage[i];
if (!usage) {
continue;
}
const cache_creation = Number(usage.input_token_details?.cache_creation) || 0;
const cache_read = Number(usage.input_token_details?.cache_read) || 0;
const txMetadata = {
context,
conversationId: this.conversationId,
user: this.user ?? this.options.req.user?.id,
endpointTokenConfig: this.options.endpointTokenConfig,
model: usage.model ?? model ?? this.model ?? this.options.agent.model_parameters.model,
};
if (i > 0) {
// Count new tokens generated (input_tokens minus previous accumulated tokens)
output_tokens += (Number(usage.input_tokens) || 0) - previousTokens;
output_tokens +=
(Number(usage.input_tokens) || 0) + cache_creation + cache_read - previousTokens;
}
// Add this message's output tokens
@ -395,16 +423,26 @@ class AgentClient extends BaseClient {
// Update previousTokens to include this message's output
previousTokens += Number(usage.output_tokens) || 0;
spendTokens(
{
context,
conversationId: this.conversationId,
user: this.user ?? this.options.req.user?.id,
endpointTokenConfig: this.options.endpointTokenConfig,
model: usage.model ?? model ?? this.model ?? this.options.agent.model_parameters.model,
},
{ promptTokens: usage.input_tokens, completionTokens: usage.output_tokens },
).catch((err) => {
if (cache_creation > 0 || cache_read > 0) {
spendStructuredTokens(txMetadata, {
promptTokens: {
input: usage.input_tokens,
write: cache_creation,
read: cache_read,
},
completionTokens: usage.output_tokens,
}).catch((err) => {
logger.error(
'[api/server/controllers/agents/client.js #recordCollectedUsage] Error spending structured tokens',
err,
);
});
}
spendTokens(txMetadata, {
promptTokens: usage.input_tokens,
completionTokens: usage.output_tokens,
}).catch((err) => {
logger.error(
'[api/server/controllers/agents/client.js #recordCollectedUsage] Error spending tokens',
err,
@ -589,7 +627,7 @@ class AgentClient extends BaseClient {
* @param {number} [i]
* @param {TMessageContentParts[]} [contentData]
*/
const runAgent = async (agent, messages, i = 0, contentData = []) => {
const runAgent = async (agent, _messages, i = 0, contentData = []) => {
config.configurable.model = agent.model_parameters.model;
if (i > 0) {
this.model = agent.model_parameters.model;
@ -622,12 +660,21 @@ class AgentClient extends BaseClient {
}
if (noSystemMessages === true && systemContent?.length) {
let latestMessage = messages.pop().content;
let latestMessage = _messages.pop().content;
if (typeof latestMessage !== 'string') {
latestMessage = latestMessage[0].text;
}
latestMessage = [systemContent, latestMessage].join('\n');
messages.push(new HumanMessage(latestMessage));
_messages.push(new HumanMessage(latestMessage));
}
let messages = _messages;
if (
agent.model_parameters?.clientOptions?.defaultHeaders?.['anthropic-beta']?.includes(
'prompt-caching',
)
) {
messages = addCacheControl(messages);
}
run = await createRun({
@ -756,6 +803,10 @@ class AgentClient extends BaseClient {
);
}
} catch (err) {
logger.error(
'[api/server/controllers/agents/client.js #sendCompletion] Operation aborted',
err,
);
if (!abortController.signal.aborted) {
logger.error(
'[api/server/controllers/agents/client.js #sendCompletion] Unhandled error type',
@ -763,11 +814,6 @@ class AgentClient extends BaseClient {
);
throw err;
}
logger.warn(
'[api/server/controllers/agents/client.js #sendCompletion] Operation aborted',
err,
);
}
}
@ -782,14 +828,20 @@ class AgentClient extends BaseClient {
throw new Error('Run not initialized');
}
const { handleLLMEnd, collected: collectedMetadata } = createMetadataAggregator();
const clientOptions = {};
const providerConfig = this.options.req.app.locals[this.options.agent.provider];
/** @type {import('@librechat/agents').ClientOptions} */
const clientOptions = {
maxTokens: 75,
};
let endpointConfig = this.options.req.app.locals[this.options.agent.endpoint];
if (!endpointConfig) {
endpointConfig = await getCustomEndpointConfig(this.options.agent.endpoint);
}
if (
providerConfig &&
providerConfig.titleModel &&
providerConfig.titleModel !== Constants.CURRENT_MODEL
endpointConfig &&
endpointConfig.titleModel &&
endpointConfig.titleModel !== Constants.CURRENT_MODEL
) {
clientOptions.model = providerConfig.titleModel;
clientOptions.model = endpointConfig.titleModel;
}
try {
const titleResult = await this.run.generateTitle({

View file

@ -45,7 +45,10 @@ async function createRun({
/** @type {'reasoning_content' | 'reasoning'} */
let reasoningKey;
if (llmConfig.configuration?.baseURL.includes(KnownEndpoints.openrouter)) {
if (
llmConfig.configuration?.baseURL?.includes(KnownEndpoints.openrouter) ||
(agent.endpoint && agent.endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
) {
reasoningKey = 'reasoning';
}
if (/o1(?!-(?:mini|preview)).*$/.test(llmConfig.model)) {

View file

@ -1,3 +1,4 @@
const { generate2FATempToken } = require('~/server/services/twoFactorService');
const { setAuthTokens } = require('~/server/services/AuthService');
const { logger } = require('~/config');
@ -7,7 +8,12 @@ const loginController = async (req, res) => {
return res.status(400).json({ message: 'Invalid credentials' });
}
const { password: _, __v, ...user } = req.user;
if (req.user.backupCodes != null && req.user.backupCodes.length > 0) {
const tempToken = generate2FATempToken(req.user._id);
return res.status(200).json({ twoFAPending: true, tempToken });
}
const { password: _p, totpSecret: _t, __v, ...user } = req.user;
user.id = user._id.toString();
const token = await setAuthTokens(req.user._id, res);

View file

@ -0,0 +1,58 @@
const jwt = require('jsonwebtoken');
const { verifyTOTP, verifyBackupCode, getTOTPSecret } = require('~/server/services/twoFactorService');
const { setAuthTokens } = require('~/server/services/AuthService');
const { getUserById } = require('~/models/userMethods');
const { logger } = require('~/config');
const verify2FA = async (req, res) => {
try {
const { tempToken, token, backupCode } = req.body;
if (!tempToken) {
return res.status(400).json({ message: 'Missing temporary token' });
}
let payload;
try {
payload = jwt.verify(tempToken, process.env.JWT_SECRET);
} catch (err) {
return res.status(401).json({ message: 'Invalid or expired temporary token' });
}
const user = await getUserById(payload.userId);
// Ensure that the user exists and has backup codes (i.e. 2FA enabled)
if (!user || !(user.backupCodes && user.backupCodes.length > 0)) {
return res.status(400).json({ message: '2FA is not enabled for this user' });
}
// Use the new getTOTPSecret function to retrieve (and decrypt if necessary) the TOTP secret.
const secret = await getTOTPSecret(user.totpSecret);
let verified = false;
if (token && (await verifyTOTP(secret, token))) {
verified = true;
} else if (backupCode) {
verified = await verifyBackupCode({ user, backupCode });
}
if (!verified) {
return res.status(401).json({ message: 'Invalid 2FA code or backup code' });
}
// Prepare user data for response.
// If the user is a plain object (from lean queries), we create a shallow copy.
const userData = user.toObject ? user.toObject() : { ...user };
// Remove sensitive fields.
delete userData.password;
delete userData.__v;
delete userData.totpSecret;
userData.id = user._id.toString();
const authToken = await setAuthTokens(user._id, res);
return res.status(200).json({ token: authToken, user: userData });
} catch (err) {
logger.error('[verify2FA]', err);
return res.status(500).json({ message: 'Something went wrong' });
}
};
module.exports = { verify2FA };
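A sketch of the login flow this controller completes (editorial example; endpoint paths assume the usual /api/auth mount): a 2FA-enabled user first receives a temporary token from the login route, then exchanges it together with a TOTP code (or a backup code) for the real session tokens.
async function loginWithTwoFactor(email, password, totpCode) {
  const loginRes = await fetch('/api/auth/login', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ email, password }),
  });
  const login = await loginRes.json();
  if (!login.twoFAPending) {
    return login; // 2FA not enabled: regular { token, user } payload
  }
  // Exchange the short-lived tempToken plus the authenticator code for auth cookies
  const verifyRes = await fetch('/api/auth/2fa/verify-temp', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ tempToken: login.tempToken, token: totpCode }),
  });
  return verifyRes.json(); // { token, user } on success; 401 on an invalid or expired code
}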

View file

@ -1,10 +1,17 @@
const { nanoid } = require('nanoid');
const { EnvVar } = require('@librechat/agents');
const { Tools, AuthType, ToolCallTypes } = require('librechat-data-provider');
const {
Tools,
AuthType,
Permissions,
ToolCallTypes,
PermissionTypes,
} = require('librechat-data-provider');
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
const { processCodeOutput } = require('~/server/services/Files/Code/process');
const { loadAuthValues, loadTools } = require('~/app/clients/tools/util');
const { createToolCall, getToolCallsByConvo } = require('~/models/ToolCall');
const { loadAuthValues, loadTools } = require('~/app/clients/tools/util');
const { checkAccess } = require('~/server/middleware');
const { getMessage } = require('~/models/Message');
const { logger } = require('~/config');
@ -12,6 +19,10 @@ const fieldsMap = {
[Tools.execute_code]: [EnvVar.CODE_API_KEY],
};
const toolAccessPermType = {
[Tools.execute_code]: PermissionTypes.RUN_CODE,
};
/**
* @param {ServerRequest} req - The request object, containing information about the HTTP request.
* @param {ServerResponse} res - The response object, used to send back the desired HTTP response.
@ -58,6 +69,7 @@ const verifyToolAuth = async (req, res) => {
/**
* @param {ServerRequest} req - The request object, containing information about the HTTP request.
* @param {ServerResponse} res - The response object, used to send back the desired HTTP response.
* @param {NextFunction} next - The next middleware function to call.
* @returns {Promise<void>} A promise that resolves when the function has completed.
*/
const callTool = async (req, res) => {
@ -83,6 +95,16 @@ const callTool = async (req, res) => {
return;
}
logger.debug(`[${toolId}/call] User: ${req.user.id}`);
let hasAccess = true;
if (toolAccessPermType[toolId]) {
hasAccess = await checkAccess(req.user, toolAccessPermType[toolId], [Permissions.USE]);
}
if (!hasAccess) {
logger.warn(
`[${toolAccessPermType[toolId]}] Forbidden: Insufficient permissions for User ${req.user.id}: ${Permissions.USE}`,
);
return res.status(403).json({ message: 'Forbidden: Insufficient permissions' });
}
const { loadedTools } = await loadTools({
user: req.user.id,
tools: [toolId],

View file

@ -22,10 +22,11 @@ const staticCache = require('./utils/staticCache');
const noIndex = require('./middleware/noIndex');
const routes = require('./routes');
const { PORT, HOST, ALLOW_SOCIAL_LOGIN, DISABLE_COMPRESSION } = process.env ?? {};
const { PORT, HOST, ALLOW_SOCIAL_LOGIN, DISABLE_COMPRESSION, TRUST_PROXY } = process.env ?? {};
const port = Number(PORT) || 3080;
const host = HOST || 'localhost';
const trusted_proxy = Number(TRUST_PROXY) || 1; /* trust first proxy by default */
const startServer = async () => {
if (typeof Bun !== 'undefined') {
@ -53,7 +54,7 @@ const startServer = async () => {
app.use(staticCache(app.locals.paths.dist));
app.use(staticCache(app.locals.paths.fonts));
app.use(staticCache(app.locals.paths.assets));
app.set('trust proxy', 1); /* trust first proxy */
app.set('trust proxy', trusted_proxy);
app.use(cors());
app.use(cookieParser());
@ -145,6 +146,18 @@ process.on('uncaughtException', (err) => {
logger.error('There was an uncaught error:', err);
}
if (err.message.includes('abort')) {
logger.warn('There was an uncatchable AbortController error.');
return;
}
if (err.message.includes('GoogleGenerativeAI')) {
logger.warn(
'\n\n`GoogleGenerativeAI` errors cannot be caught due to an upstream issue, see: https://github.com/google-gemini/generative-ai-js/issues/303',
);
return;
}
if (err.message.includes('fetch failed')) {
if (messageCount === 0) {
logger.warn('Meilisearch error, search will be disabled');

View file

@ -1,4 +1,42 @@
const { getRoleByName } = require('~/models/Role');
const { logger } = require('~/config');
/**
* Core function to check if a user has one or more required permissions
*
* @param {object} user - The user object
* @param {PermissionTypes} permissionType - The type of permission to check
* @param {Permissions[]} permissions - The list of specific permissions to check
* @param {Record<Permissions, string[]>} [bodyProps] - An optional object where keys are permissions and values are arrays of properties to check
* @param {object} [checkObject] - The object to check properties against
* @returns {Promise<boolean>} Whether the user has the required permissions
*/
const checkAccess = async (user, permissionType, permissions, bodyProps = {}, checkObject = {}) => {
if (!user) {
return false;
}
const role = await getRoleByName(user.role);
if (role && role[permissionType]) {
const hasAnyPermission = permissions.some((permission) => {
if (role[permissionType][permission]) {
return true;
}
if (bodyProps[permission] && checkObject) {
return bodyProps[permission].some((prop) =>
Object.prototype.hasOwnProperty.call(checkObject, prop),
);
}
return false;
});
return hasAnyPermission;
}
return false;
};
/**
* Middleware to check if a user has one or more required permissions, optionally based on `req.body` properties.
@ -6,42 +44,35 @@ const { getRoleByName } = require('~/models/Role');
* @param {PermissionTypes} permissionType - The type of permission to check.
* @param {Permissions[]} permissions - The list of specific permissions to check.
* @param {Record<Permissions, string[]>} [bodyProps] - An optional object where keys are permissions and values are arrays of `req.body` properties to check.
* @returns {Function} Express middleware function.
* @returns {(req: ServerRequest, res: ServerResponse, next: NextFunction) => Promise<void>} Express middleware function.
*/
const generateCheckAccess = (permissionType, permissions, bodyProps = {}) => {
return async (req, res, next) => {
try {
const { user } = req;
if (!user) {
return res.status(401).json({ message: 'Authorization required' });
}
const role = await getRoleByName(user.role);
if (role && role[permissionType]) {
const hasAnyPermission = permissions.some((permission) => {
if (role[permissionType][permission]) {
return true;
}
if (bodyProps[permission] && req.body) {
return bodyProps[permission].some((prop) =>
Object.prototype.hasOwnProperty.call(req.body, prop),
);
}
return false;
});
if (hasAnyPermission) {
return next();
}
const hasAccess = await checkAccess(
req.user,
permissionType,
permissions,
bodyProps,
req.body,
);
if (hasAccess) {
return next();
}
logger.warn(
`[${permissionType}] Forbidden: Insufficient permissions for User ${req.user.id}: ${permissions.join(', ')}`,
);
return res.status(403).json({ message: 'Forbidden: Insufficient permissions' });
} catch (error) {
logger.error(error);
return res.status(500).json({ message: `Server error: ${error.message}` });
}
};
};
module.exports = generateCheckAccess;
module.exports = {
checkAccess,
generateCheckAccess,
};
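A minimal usage sketch of the two exports (editorial example; the '/example' routes and handlers are hypothetical, while the permission constants and require aliases match those used elsewhere in this commit):
const express = require('express');
const { Permissions, PermissionTypes } = require('librechat-data-provider');
const { checkAccess, generateCheckAccess } = require('~/server/middleware');
const router = express.Router();
// Declarative: gate an entire route, as existing prompt/agent routes do.
router.post(
  '/example',
  generateCheckAccess(PermissionTypes.PROMPTS, [Permissions.USE]),
  (req, res) => res.json({ ok: true }),
);
// Imperative: decide inside a handler, as the tool controller now does for RUN_CODE.
router.post('/example/run', async (req, res) => {
  const canRun = await checkAccess(req.user, PermissionTypes.RUN_CODE, [Permissions.USE]);
  if (!canRun) {
    return res.status(403).json({ message: 'Forbidden: Insufficient permissions' });
  }
  res.json({ ok: true });
});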

View file

@ -1,7 +1,8 @@
const checkAdmin = require('./checkAdmin');
const generateCheckAccess = require('./generateCheckAccess');
const { checkAccess, generateCheckAccess } = require('./generateCheckAccess');
module.exports = {
checkAdmin,
checkAccess,
generateCheckAccess,
};

View file

@ -7,6 +7,13 @@ const {
} = require('~/server/controllers/AuthController');
const { loginController } = require('~/server/controllers/auth/LoginController');
const { logoutController } = require('~/server/controllers/auth/LogoutController');
const { verify2FA } = require('~/server/controllers/auth/TwoFactorAuthController');
const {
enable2FAController,
verify2FAController,
disable2FAController,
regenerateBackupCodesController,
confirm2FAController,
} = require('~/server/controllers/TwoFactorController');
const {
checkBan,
loginLimiter,
@ -50,4 +57,11 @@ router.post(
);
router.post('/resetPassword', checkBan, validatePasswordReset, resetPasswordController);
router.get('/2fa/enable', requireJwtAuth, enable2FAController);
router.post('/2fa/verify', requireJwtAuth, verify2FAController);
router.post('/2fa/verify-temp', checkBan, verify2FA);
router.post('/2fa/confirm', requireJwtAuth, confirm2FAController);
router.post('/2fa/disable', requireJwtAuth, disable2FAController);
router.post('/2fa/backup/regenerate', requireJwtAuth, regenerateBackupCodesController);
module.exports = router;

View file

@ -101,6 +101,7 @@ const initializeAgentOptions = async ({
});
const provider = agent.provider;
agent.endpoint = provider;
let getOptions = providerConfigMap[provider];
if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) {
agent.provider = provider.toLowerCase();
@ -112,9 +113,7 @@ const initializeAgentOptions = async ({
}
getOptions = initCustom;
agent.provider = Providers.OPENAI;
agent.endpoint = provider.toLowerCase();
}
const model_parameters = Object.assign(
{},
agent.model_parameters ?? { model: agent.model },

View file

@ -20,10 +20,19 @@ const addTitle = async (req, { text, response, client }) => {
const titleCache = getLogStores(CacheKeys.GEN_TITLE);
const key = `${req.user.id}-${response.conversationId}`;
const responseText =
response?.content && Array.isArray(response?.content)
? response.content.reduce((acc, block) => {
if (block?.type === 'text') {
return acc + block.text;
}
return acc;
}, '')
: (response?.content ?? response?.text ?? '');
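// Illustrative (not part of the diff): for content blocks such as
// [{ type: 'text', text: 'Paris is' }, { type: 'tool_use', ... }, { type: 'text', text: ' the capital.' }]
// the reducer above yields responseText === 'Paris is the capital.'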
const title = await client.titleConvo({
text,
responseText: response?.text ?? '',
responseText,
conversationId: response.conversationId,
});
await titleCache.set(key, title, 120000);

View file

@ -1,4 +1,4 @@
const { removeNullishValues } = require('librechat-data-provider');
const { removeNullishValues, anthropicSettings } = require('librechat-data-provider');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const buildOptions = (endpoint, parsedBody) => {
@ -6,8 +6,10 @@ const buildOptions = (endpoint, parsedBody) => {
modelLabel,
promptPrefix,
maxContextTokens,
resendFiles = true,
promptCache = true,
resendFiles = anthropicSettings.resendFiles.default,
promptCache = anthropicSettings.promptCache.default,
thinking = anthropicSettings.thinking.default,
thinkingBudget = anthropicSettings.thinkingBudget.default,
iconURL,
greeting,
spec,
@ -21,6 +23,8 @@ const buildOptions = (endpoint, parsedBody) => {
promptPrefix,
resendFiles,
promptCache,
thinking,
thinkingBudget,
iconURL,
greeting,
spec,

View file

@ -0,0 +1,111 @@
const { EModelEndpoint, anthropicSettings } = require('librechat-data-provider');
const { matchModelName } = require('~/utils');
const { logger } = require('~/config');
/**
* @param {string} modelName
* @returns {boolean}
*/
function checkPromptCacheSupport(modelName) {
const modelMatch = matchModelName(modelName, EModelEndpoint.anthropic);
if (
modelMatch.includes('claude-3-5-sonnet-latest') ||
modelMatch.includes('claude-3.5-sonnet-latest')
) {
return false;
}
if (
modelMatch === 'claude-3-7-sonnet' ||
modelMatch === 'claude-3-5-sonnet' ||
modelMatch === 'claude-3-5-haiku' ||
modelMatch === 'claude-3-haiku' ||
modelMatch === 'claude-3-opus' ||
modelMatch === 'claude-3.7-sonnet' ||
modelMatch === 'claude-3.5-sonnet' ||
modelMatch === 'claude-3.5-haiku'
) {
return true;
}
return false;
}
/**
* Gets the appropriate headers for Claude models with cache control
* @param {string} model The model name
* @param {boolean} supportsCacheControl Whether the model supports cache control
* @returns {AnthropicClientOptions['extendedOptions']['defaultHeaders']|undefined} The headers object or undefined if not applicable
*/
function getClaudeHeaders(model, supportsCacheControl) {
if (!supportsCacheControl) {
return undefined;
}
if (/claude-3[-.]5-sonnet/.test(model)) {
return {
'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15,prompt-caching-2024-07-31',
};
} else if (/claude-3[-.]7/.test(model)) {
return {
'anthropic-beta':
'token-efficient-tools-2025-02-19,output-128k-2025-02-19,prompt-caching-2024-07-31',
};
} else {
return {
'anthropic-beta': 'prompt-caching-2024-07-31',
};
}
}
/**
* Configures reasoning-related options for Claude models
* @param {AnthropicClientOptions & { max_tokens?: number }} anthropicInput The request options object
* @param {Object} extendedOptions Additional client configuration options
* @param {boolean} extendedOptions.thinking Whether thinking is enabled in client config
* @param {number|null} extendedOptions.thinkingBudget The token budget for thinking
* @returns {Object} Updated request options
*/
function configureReasoning(anthropicInput, extendedOptions = {}) {
const updatedOptions = { ...anthropicInput };
const currentMaxTokens = updatedOptions.max_tokens ?? updatedOptions.maxTokens;
if (
extendedOptions.thinking &&
updatedOptions?.model &&
/claude-3[-.]7/.test(updatedOptions.model)
) {
updatedOptions.thinking = {
type: 'enabled',
};
}
if (updatedOptions.thinking != null && extendedOptions.thinkingBudget != null) {
updatedOptions.thinking = {
...updatedOptions.thinking,
budget_tokens: extendedOptions.thinkingBudget,
};
}
if (
updatedOptions.thinking != null &&
(currentMaxTokens == null || updatedOptions.thinking.budget_tokens > currentMaxTokens)
) {
const maxTokens = anthropicSettings.maxOutputTokens.reset(updatedOptions.model);
updatedOptions.max_tokens = currentMaxTokens ?? maxTokens;
logger.warn(
updatedOptions.max_tokens === maxTokens
? '[AnthropicClient] max_tokens is not defined while thinking is enabled. Setting max_tokens to model default.'
: `[AnthropicClient] thinking budget_tokens (${updatedOptions.thinking.budget_tokens}) exceeds max_tokens (${updatedOptions.max_tokens}). Adjusting budget_tokens.`,
);
updatedOptions.thinking.budget_tokens = Math.min(
updatedOptions.thinking.budget_tokens,
Math.floor(updatedOptions.max_tokens * 0.9),
);
}
return updatedOptions;
}
module.exports = { checkPromptCacheSupport, getClaudeHeaders, configureReasoning };
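A sketch of how these helpers compose (editorial example mirroring the llm.js wiring shown below; the literal values are illustrative):
const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers');
const model = 'claude-3-7-sonnet';
const supportsCacheControl = checkPromptCacheSupport(model); // expected: true
const defaultHeaders = getClaudeHeaders(model, supportsCacheControl);
// -> { 'anthropic-beta': 'token-efficient-tools-2025-02-19,output-128k-2025-02-19,prompt-caching-2024-07-31' }
const llmOptions = configureReasoning(
  { model, maxTokens: 8192 },
  { thinking: true, thinkingBudget: 2000 },
);
// -> llmOptions.thinking === { type: 'enabled', budget_tokens: 2000 }; maxTokens stays at 8192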

View file

@ -27,6 +27,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
if (anthropicConfig) {
clientOptions.streamRate = anthropicConfig.streamRate;
clientOptions.titleModel = anthropicConfig.titleModel;
}
/** @type {undefined | TBaseEndpoint} */

View file

@ -1,5 +1,6 @@
const { HttpsProxyAgent } = require('https-proxy-agent');
const { anthropicSettings, removeNullishValues } = require('librechat-data-provider');
const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers');
/**
* Generates configuration options for creating an Anthropic language model (LLM) instance.
@ -20,6 +21,14 @@ const { anthropicSettings, removeNullishValues } = require('librechat-data-provi
* @returns {Object} Configuration options for creating an Anthropic LLM instance, with null and undefined values removed.
*/
function getLLMConfig(apiKey, options = {}) {
const systemOptions = {
thinking: options.modelOptions.thinking ?? anthropicSettings.thinking.default,
promptCache: options.modelOptions.promptCache ?? anthropicSettings.promptCache.default,
thinkingBudget: options.modelOptions.thinkingBudget ?? anthropicSettings.thinkingBudget.default,
};
for (let key in systemOptions) {
delete options.modelOptions[key];
}
const defaultOptions = {
model: anthropicSettings.model.default,
maxOutputTokens: anthropicSettings.maxOutputTokens.default,
@ -29,19 +38,34 @@ function getLLMConfig(apiKey, options = {}) {
const mergedOptions = Object.assign(defaultOptions, options.modelOptions);
/** @type {AnthropicClientOptions} */
const requestOptions = {
let requestOptions = {
apiKey,
model: mergedOptions.model,
stream: mergedOptions.stream,
temperature: mergedOptions.temperature,
topP: mergedOptions.topP,
topK: mergedOptions.topK,
stopSequences: mergedOptions.stop,
maxTokens:
mergedOptions.maxOutputTokens || anthropicSettings.maxOutputTokens.reset(mergedOptions.model),
clientOptions: {},
};
requestOptions = configureReasoning(requestOptions, systemOptions);
if (!/claude-3[-.]7/.test(mergedOptions.model)) {
requestOptions.topP = mergedOptions.topP;
requestOptions.topK = mergedOptions.topK;
} else if (requestOptions.thinking == null) {
requestOptions.topP = mergedOptions.topP;
requestOptions.topK = mergedOptions.topK;
}
const supportsCacheControl =
systemOptions.promptCache === true && checkPromptCacheSupport(requestOptions.model);
const headers = getClaudeHeaders(requestOptions.model, supportsCacheControl);
if (headers) {
requestOptions.clientOptions.defaultHeaders = headers;
}
if (options.proxy) {
requestOptions.clientOptions.httpAgent = new HttpsProxyAgent(options.proxy);
}

View file

@ -0,0 +1,153 @@
const { anthropicSettings } = require('librechat-data-provider');
const { getLLMConfig } = require('~/server/services/Endpoints/anthropic/llm');
jest.mock('https-proxy-agent', () => ({
HttpsProxyAgent: jest.fn().mockImplementation((proxy) => ({ proxy })),
}));
describe('getLLMConfig', () => {
it('should create a basic configuration with default values', () => {
const result = getLLMConfig('test-api-key', { modelOptions: {} });
expect(result.llmConfig).toHaveProperty('apiKey', 'test-api-key');
expect(result.llmConfig).toHaveProperty('model', anthropicSettings.model.default);
expect(result.llmConfig).toHaveProperty('stream', true);
expect(result.llmConfig).toHaveProperty('maxTokens');
});
it('should include proxy settings when provided', () => {
const result = getLLMConfig('test-api-key', {
modelOptions: {},
proxy: 'http://proxy:8080',
});
expect(result.llmConfig.clientOptions).toHaveProperty('httpAgent');
expect(result.llmConfig.clientOptions.httpAgent).toHaveProperty('proxy', 'http://proxy:8080');
});
it('should include reverse proxy URL when provided', () => {
const result = getLLMConfig('test-api-key', {
modelOptions: {},
reverseProxyUrl: 'http://reverse-proxy',
});
expect(result.llmConfig.clientOptions).toHaveProperty('baseURL', 'http://reverse-proxy');
});
it('should include topK and topP for non-Claude-3.7 models', () => {
const result = getLLMConfig('test-api-key', {
modelOptions: {
model: 'claude-3-opus',
topK: 10,
topP: 0.9,
},
});
expect(result.llmConfig).toHaveProperty('topK', 10);
expect(result.llmConfig).toHaveProperty('topP', 0.9);
});
it('should include topK and topP for Claude-3.5 models', () => {
const result = getLLMConfig('test-api-key', {
modelOptions: {
model: 'claude-3-5-sonnet',
topK: 10,
topP: 0.9,
},
});
expect(result.llmConfig).toHaveProperty('topK', 10);
expect(result.llmConfig).toHaveProperty('topP', 0.9);
});
it('should NOT include topK and topP for Claude-3-7 models (hyphen notation)', () => {
const result = getLLMConfig('test-api-key', {
modelOptions: {
model: 'claude-3-7-sonnet',
topK: 10,
topP: 0.9,
},
});
expect(result.llmConfig).not.toHaveProperty('topK');
expect(result.llmConfig).not.toHaveProperty('topP');
});
it('should NOT include topK and topP for Claude-3.7 models (decimal notation)', () => {
const result = getLLMConfig('test-api-key', {
modelOptions: {
model: 'claude-3.7-sonnet',
topK: 10,
topP: 0.9,
},
});
expect(result.llmConfig).not.toHaveProperty('topK');
expect(result.llmConfig).not.toHaveProperty('topP');
});
it('should handle custom maxOutputTokens', () => {
const result = getLLMConfig('test-api-key', {
modelOptions: {
model: 'claude-3-opus',
maxOutputTokens: 2048,
},
});
expect(result.llmConfig).toHaveProperty('maxTokens', 2048);
});
it('should handle promptCache setting', () => {
const result = getLLMConfig('test-api-key', {
modelOptions: {
model: 'claude-3-5-sonnet',
promptCache: true,
},
});
// We're not checking specific header values since that depends on the actual helper function
// Just verifying that the promptCache setting is processed
expect(result.llmConfig).toBeDefined();
});
it('should include topK and topP for Claude-3.7 models when thinking is not enabled', () => {
// Test with thinking explicitly set to null/undefined
const result = getLLMConfig('test-api-key', {
modelOptions: {
model: 'claude-3-7-sonnet',
topK: 10,
topP: 0.9,
thinking: false,
},
});
expect(result.llmConfig).toHaveProperty('topK', 10);
expect(result.llmConfig).toHaveProperty('topP', 0.9);
// Test with thinking explicitly set to false
const result2 = getLLMConfig('test-api-key', {
modelOptions: {
model: 'claude-3-7-sonnet',
topK: 10,
topP: 0.9,
thinking: false,
},
});
expect(result2.llmConfig).toHaveProperty('topK', 10);
expect(result2.llmConfig).toHaveProperty('topP', 0.9);
// Test with decimal notation as well
const result3 = getLLMConfig('test-api-key', {
modelOptions: {
model: 'claude-3.7-sonnet',
topK: 10,
topP: 0.9,
thinking: false,
},
});
expect(result3.llmConfig).toHaveProperty('topK', 10);
expect(result3.llmConfig).toHaveProperty('topP', 0.9);
});
});

View file

@ -1,6 +1,5 @@
const { removeNullishValues, bedrockInputParser } = require('librechat-data-provider');
const { removeNullishValues } = require('librechat-data-provider');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { logger } = require('~/config');
const buildOptions = (endpoint, parsedBody) => {
const {
@ -15,12 +14,6 @@ const buildOptions = (endpoint, parsedBody) => {
artifacts,
...model_parameters
} = parsedBody;
let parsedParams = model_parameters;
try {
parsedParams = bedrockInputParser.parse(model_parameters);
} catch (error) {
logger.warn('Failed to parse bedrock input', error);
}
const endpointOption = removeNullishValues({
endpoint,
name,
@ -31,7 +24,7 @@ const buildOptions = (endpoint, parsedBody) => {
spec,
promptPrefix,
maxContextTokens,
model_parameters: parsedParams,
model_parameters,
});
if (typeof artifacts === 'string') {

View file

@ -1,14 +1,16 @@
const { HttpsProxyAgent } = require('https-proxy-agent');
const {
EModelEndpoint,
Constants,
AuthType,
Constants,
EModelEndpoint,
bedrockInputParser,
bedrockOutputParser,
removeNullishValues,
} = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { sleep } = require('~/server/utils');
const getOptions = async ({ req, endpointOption }) => {
const getOptions = async ({ req, overrideModel, endpointOption }) => {
const {
BEDROCK_AWS_SECRET_ACCESS_KEY,
BEDROCK_AWS_ACCESS_KEY_ID,
@ -62,39 +64,44 @@ const getOptions = async ({ req, endpointOption }) => {
/** @type {BedrockClientOptions} */
const requestOptions = {
model: endpointOption.model,
model: overrideModel ?? endpointOption.model,
region: BEDROCK_AWS_DEFAULT_REGION,
streaming: true,
streamUsage: true,
callbacks: [
{
handleLLMNewToken: async () => {
if (!streamRate) {
return;
}
await sleep(streamRate);
},
},
],
};
if (credentials) {
requestOptions.credentials = credentials;
}
if (BEDROCK_REVERSE_PROXY) {
requestOptions.endpointHost = BEDROCK_REVERSE_PROXY;
}
const configOptions = {};
if (PROXY) {
/** NOTE: NOT SUPPORTED BY BEDROCK */
configOptions.httpAgent = new HttpsProxyAgent(PROXY);
}
const llmConfig = bedrockOutputParser(
bedrockInputParser.parse(
removeNullishValues(Object.assign(requestOptions, endpointOption.model_parameters)),
),
);
if (credentials) {
llmConfig.credentials = credentials;
}
if (BEDROCK_REVERSE_PROXY) {
llmConfig.endpointHost = BEDROCK_REVERSE_PROXY;
}
llmConfig.callbacks = [
{
handleLLMNewToken: async () => {
if (!streamRate) {
return;
}
await sleep(streamRate);
},
},
];
return {
/** @type {BedrockClientOptions} */
llmConfig: removeNullishValues(Object.assign(requestOptions, endpointOption.model_parameters)),
llmConfig,
configOptions,
};
};

View file

@ -141,7 +141,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
},
clientOptions,
);
const options = getLLMConfig(apiKey, clientOptions);
const options = getLLMConfig(apiKey, clientOptions, endpoint);
if (!customOptions.streamRate) {
return options;
}

View file

@ -5,12 +5,7 @@ const { isEnabled } = require('~/server/utils');
const { GoogleClient } = require('~/app');
const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => {
const {
GOOGLE_KEY,
GOOGLE_REVERSE_PROXY,
GOOGLE_AUTH_HEADER,
PROXY,
} = process.env;
const { GOOGLE_KEY, GOOGLE_REVERSE_PROXY, GOOGLE_AUTH_HEADER, PROXY } = process.env;
const isUserProvided = GOOGLE_KEY === 'user_provided';
const { key: expiresAt } = req.body;
@ -43,6 +38,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
if (googleConfig) {
clientOptions.streamRate = googleConfig.streamRate;
clientOptions.titleModel = googleConfig.titleModel;
}
if (allConfig) {

View file

@ -113,6 +113,7 @@ const initializeClient = async ({
if (!isAzureOpenAI && openAIConfig) {
clientOptions.streamRate = openAIConfig.streamRate;
clientOptions.titleModel = openAIConfig.titleModel;
}
/** @type {undefined | TBaseEndpoint} */

View file

@ -23,13 +23,13 @@ const { isEnabled } = require('~/server/utils');
* @param {boolean} [options.streaming] - Whether to use streaming mode.
* @param {Object} [options.addParams] - Additional parameters to add to the model options.
* @param {string[]} [options.dropParams] - Parameters to remove from the model options.
* @param {string|null} [endpoint=null] - The endpoint name
* @returns {Object} Configuration options for creating an LLM instance.
*/
function getLLMConfig(apiKey, options = {}) {
function getLLMConfig(apiKey, options = {}, endpoint = null) {
const {
modelOptions = {},
reverseProxyUrl,
useOpenRouter,
defaultQuery,
headers,
proxy,
@ -56,9 +56,14 @@ function getLLMConfig(apiKey, options = {}) {
});
}
let useOpenRouter;
/** @type {OpenAIClientOptions['configuration']} */
const configOptions = {};
if (useOpenRouter || reverseProxyUrl.includes(KnownEndpoints.openrouter)) {
if (
(reverseProxyUrl && reverseProxyUrl.includes(KnownEndpoints.openrouter)) ||
(endpoint && endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
) {
useOpenRouter = true;
llmConfig.include_reasoning = true;
configOptions.baseURL = reverseProxyUrl;
configOptions.defaultHeaders = Object.assign(
@ -118,6 +123,13 @@ function getLLMConfig(apiKey, options = {}) {
llmConfig.organization = process.env.OPENAI_ORGANIZATION;
}
if (useOpenRouter && llmConfig.reasoning_effort != null) {
llmConfig.reasoning = {
effort: llmConfig.reasoning_effort,
};
delete llmConfig.reasoning_effort;
}
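// Illustrative (not part of the diff): for OpenRouter, { reasoning_effort: 'high' } is rewritten
// to { reasoning: { effort: 'high' } } before the config is returned.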
return {
/** @type {OpenAIClientOptions} */
llmConfig,

View file

@ -2,6 +2,7 @@
const axios = require('axios');
const FormData = require('form-data');
const { getCodeBaseURL } = require('@librechat/agents');
const { logAxiosError } = require('~/utils');
const MAX_FILE_SIZE = 150 * 1024 * 1024;
@ -78,7 +79,11 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''
return `${fileIdentifier}?entity_id=${entity_id}`;
} catch (error) {
throw new Error(`Error uploading file: ${error.message}`);
logAxiosError({
message: `Error uploading code environment file: ${error.message}`,
error,
});
throw new Error(`Error uploading code environment file: ${error.message}`);
}
}

View file

@ -12,6 +12,7 @@ const {
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { convertImage } = require('~/server/services/Files/images/convert');
const { createFile, getFiles, updateFile } = require('~/models/File');
const { logAxiosError } = require('~/utils');
const { logger } = require('~/config');
/**
@ -85,7 +86,10 @@ const processCodeOutput = async ({
/** Note: `messageId` & `toolCallId` are not part of file DB schema; message object records associated file ID */
return Object.assign(file, { messageId, toolCallId });
} catch (error) {
logger.error('Error downloading file:', error);
logAxiosError({
message: 'Error downloading code environment file',
error,
});
}
};
@ -135,7 +139,10 @@ async function getSessionInfo(fileIdentifier, apiKey) {
return response.data.find((file) => file.name.startsWith(path))?.lastModified;
} catch (error) {
logger.error(`Error fetching session info: ${error.message}`, error);
logAxiosError({
message: `Error fetching session info: ${error.message}`,
error,
});
return null;
}
}
@ -202,7 +209,7 @@ const primeFiles = async (options, apiKey) => {
const { handleFileUpload: uploadCodeEnvFile } = getStrategyFunctions(
FileSources.execute_code,
);
const stream = await getDownloadStream(file.filepath);
const stream = await getDownloadStream(options.req, file.filepath);
const fileIdentifier = await uploadCodeEnvFile({
req: options.req,
stream,

View file

@ -224,10 +224,11 @@ async function uploadFileToFirebase({ req, file, file_id }) {
/**
* Retrieves a readable stream for a file from Firebase storage.
*
* @param {ServerRequest} _req
* @param {string} filepath - The filepath.
* @returns {Promise<ReadableStream>} A readable stream of the file.
*/
async function getFirebaseFileStream(filepath) {
async function getFirebaseFileStream(_req, filepath) {
try {
const storage = getFirebaseStorage();
if (!storage) {

View file

@ -175,6 +175,17 @@ const isValidPath = (req, base, subfolder, filepath) => {
return normalizedFilepath.startsWith(normalizedBase);
};
/**
* @param {string} filepath
*/
const unlinkFile = async (filepath) => {
try {
await fs.promises.unlink(filepath);
} catch (error) {
logger.error('Error deleting file:', error);
}
};
/**
* Deletes a file from the filesystem. This function takes a file object, constructs the full path, and
* verifies the path's validity before deleting the file. If the path is invalid, an error is thrown.
@ -217,7 +228,7 @@ const deleteLocalFile = async (req, file) => {
throw new Error(`Invalid file path: ${file.filepath}`);
}
await fs.promises.unlink(filepath);
await unlinkFile(filepath);
return;
}
@ -233,7 +244,7 @@ const deleteLocalFile = async (req, file) => {
throw new Error('Invalid file path');
}
await fs.promises.unlink(filepath);
await unlinkFile(filepath);
};
/**
@ -275,11 +286,31 @@ async function uploadLocalFile({ req, file, file_id }) {
/**
* Retrieves a readable stream for a file from local storage.
*
* @param {ServerRequest} req - The request object from Express
* @param {string} filepath - The filepath.
* @returns {ReadableStream} A readable stream of the file.
*/
function getLocalFileStream(filepath) {
function getLocalFileStream(req, filepath) {
try {
if (filepath.includes('/uploads/')) {
const basePath = filepath.split('/uploads/')[1];
if (!basePath) {
logger.warn(`Invalid base path: ${filepath}`);
throw new Error(`Invalid file path: ${filepath}`);
}
const fullPath = path.join(req.app.locals.paths.uploads, basePath);
const uploadsDir = req.app.locals.paths.uploads;
const rel = path.relative(uploadsDir, fullPath);
if (rel.startsWith('..') || path.isAbsolute(rel) || rel.includes(`..${path.sep}`)) {
logger.warn(`Invalid relative file path: ${filepath}`);
throw new Error(`Invalid file path: ${filepath}`);
}
return fs.createReadStream(fullPath);
}
return fs.createReadStream(filepath);
} catch (error) {
logger.error('Error getting local file stream:', error);
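For reference, a worked sketch of the new containment check (illustrative paths; assumes the uploads directory resolves to /app/uploads):
// filepath '/uploads/user1/img.png' -> basePath 'user1/img.png'
//   fullPath '/app/uploads/user1/img.png', rel 'user1/img.png' -> allowed
// filepath '/uploads/../.env' -> basePath '../.env'
//   fullPath '/app/.env', rel '../.env' -> starts with '..' -> rejected as an invalid file path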

View file

@ -37,7 +37,14 @@ const deleteVectors = async (req, file) => {
error,
message: 'Error deleting vectors',
});
throw new Error(error.message || 'An error occurred during file deletion.');
if (
error.response &&
error.response.status !== 404 &&
(error.response.status < 200 || error.response.status >= 300)
) {
logger.warn('Error deleting vectors, file will not be deleted');
throw new Error(error.message || 'An error occurred during file deletion.');
}
}
};

View file

@ -347,8 +347,8 @@ const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true })
req.app.locals.imageOutputType
}`;
}
const filepath = await saveBuffer({ userId: req.user.id, fileName: filename, buffer });
const fileName = `${file_id}-${filename}`;
const filepath = await saveBuffer({ userId: req.user.id, fileName, buffer });
return await createFile(
{
user: req.user.id,
@ -801,8 +801,7 @@ async function saveBase64Image(
{ req, file_id: _file_id, filename: _filename, endpoint, context, resolution = 'high' },
) {
const file_id = _file_id ?? v4();
let filename = _filename;
let filename = `${file_id}-${_filename}`;
const { buffer: inputBuffer, type } = base64ToBuffer(url);
if (!path.extname(_filename)) {
const extension = mime.getExtension(type);

View file

@ -129,9 +129,6 @@ const fetchOpenAIModels = async (opts, _models = []) => {
// .split('/deployments')[0]
// .concat(`/models?api-version=${azure.azureOpenAIApiVersion}`);
// apiKey = azureOpenAIApiKey;
} else if (process.env.OPENROUTER_API_KEY) {
reverseProxyUrl = 'https://openrouter.ai/api/v1';
apiKey = process.env.OPENROUTER_API_KEY;
}
if (reverseProxyUrl) {
@ -218,7 +215,7 @@ const getOpenAIModels = async (opts) => {
return models;
}
if (userProvidedOpenAI && !process.env.OPENROUTER_API_KEY) {
if (userProvidedOpenAI) {
return models;
}

View file

@ -161,22 +161,6 @@ describe('getOpenAIModels', () => {
expect(models).toEqual(expect.arrayContaining(['openai-model', 'openai-model-2']));
});
it('attempts to use OPENROUTER_API_KEY if set', async () => {
process.env.OPENROUTER_API_KEY = 'test-router-key';
const expectedModels = ['model-router-1', 'model-router-2'];
axios.get.mockResolvedValue({
data: {
data: expectedModels.map((id) => ({ id })),
},
});
const models = await getOpenAIModels({ user: 'user456' });
expect(models).toEqual(expect.arrayContaining(expectedModels));
expect(axios.get).toHaveBeenCalled();
});
it('utilizes proxy configuration when PROXY is set', async () => {
axios.get.mockResolvedValue({
data: {

View file

@ -34,6 +34,8 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
multiConvo: interfaceConfig?.multiConvo ?? defaults.multiConvo,
agents: interfaceConfig?.agents ?? defaults.agents,
temporaryChat: interfaceConfig?.temporaryChat ?? defaults.temporaryChat,
runCode: interfaceConfig?.runCode ?? defaults.runCode,
customWelcome: interfaceConfig?.customWelcome ?? defaults.customWelcome,
});
await updateAccessPermissions(roleName, {
@ -41,12 +43,16 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: loadedInterface.multiConvo },
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
});
await updateAccessPermissions(SystemRoles.ADMIN, {
[PermissionTypes.PROMPTS]: { [Permissions.USE]: loadedInterface.prompts },
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: loadedInterface.bookmarks },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: loadedInterface.multiConvo },
[PermissionTypes.AGENTS]: { [Permissions.USE]: loadedInterface.agents },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: loadedInterface.temporaryChat },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: loadedInterface.runCode },
});
let i = 0;

View file

@ -14,6 +14,8 @@ describe('loadDefaultInterface', () => {
bookmarks: true,
multiConvo: true,
agents: true,
temporaryChat: true,
runCode: true,
},
};
const configDefaults = { interface: {} };
@ -25,6 +27,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
});
});
@ -35,6 +39,8 @@ describe('loadDefaultInterface', () => {
bookmarks: false,
multiConvo: false,
agents: false,
temporaryChat: false,
runCode: false,
},
};
const configDefaults = { interface: {} };
@ -46,6 +52,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: false },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
});
});
@ -60,6 +68,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
});
});
@ -70,6 +80,8 @@ describe('loadDefaultInterface', () => {
bookmarks: undefined,
multiConvo: undefined,
agents: undefined,
temporaryChat: undefined,
runCode: undefined,
},
};
const configDefaults = { interface: {} };
@ -81,6 +93,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
});
});
@ -91,6 +105,8 @@ describe('loadDefaultInterface', () => {
bookmarks: false,
multiConvo: undefined,
agents: true,
temporaryChat: undefined,
runCode: false,
},
};
const configDefaults = { interface: {} };
@ -102,6 +118,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
});
});
@ -113,6 +131,8 @@ describe('loadDefaultInterface', () => {
bookmarks: true,
multiConvo: true,
agents: true,
temporaryChat: true,
runCode: true,
},
};
@ -123,6 +143,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: true },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: true },
});
});
@ -137,6 +159,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
});
});
@ -151,6 +175,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
});
});
@ -165,6 +191,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: undefined },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: undefined },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
});
});
@ -175,6 +203,8 @@ describe('loadDefaultInterface', () => {
bookmarks: false,
multiConvo: true,
agents: false,
temporaryChat: true,
runCode: false,
},
};
const configDefaults = { interface: {} };
@ -186,6 +216,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: false },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: true },
[PermissionTypes.AGENTS]: { [Permissions.USE]: false },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: true },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: false },
});
});
@ -197,6 +229,8 @@ describe('loadDefaultInterface', () => {
bookmarks: true,
multiConvo: false,
agents: undefined,
temporaryChat: undefined,
runCode: undefined,
},
};
@ -207,6 +241,8 @@ describe('loadDefaultInterface', () => {
[PermissionTypes.BOOKMARKS]: { [Permissions.USE]: true },
[PermissionTypes.MULTI_CONVO]: { [Permissions.USE]: false },
[PermissionTypes.AGENTS]: { [Permissions.USE]: undefined },
[PermissionTypes.TEMPORARY_CHAT]: { [Permissions.USE]: undefined },
[PermissionTypes.RUN_CODE]: { [Permissions.USE]: undefined },
});
});
});

View file

@ -0,0 +1,238 @@
const { sign } = require('jsonwebtoken');
const { webcrypto } = require('node:crypto');
const { hashBackupCode, decryptV2 } = require('~/server/utils/crypto');
const { updateUser } = require('~/models/userMethods');
const BASE32_ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567';
/**
* Encodes a Buffer into a Base32 string using the RFC 4648 alphabet.
*
* @param {Buffer} buffer - The buffer to encode.
* @returns {string} The Base32 encoded string.
*/
const encodeBase32 = (buffer) => {
let bits = 0;
let value = 0;
let output = '';
for (const byte of buffer) {
value = (value << 8) | byte;
bits += 8;
while (bits >= 5) {
output += BASE32_ALPHABET[(value >>> (bits - 5)) & 31];
bits -= 5;
}
}
if (bits > 0) {
output += BASE32_ALPHABET[(value << (5 - bits)) & 31];
}
return output;
};
/**
* Decodes a Base32-encoded string back into a Buffer.
*
* @param {string} base32Str - The Base32-encoded string.
* @returns {Buffer} The decoded buffer.
*/
const decodeBase32 = (base32Str) => {
const cleaned = base32Str.replace(/=+$/, '').toUpperCase();
let bits = 0;
let value = 0;
const output = [];
for (const char of cleaned) {
const idx = BASE32_ALPHABET.indexOf(char);
if (idx === -1) {
continue;
}
value = (value << 5) | idx;
bits += 5;
if (bits >= 8) {
output.push((value >>> (bits - 8)) & 0xff);
bits -= 8;
}
}
return Buffer.from(output);
};
/**
* Generates a temporary token for 2FA verification.
* The token is signed with the JWT_SECRET and expires in 5 minutes.
*
* @param {string} userId - The unique identifier of the user.
* @returns {string} The signed JWT token.
*/
const generate2FATempToken = (userId) =>
sign({ userId, twoFAPending: true }, process.env.JWT_SECRET, { expiresIn: '5m' });
/**
* Generates a TOTP secret.
* Creates 10 random bytes using WebCrypto and encodes them into a Base32 string.
*
* @returns {string} A Base32-encoded secret for TOTP.
*/
const generateTOTPSecret = () => {
const randomArray = new Uint8Array(10);
webcrypto.getRandomValues(randomArray);
return encodeBase32(Buffer.from(randomArray));
};
/**
* Generates a Time-based One-Time Password (TOTP) based on the provided secret and time.
* This implementation uses a 30-second time step and produces a 6-digit code.
*
* @param {string} secret - The Base32-encoded TOTP secret.
* @param {number} [forTime=Date.now()] - The time (in milliseconds) for which to generate the TOTP.
* @returns {Promise<string>} A promise that resolves to the 6-digit TOTP code.
*/
const generateTOTP = async (secret, forTime = Date.now()) => {
const timeStep = 30; // seconds
const counter = Math.floor(forTime / 1000 / timeStep);
const counterBuffer = new ArrayBuffer(8);
const counterView = new DataView(counterBuffer);
// Write counter into the last 4 bytes (big-endian)
counterView.setUint32(4, counter, false);
// Decode the secret into an ArrayBuffer
const keyBuffer = decodeBase32(secret);
const keyArrayBuffer = keyBuffer.buffer.slice(
keyBuffer.byteOffset,
keyBuffer.byteOffset + keyBuffer.byteLength,
);
// Import the key for HMAC-SHA1 signing
const cryptoKey = await webcrypto.subtle.importKey(
'raw',
keyArrayBuffer,
{ name: 'HMAC', hash: 'SHA-1' },
false,
['sign'],
);
// Generate HMAC signature
const signatureBuffer = await webcrypto.subtle.sign('HMAC', cryptoKey, counterBuffer);
const hmac = new Uint8Array(signatureBuffer);
// Dynamic truncation as per RFC 4226
const offset = hmac[hmac.length - 1] & 0xf;
const slice = hmac.slice(offset, offset + 4);
const view = new DataView(slice.buffer, slice.byteOffset, slice.byteLength);
const binaryCode = view.getUint32(0, false) & 0x7fffffff;
const code = (binaryCode % 1000000).toString().padStart(6, '0');
return code;
};
/**
* Verifies a provided TOTP token against the secret.
* It allows for a ±1 time-step window to account for slight clock discrepancies.
*
* @param {string} secret - The Base32-encoded TOTP secret.
* @param {string} token - The TOTP token provided by the user.
* @returns {Promise<boolean>} A promise that resolves to true if the token is valid; otherwise, false.
*/
const verifyTOTP = async (secret, token) => {
const timeStepMS = 30 * 1000;
const currentTime = Date.now();
for (let offset = -1; offset <= 1; offset++) {
const expected = await generateTOTP(secret, currentTime + offset * timeStepMS);
if (expected === token) {
return true;
}
}
return false;
};
/**
* Generates backup codes for two-factor authentication.
* Each backup code is an 8-character hexadecimal string paired with its SHA-256 hash.
* The plain codes are returned for one-time download, while the hashed objects are meant for secure storage.
*
* @param {number} [count=10] - The number of backup codes to generate.
* @returns {Promise<{ plainCodes: string[], codeObjects: Array<{ codeHash: string, used: boolean, usedAt: Date | null }> }>}
* A promise that resolves to an object containing both plain backup codes and their corresponding code objects.
*/
const generateBackupCodes = async (count = 10) => {
const plainCodes = [];
const codeObjects = [];
const encoder = new TextEncoder();
for (let i = 0; i < count; i++) {
const randomArray = new Uint8Array(4);
webcrypto.getRandomValues(randomArray);
const code = Array.from(randomArray)
.map((b) => b.toString(16).padStart(2, '0'))
.join(''); // 8-character hex code
plainCodes.push(code);
// Compute SHA-256 hash of the code using WebCrypto
const codeBuffer = encoder.encode(code);
const hashBuffer = await webcrypto.subtle.digest('SHA-256', codeBuffer);
const hashArray = Array.from(new Uint8Array(hashBuffer));
const codeHash = hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
codeObjects.push({ codeHash, used: false, usedAt: null });
}
return { plainCodes, codeObjects };
};
/**
* Verifies a backup code for a user and updates its status as used if valid.
*
* @param {Object} params - The parameters object.
* @param {TUser | undefined} [params.user] - The user object containing backup codes.
* @param {string | undefined} [params.backupCode] - The backup code to verify.
* @returns {Promise<boolean>} A promise that resolves to true if the backup code is valid and updated; otherwise, false.
*/
const verifyBackupCode = async ({ user, backupCode }) => {
if (!backupCode || !user || !Array.isArray(user.backupCodes)) {
return false;
}
const hashedInput = await hashBackupCode(backupCode.trim());
const matchingCode = user.backupCodes.find(
(codeObj) => codeObj.codeHash === hashedInput && !codeObj.used,
);
if (matchingCode) {
const updatedBackupCodes = user.backupCodes.map((codeObj) =>
codeObj.codeHash === hashedInput && !codeObj.used
? { ...codeObj, used: true, usedAt: new Date() }
: codeObj,
);
await updateUser(user._id, { backupCodes: updatedBackupCodes });
return true;
}
return false;
};
/**
* Retrieves and, if necessary, decrypts a stored TOTP secret.
* If the secret contains a colon, it is assumed to be in the format "iv:encryptedData" and will be decrypted.
* If the secret is exactly 16 characters long, it is assumed to be a legacy plain secret.
*
* @param {string|null} storedSecret - The stored TOTP secret (which may be encrypted).
* @returns {Promise<string|null>} A promise that resolves to the plain TOTP secret, or null if none is provided.
*/
const getTOTPSecret = async (storedSecret) => {
if (!storedSecret) { return null; }
// Check for a colon marker (encrypted secrets are stored as "iv:encryptedData")
if (storedSecret.includes(':')) {
return await decryptV2(storedSecret);
}
// If it's exactly 16 characters, assume it's already plain (legacy secret)
if (storedSecret.length === 16) {
return storedSecret;
}
// Fallback: return the stored value unchanged if it matches neither format.
return storedSecret;
};
module.exports = {
verifyTOTP,
generateTOTP,
getTOTPSecret,
verifyBackupCode,
generateTOTPSecret,
generateBackupCodes,
generate2FATempToken,
};
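A quick round-trip of the TOTP helpers above, assuming the module resolves under the ~/server/services/twoFactorService alias used elsewhere in the codebase; the expected results in the comments hold in the common case, away from a 30-second step boundary:

const {
  generateTOTPSecret,
  generateTOTP,
  verifyTOTP,
} = require('~/server/services/twoFactorService');

(async () => {
  const secret = generateTOTPSecret(); // 16-character Base32 string (10 random bytes)
  const current = await generateTOTP(secret); // 6-digit code for the current 30s window
  console.log(await verifyTOTP(secret, current)); // true
  const previous = await generateTOTP(secret, Date.now() - 30 * 1000);
  console.log(await verifyTOTP(secret, previous)); // true — ±1 time step is tolerated
  const stale = await generateTOTP(secret, Date.now() - 90 * 1000);
  console.log(await verifyTOTP(secret, stale)); // false — outside the allowed window
})();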

View file

@ -112,4 +112,25 @@ async function getRandomValues(length) {
return Buffer.from(randomValues).toString('hex');
}
module.exports = { encrypt, decrypt, encryptV2, decryptV2, hashToken, getRandomValues };
/**
* Computes the SHA-256 hash of the given input using WebCrypto
* @param {string} input
* @returns {Promise<string>} - Hex hash string
*/
const hashBackupCode = async (input) => {
const encoder = new TextEncoder();
const data = encoder.encode(input);
const hashBuffer = await webcrypto.subtle.digest('SHA-256', data);
const hashArray = Array.from(new Uint8Array(hashBuffer));
return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
};
module.exports = {
encrypt,
decrypt,
encryptV2,
decryptV2,
hashToken,
hashBackupCode,
getRandomValues,
};
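For reference, a small sketch of how hashBackupCode pairs with the stored code objects; the code value here is made up for illustration:

const { hashBackupCode } = require('~/server/utils/crypto');

(async () => {
  const submitted = '3f9a1c2b'; // hypothetical 8-character hex backup code
  const storedHash = await hashBackupCode(submitted);
  // The same input always yields the same hex digest, so a later submission
  // can be compared against the stored hash without keeping the plain code.
  console.log((await hashBackupCode(submitted)) === storedHash); // true
})();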

View file

@ -1,4 +1,4 @@
const express = require('express');
const expressStaticGzip = require('express-static-gzip');
const oneDayInSeconds = 24 * 60 * 60;
@ -6,13 +6,13 @@ const sMaxAge = process.env.STATIC_CACHE_S_MAX_AGE || oneDayInSeconds;
const maxAge = process.env.STATIC_CACHE_MAX_AGE || oneDayInSeconds * 2;
const staticCache = (staticPath) =>
express.static(staticPath, {
setHeaders: (res) => {
if (process.env.NODE_ENV?.toLowerCase() !== 'production') {
return;
expressStaticGzip(staticPath, {
enableBrotli: false, // disable Brotli, only using gzip
orderPreference: ['gz'],
setHeaders: (res, _path) => {
if (process.env.NODE_ENV?.toLowerCase() === 'production') {
res.setHeader('Cache-Control', `public, max-age=${maxAge}, s-maxage=${sMaxAge}`);
}
res.setHeader('Cache-Control', `public, max-age=${maxAge}, s-maxage=${sMaxAge}`);
},
});
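A sketch of how the reworked middleware might be mounted; the file path, export name, and port below are illustrative rather than taken from this diff:

const path = require('path');
const express = require('express');
const staticCache = require('./staticCache'); // assumes the file above exports the middleware factory

const app = express();
app.use(staticCache(path.join(__dirname, 'client', 'dist')));
// express-static-gzip answers a request for /assets/index.js with
// /assets/index.js.gz (Content-Encoding: gzip) when the precompressed file
// exists and the client accepts gzip; Cache-Control is only set in production.
app.listen(3080);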