Merge branch 'dev' into added-codeql

Commit 74cdad76ba by Ruben Talstra, 2025-05-22 10:36:14 +02:00, committed via GitHub.
158 changed files with 5793 additions and 965 deletions.


@@ -23,6 +23,7 @@ const { updateAction, getActions } = require('~/models/Action');
const { updateAgentProjects } = require('~/models/Agent');
const { getProjectByName } = require('~/models/Project');
const { deleteFileByFilter } = require('~/models/File');
const { revertAgentVersion } = require('~/models/Agent');
const { logger } = require('~/config');
const systemTools = {
@@ -104,6 +105,8 @@ const getAgentHandler = async (req, res) => {
return res.status(404).json({ error: 'Agent not found' });
}
agent.version = agent.versions ? agent.versions.length : 0;
if (agent.avatar && agent.avatar?.source === FileSources.s3) {
const originalUrl = agent.avatar.filepath;
agent.avatar.filepath = await refreshS3Url(agent.avatar);
@@ -127,6 +130,7 @@ const getAgentHandler = async (req, res) => {
author: agent.author,
projectIds: agent.projectIds,
isCollaborative: agent.isCollaborative,
version: agent.version,
});
}
return res.status(200).json(agent);
@@ -187,6 +191,14 @@ const updateAgentHandler = async (req, res) => {
return res.json(updatedAgent);
} catch (error) {
logger.error('[/Agents/:id] Error updating Agent', error);
if (error.statusCode === 409) {
return res.status(409).json({
error: error.message,
details: error.details,
});
}
res.status(500).json({ error: error.message });
}
};
@@ -411,6 +423,66 @@ const uploadAgentAvatarHandler = async (req, res) => {
}
};
/**
* Reverts an agent to a previous version from its version history.
* @route POST /agents/:id/revert
* @param {object} req - Express Request object
* @param {object} req.params - Request parameters
* @param {string} req.params.id - The ID of the agent to revert
* @param {object} req.body - Request body
* @param {number} req.body.version_index - The index of the version to revert to
* @param {object} req.user - Authenticated user information
* @param {string} req.user.id - User ID
* @param {string} req.user.role - User role
* @param {ServerResponse} res - Express Response object
* @returns {Promise<Agent>} 200 - The updated agent after reverting to the specified version
* @throws {Error} 400 - If version_index is missing
* @throws {Error} 403 - If user doesn't have permission to modify the agent
* @throws {Error} 404 - If agent not found
* @throws {Error} 500 - If there's an internal server error during the reversion process
*/
const revertAgentVersionHandler = async (req, res) => {
try {
const { id } = req.params;
const { version_index } = req.body;
if (version_index === undefined) {
return res.status(400).json({ error: 'version_index is required' });
}
const isAdmin = req.user.role === SystemRoles.ADMIN;
const existingAgent = await getAgent({ id });
if (!existingAgent) {
return res.status(404).json({ error: 'Agent not found' });
}
const isAuthor = existingAgent.author.toString() === req.user.id;
const hasEditPermission = existingAgent.isCollaborative || isAdmin || isAuthor;
if (!hasEditPermission) {
return res.status(403).json({
error: 'You do not have permission to modify this non-collaborative agent',
});
}
const updatedAgent = await revertAgentVersion({ id }, version_index);
if (updatedAgent.author) {
updatedAgent.author = updatedAgent.author.toString();
}
if (updatedAgent.author !== req.user.id) {
delete updatedAgent.author;
}
return res.json(updatedAgent);
} catch (error) {
logger.error('[/agents/:id/revert] Error reverting Agent version', error);
res.status(500).json({ error: error.message });
}
};
module.exports = {
createAgent: createAgentHandler,
getAgent: getAgentHandler,
@@ -419,4 +491,5 @@ module.exports = {
deleteAgent: deleteAgentHandler,
getListAgents: getListAgentsHandler,
uploadAgentAvatar: uploadAgentAvatarHandler,
revertAgentVersion: revertAgentVersionHandler,
};
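
For reference, a minimal sketch of how a client could exercise the new revert endpoint, assuming the agents router is mounted under /api/agents and the caller is already authenticated; the URL prefix and IDs are illustrative, not taken from this diff:

// Hypothetical client call to the new revert endpoint (sketch only).
// Requires the caller to be the author, an admin, or the agent to be collaborative.
async function revertAgent(agentId, versionIndex) {
  const res = await fetch(`/api/agents/${agentId}/revert`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ version_index: versionIndex }),
  });
  if (!res.ok) {
    // 400: missing version_index, 403: no edit permission, 404: agent not found
    throw new Error(`Revert failed with status ${res.status}`);
  }
  return res.json(); // the updated agent after reverting
}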


@@ -326,8 +326,15 @@ const chatV1 = async (req, res) => {
file_ids = files.map(({ file_id }) => file_id);
if (file_ids.length || thread_file_ids.length) {
userMessage.file_ids = file_ids;
attachedFileIds = new Set([...file_ids, ...thread_file_ids]);
if (endpoint === EModelEndpoint.azureAssistants) {
userMessage.attachments = Array.from(attachedFileIds).map((file_id) => ({
file_id,
tools: [{ type: 'file_search' }],
}));
} else {
userMessage.file_ids = Array.from(attachedFileIds);
}
}
};
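
To make the branch above concrete, a small sketch of the two user-message shapes it produces; the file IDs are illustrative:

// Sketch: how attached file IDs are surfaced on the user message.
const attachedFileIds = new Set(['file-abc', 'file-def']);

// endpoint === EModelEndpoint.azureAssistants: files become attachments with a file_search tool
const azureShape = {
  attachments: Array.from(attachedFileIds).map((file_id) => ({
    file_id,
    tools: [{ type: 'file_search' }],
  })),
};

// any other assistants endpoint: a flat list of file IDs
const defaultShape = { file_ids: Array.from(attachedFileIds) };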


@@ -24,10 +24,13 @@ const routes = require('./routes');
const { PORT, HOST, ALLOW_SOCIAL_LOGIN, DISABLE_COMPRESSION, TRUST_PROXY } = process.env ?? {};
const port = Number(PORT) || 3080;
// Allow PORT=0 to be used for automatic free port assignment
const port = isNaN(Number(PORT)) ? 3080 : Number(PORT);
const host = HOST || 'localhost';
const trusted_proxy = Number(TRUST_PROXY) || 1; /* trust first proxy by default */
const app = express();
const startServer = async () => {
if (typeof Bun !== 'undefined') {
axios.defaults.headers.common['Accept-Encoding'] = 'gzip';
@@ -36,8 +39,9 @@ const startServer = async () => {
logger.info('Connected to MongoDB');
await indexSync();
const app = express();
app.disable('x-powered-by');
app.set('trust proxy', trusted_proxy);
await AppService(app);
const indexPath = path.join(app.locals.paths.dist, 'index.html');
@@ -49,23 +53,24 @@ const startServer = async () => {
app.use(noIndex);
app.use(errorController);
app.use(express.json({ limit: '3mb' }));
app.use(mongoSanitize());
app.use(express.urlencoded({ extended: true, limit: '3mb' }));
app.use(staticCache(app.locals.paths.dist));
app.use(staticCache(app.locals.paths.fonts));
app.use(staticCache(app.locals.paths.assets));
app.set('trust proxy', trusted_proxy);
app.use(mongoSanitize());
app.use(cors());
app.use(cookieParser());
if (!isEnabled(DISABLE_COMPRESSION)) {
app.use(compression());
} else {
console.warn('Response compression has been disabled via DISABLE_COMPRESSION.');
}
// Serve static assets with aggressive caching
app.use(staticCache(app.locals.paths.dist));
app.use(staticCache(app.locals.paths.fonts));
app.use(staticCache(app.locals.paths.assets));
if (!ALLOW_SOCIAL_LOGIN) {
console.warn(
'Social logins are disabled. Set Environment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.',
);
console.warn('Social logins are disabled. Set ALLOW_SOCIAL_LOGIN=true to enable them.');
}
/* OAUTH */
@@ -128,7 +133,7 @@ const startServer = async () => {
});
app.listen(port, host, () => {
if (host == '0.0.0.0') {
if (host === '0.0.0.0') {
logger.info(
`Server listening on all interfaces at port ${port}. Use http://localhost:${port} to access it`,
);
@@ -176,3 +181,6 @@ process.on('uncaughtException', (err) => {
process.exit(1);
});
// export app for easier testing purposes
module.exports = app;
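
The port change matters for the new spec below: Number(PORT) || 3080 coerces PORT=0 back to 3080 because 0 is falsy, while the isNaN check preserves it so the OS can assign a free port. A minimal sketch of the difference:

// Why the falsy check breaks PORT=0 while the isNaN check does not.
const oldPort = (PORT) => Number(PORT) || 3080;
const newPort = (PORT) => (isNaN(Number(PORT)) ? 3080 : Number(PORT));

console.log(oldPort('0')); // 3080: 0 is falsy, so the default wins
console.log(newPort('0')); // 0: lets the OS pick a free port (used by index.spec.js)
console.log(newPort(undefined)); // 3080: Number(undefined) is NaN, so the default applies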

api/server/index.spec.js (new file, 78 lines)

@@ -0,0 +1,78 @@
const fs = require('fs');
const path = require('path');
const request = require('supertest');
const { MongoMemoryServer } = require('mongodb-memory-server');
const mongoose = require('mongoose');
describe('Server Configuration', () => {
// Increase the default timeout to allow for Mongo cleanup
jest.setTimeout(30_000);
let mongoServer;
let app;
/** Mocked fs.readFileSync for index.html */
const originalReadFileSync = fs.readFileSync;
beforeAll(() => {
fs.readFileSync = function (filepath, options) {
if (filepath.includes('index.html')) {
return '<!DOCTYPE html><html><head><title>LibreChat</title></head><body><div id="root"></div></body></html>';
}
return originalReadFileSync(filepath, options);
};
});
afterAll(() => {
// Restore original fs.readFileSync
fs.readFileSync = originalReadFileSync;
});
beforeAll(async () => {
mongoServer = await MongoMemoryServer.create();
process.env.MONGO_URI = mongoServer.getUri();
process.env.PORT = '0'; // Use a random available port
app = require('~/server');
// Wait for the app to be healthy
await healthCheckPoll(app);
});
afterAll(async () => {
await mongoServer.stop();
await mongoose.disconnect();
});
it('should return OK for /health', async () => {
const response = await request(app).get('/health');
expect(response.status).toBe(200);
expect(response.text).toBe('OK');
});
it('should not cache index page', async () => {
const response = await request(app).get('/');
expect(response.status).toBe(200);
expect(response.headers['cache-control']).toBe('no-cache, no-store, must-revalidate');
expect(response.headers['pragma']).toBe('no-cache');
expect(response.headers['expires']).toBe('0');
});
});
// Polls the /health endpoint every 30ms for up to 10 seconds to wait for the server to start completely
async function healthCheckPoll(app, retries = 0) {
const maxRetries = Math.floor(10000 / 30); // 10 seconds / 30ms
try {
const response = await request(app).get('/health');
if (response.status === 200) {
return; // App is healthy
}
} catch (error) {
// Ignore connection errors during polling
}
if (retries < maxRetries) {
await new Promise((resolve) => setTimeout(resolve, 30));
await healthCheckPoll(app, retries + 1);
} else {
throw new Error('App did not become healthy within 10 seconds.');
}
}


@@ -78,6 +78,15 @@ router.post('/:id/duplicate', checkAgentCreate, v1.duplicateAgent);
*/
router.delete('/:id', checkAgentCreate, v1.deleteAgent);
/**
* Reverts an agent to a previous version.
* @route POST /agents/:id/revert
* @param {string} req.params.id - Agent identifier.
* @param {number} req.body.version_index - Index of the version to revert to.
* @returns {Agent} 200 - success response - application/json
*/
router.post('/:id/revert', checkGlobalAgentShare, v1.revertAgentVersion);
/**
* Returns a list of agents.
* @route GET /agents


@@ -121,6 +121,14 @@ router.delete('/', async (req, res) => {
await processDeleteRequest({ req, files: assistantFiles });
res.status(200).json({ message: 'File associations removed successfully from assistant' });
return;
} else if (
req.body.assistant_id &&
req.body.files?.[0]?.filepath === EModelEndpoint.azureAssistants
) {
await processDeleteRequest({ req, files: req.body.files });
return res
.status(200)
.json({ message: 'File associations removed successfully from Azure Assistant' });
}
await processDeleteRequest({ req, files: dbFiles });
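
A hedged sketch of a delete request body that would take the new azureAssistants branch, assuming the files router is mounted under /api/files and EModelEndpoint.azureAssistants resolves to the string 'azureAssistants'; the IDs are illustrative:

// Illustrative DELETE /api/files body: assistant_id is present and the first
// file's filepath marks it as an Azure Assistants file, so processDeleteRequest
// runs directly on req.body.files.
const deleteBody = {
  assistant_id: 'asst_123',
  files: [
    {
      file_id: 'assistant-abc123',
      filepath: 'azureAssistants', // EModelEndpoint.azureAssistants
      embedded: false,
    },
  ],
};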


@@ -10,17 +10,7 @@ const getLogStores = require('~/cache/getLogStores');
* */
async function getCustomConfig() {
const cache = getLogStores(CacheKeys.CONFIG_STORE);
let customConfig = await cache.get(CacheKeys.CUSTOM_CONFIG);
if (!customConfig) {
customConfig = await loadCustomConfig();
}
if (!customConfig) {
return null;
}
return customConfig;
return (await cache.get(CacheKeys.CUSTOM_CONFIG)) || (await loadCustomConfig());
}
/**


@@ -29,7 +29,14 @@ async function loadConfigEndpoints(req) {
for (let i = 0; i < customEndpoints.length; i++) {
const endpoint = customEndpoints[i];
const { baseURL, apiKey, name: configName, iconURL, modelDisplayLabel } = endpoint;
const {
baseURL,
apiKey,
name: configName,
iconURL,
modelDisplayLabel,
customParams,
} = endpoint;
const name = normalizeEndpointName(configName);
const resolvedApiKey = extractEnvVariable(apiKey);
@@ -41,6 +48,7 @@
userProvideURL: isUserProvided(resolvedBaseURL),
modelDisplayLabel,
iconURL,
customParams,
};
}
}


@@ -1,10 +1,18 @@
const path = require('path');
const { CacheKeys, configSchema, EImageOutputType } = require('librechat-data-provider');
const {
CacheKeys,
configSchema,
EImageOutputType,
validateSettingDefinitions,
agentParamSettings,
paramSettings,
} = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const loadYaml = require('~/utils/loadYaml');
const { logger } = require('~/config');
const axios = require('axios');
const yaml = require('js-yaml');
const keyBy = require('lodash/keyBy');
const projectRoot = path.resolve(__dirname, '..', '..', '..', '..');
const defaultConfigPath = path.resolve(projectRoot, 'librechat.yaml');
@@ -105,6 +113,10 @@ https://www.librechat.ai/docs/configuration/stt_tts`);
logger.debug('Custom config:', customConfig);
}
(customConfig.endpoints?.custom ?? [])
.filter((endpoint) => endpoint.customParams)
.forEach((endpoint) => parseCustomParams(endpoint.name, endpoint.customParams));
if (customConfig.cache) {
const cache = getLogStores(CacheKeys.CONFIG_STORE);
await cache.set(CacheKeys.CUSTOM_CONFIG, customConfig);
@@ -117,4 +129,52 @@ https://www.librechat.ai/docs/configuration/stt_tts`);
return customConfig;
}
// Validate and fill out missing values for custom parameters
function parseCustomParams(endpointName, customParams) {
const paramEndpoint = customParams.defaultParamsEndpoint;
customParams.paramDefinitions = customParams.paramDefinitions || [];
// Checks if `defaultParamsEndpoint` is a key in `paramSettings`.
const validEndpoints = new Set([
...Object.keys(paramSettings),
...Object.keys(agentParamSettings),
]);
if (!validEndpoints.has(paramEndpoint)) {
throw new Error(
`defaultParamsEndpoint of "${endpointName}" endpoint is invalid. ` +
`Valid options are ${Array.from(validEndpoints).join(', ')}`,
);
}
// creates default param maps
const regularParams = paramSettings[paramEndpoint] ?? [];
const agentParams = agentParamSettings[paramEndpoint] ?? [];
const defaultParams = regularParams.concat(agentParams);
const defaultParamsMap = keyBy(defaultParams, 'key');
// TODO: Remove this check once we support new parameters not part of default parameters.
// Checks if every key in `paramDefinitions` is valid.
const validKeys = new Set(Object.keys(defaultParamsMap));
const paramKeys = customParams.paramDefinitions.map((param) => param.key);
if (paramKeys.some((key) => !validKeys.has(key))) {
throw new Error(
`paramDefinitions of "${endpointName}" endpoint contains invalid key(s). ` +
`Valid parameter keys are ${Array.from(validKeys).join(', ')}`,
);
}
// Fill out missing values for custom param definitions
customParams.paramDefinitions = customParams.paramDefinitions.map((param) => {
return { ...defaultParamsMap[param.key], ...param, optionType: 'custom' };
});
try {
validateSettingDefinitions(customParams.paramDefinitions);
} catch (e) {
throw new Error(
`Custom parameter definitions for "${endpointName}" endpoint is malformed: ${e.message}`,
);
}
}
module.exports = loadCustomConfig;
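
For reference, a hedged sketch of the customParams shape that parseCustomParams validates and fills in, written as the parsed JS object rather than librechat.yaml; the endpoint name and values are illustrative, and the valid defaultParamsEndpoint values and parameter keys come from paramSettings and agentParamSettings:

// Sketch of one custom endpoint entry with customParams (illustrative values).
const customEndpoint = {
  name: 'MyProvider',
  apiKey: 'user_provided',
  customParams: {
    // Must be a key of paramSettings or agentParamSettings.
    defaultParamsEndpoint: 'google',
    // Every key must exist among that endpoint's default parameters; missing
    // fields are filled from the defaults and optionType is forced to 'custom'.
    paramDefinitions: [
      { key: 'temperature', default: 0.7, range: { min: 0.1, max: 0.9, step: 0.1 } },
    ],
  },
};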


@@ -1,6 +1,34 @@
jest.mock('axios');
jest.mock('~/cache/getLogStores');
jest.mock('~/utils/loadYaml');
jest.mock('librechat-data-provider', () => {
const actual = jest.requireActual('librechat-data-provider');
return {
...actual,
paramSettings: { foo: {}, bar: {}, custom: {} },
agentParamSettings: {
custom: [],
google: [
{
key: 'pressure',
type: 'string',
component: 'input',
},
{
key: 'temperature',
type: 'number',
component: 'slider',
default: 0.5,
range: {
min: 0,
max: 2,
step: 0.01,
},
},
],
},
};
});
const axios = require('axios');
const loadCustomConfig = require('./loadCustomConfig');
@@ -150,4 +178,126 @@ describe('loadCustomConfig', () => {
expect(logger.info).toHaveBeenCalledWith(JSON.stringify(mockConfig, null, 2));
expect(logger.debug).toHaveBeenCalledWith('Custom config:', mockConfig);
});
describe('parseCustomParams', () => {
const mockConfig = {
version: '1.0',
cache: false,
endpoints: {
custom: [
{
name: 'Google',
apiKey: 'user_provided',
customParams: {},
},
],
},
};
async function loadCustomParams(customParams) {
mockConfig.endpoints.custom[0].customParams = customParams;
loadYaml.mockReturnValue(mockConfig);
return await loadCustomConfig();
}
beforeEach(() => {
jest.resetAllMocks();
process.env.CONFIG_PATH = 'validConfig.yaml';
});
it('returns no error when customParams is undefined', async () => {
const result = await loadCustomParams(undefined);
expect(result).toEqual(mockConfig);
});
it('returns no error when customParams is valid', async () => {
const result = await loadCustomParams({
defaultParamsEndpoint: 'google',
paramDefinitions: [
{
key: 'temperature',
default: 0.5,
},
],
});
expect(result).toEqual(mockConfig);
});
it('throws an error when paramDefinitions contain unsupported keys', async () => {
const malformedCustomParams = {
defaultParamsEndpoint: 'google',
paramDefinitions: [
{ key: 'temperature', default: 0.5 },
{ key: 'unsupportedKey', range: 0.5 },
],
};
await expect(loadCustomParams(malformedCustomParams)).rejects.toThrow(
'paramDefinitions of "Google" endpoint contains invalid key(s). Valid parameter keys are pressure, temperature',
);
});
it('throws an error when paramDefinitions is malformed', async () => {
const malformedCustomParams = {
defaultParamsEndpoint: 'google',
paramDefinitions: [
{
key: 'temperature',
type: 'noomba',
component: 'inpoot',
optionType: 'custom',
},
],
};
await expect(loadCustomParams(malformedCustomParams)).rejects.toThrow(
/Custom parameter definitions for "Google" endpoint is malformed:/,
);
});
it('throws an error when defaultParamsEndpoint is not provided', async () => {
const malformedCustomParams = { defaultParamsEndpoint: undefined };
await expect(loadCustomParams(malformedCustomParams)).rejects.toThrow(
'defaultParamsEndpoint of "Google" endpoint is invalid. Valid options are foo, bar, custom, google',
);
});
it('fills the paramDefinitions with missing values', async () => {
const customParams = {
defaultParamsEndpoint: 'google',
paramDefinitions: [
{ key: 'temperature', default: 0.7, range: { min: 0.1, max: 0.9, step: 0.1 } },
{ key: 'pressure', component: 'textarea' },
],
};
const parsedConfig = await loadCustomParams(customParams);
const paramDefinitions = parsedConfig.endpoints.custom[0].customParams.paramDefinitions;
expect(paramDefinitions).toEqual([
{
columnSpan: 1,
component: 'slider',
default: 0.7, // overridden
includeInput: true,
key: 'temperature',
label: 'temperature',
optionType: 'custom',
range: {
// overridden
max: 0.9,
min: 0.1,
step: 0.1,
},
type: 'number',
},
{
columnSpan: 1,
component: 'textarea', // overridden
key: 'pressure',
label: 'pressure',
optionType: 'custom',
placeholder: '',
type: 'string',
},
]);
});
});
});


@@ -105,6 +105,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
headers: resolvedHeaders,
addParams: endpointConfig.addParams,
dropParams: endpointConfig.dropParams,
customParams: endpointConfig.customParams,
titleConvo: endpointConfig.titleConvo,
titleModel: endpointConfig.titleModel,
forcePrompt: endpointConfig.forcePrompt,


@@ -54,7 +54,7 @@ async function deleteOpenAIFile(req, file, openai) {
throw new Error('OpenAI returned `false` for deleted status');
}
logger.debug(
`[deleteOpenAIFile] User ${req.user.id} successfully deleted ${file.file_id} from OpenAI`,
`[deleteOpenAIFile] User ${req.user.id} successfully deleted file "${file.file_id}" from OpenAI`,
);
} catch (error) {
logger.error('[deleteOpenAIFile] Error deleting file from OpenAI: ' + error.message);


@@ -5,9 +5,10 @@ const { EModelEndpoint } = require('librechat-data-provider');
* Resizes an image from a given buffer based on the specified resolution.
*
* @param {Buffer} inputBuffer - The buffer of the image to be resized.
* @param {'low' | 'high'} resolution - The resolution to resize the image to.
* @param {'low' | 'high' | {percentage?: number, px?: number}} resolution - The resolution to resize the image to.
* 'low' for a maximum of 512x512 resolution,
* 'high' for a maximum of 768x2000 resolution.
* 'high' for a maximum of 768x2000 resolution,
* or a custom object with percentage or px values.
* @param {EModelEndpoint} endpoint - Identifier for specific endpoint handling
* @returns {Promise<{buffer: Buffer, width: number, height: number}>} An object containing the resized image buffer and its dimensions.
* @throws Will throw an error if the resolution parameter is invalid.
@@ -17,10 +18,32 @@ async function resizeImageBuffer(inputBuffer, resolution, endpoint) {
const maxShortSideHighRes = 768;
const maxLongSideHighRes = endpoint === EModelEndpoint.anthropic ? 1568 : 2000;
let customPercent, customPx;
if (resolution && typeof resolution === 'object') {
if (typeof resolution.percentage === 'number') {
customPercent = resolution.percentage;
} else if (typeof resolution.px === 'number') {
customPx = resolution.px;
}
}
let newWidth, newHeight;
let resizeOptions = { fit: 'inside', withoutEnlargement: true };
if (resolution === 'low') {
if (customPercent != null || customPx != null) {
// percentage-based resize
const metadata = await sharp(inputBuffer).metadata();
if (customPercent != null) {
newWidth = Math.round(metadata.width * (customPercent / 100));
newHeight = Math.round(metadata.height * (customPercent / 100));
} else {
// pixel max on both sides
newWidth = Math.min(metadata.width, customPx);
newHeight = Math.min(metadata.height, customPx);
}
resizeOptions.width = newWidth;
resizeOptions.height = newHeight;
} else if (resolution === 'low') {
resizeOptions.width = maxLowRes;
resizeOptions.height = maxLowRes;
} else if (resolution === 'high') {
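
A short usage sketch of the extended resolution parameter, inside an async context; the buffer and endpoint are placeholders:

// resizeImageBuffer now also accepts an object resolution.
// Scale to 50% of the original dimensions:
const half = await resizeImageBuffer(inputBuffer, { percentage: 50 }, endpoint);

// Cap both sides at 1024px (never enlarging, per fit: 'inside'):
const capped = await resizeImageBuffer(inputBuffer, { px: 1024 }, endpoint);

// The string presets keep working as before:
const low = await resizeImageBuffer(inputBuffer, 'low', endpoint);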


@@ -137,11 +137,13 @@ const processDeleteRequest = async ({ req, files }) => {
/** @type {Record<string, OpenAI | undefined>} */
const client = { [FileSources.openai]: undefined, [FileSources.azure]: undefined };
const initializeClients = async () => {
const openAIClient = await getOpenAIClient({
req,
overrideEndpoint: EModelEndpoint.assistants,
});
client[FileSources.openai] = openAIClient.openai;
if (req.app.locals[EModelEndpoint.assistants]) {
const openAIClient = await getOpenAIClient({
req,
overrideEndpoint: EModelEndpoint.assistants,
});
client[FileSources.openai] = openAIClient.openai;
}
if (!req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
return;
@@ -693,7 +695,7 @@ const processOpenAIFile = async ({
const processOpenAIImageOutput = async ({ req, buffer, file_id, filename, fileExt }) => {
const currentDate = new Date();
const formattedDate = currentDate.toISOString();
const _file = await convertImage(req, buffer, 'high', `${file_id}${fileExt}`);
const _file = await convertImage(req, buffer, undefined, `${file_id}${fileExt}`);
const file = {
..._file,
usage: 1,
@@ -838,8 +840,9 @@ function base64ToBuffer(base64String) {
async function saveBase64Image(
url,
{ req, file_id: _file_id, filename: _filename, endpoint, context, resolution = 'high' },
{ req, file_id: _file_id, filename: _filename, endpoint, context, resolution },
) {
const effectiveResolution = resolution ?? req.app.locals.fileConfig?.imageGeneration ?? 'high';
const file_id = _file_id ?? v4();
let filename = `${file_id}-${_filename}`;
const { buffer: inputBuffer, type } = base64ToBuffer(url);
@@ -852,7 +855,7 @@ async function saveBase64Image(
}
}
const image = await resizeImageBuffer(inputBuffer, resolution, endpoint);
const image = await resizeImageBuffer(inputBuffer, effectiveResolution, endpoint);
const source = req.app.locals.fileStrategy;
const { saveBuffer } = getStrategyFunctions(source);
const filepath = await saveBuffer({


@@ -1,5 +1,6 @@
const { z } = require('zod');
const { tool } = require('@langchain/core/tools');
const { normalizeServerName } = require('librechat-mcp');
const { Constants: AgentConstants, Providers } = require('@librechat/agents');
const {
Constants,
@ -38,6 +39,7 @@ async function createMCPTool({ req, toolKey, provider: _provider }) {
}
const [toolName, serverName] = toolKey.split(Constants.mcp_delimiter);
const normalizedToolKey = `${toolName}${Constants.mcp_delimiter}${normalizeServerName(serverName)}`;
if (!req.user?.id) {
logger.error(
@@ -83,7 +85,7 @@ async function createMCPTool({ req, toolKey, provider: _provider }) {
const toolInstance = tool(_call, {
schema,
name: toolKey,
name: normalizedToolKey,
description: description || '',
responseFormat: AgentConstants.CONTENT_AND_ARTIFACT,
});


@@ -26,7 +26,17 @@ function loadTurnstileConfig(config, configDefaults) {
options: customTurnstile.options ?? defaults.options,
});
logger.info('Turnstile configuration loaded:\n' + JSON.stringify(loadedTurnstile, null, 2));
const enabled = Boolean(loadedTurnstile.siteKey);
if (enabled) {
logger.info(
'Turnstile is ENABLED with configuration:\n' + JSON.stringify(loadedTurnstile, null, 2),
);
} else {
logger.info('Turnstile is DISABLED (no siteKey provided).');
}
return loadedTurnstile;
}


@@ -14,6 +14,7 @@ const staticCache = (staticPath) =>
res.setHeader('Cache-Control', `public, max-age=${maxAge}, s-maxage=${sMaxAge}`);
}
},
index: false,
});
module.exports = staticCache;
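
Setting index: false stops express.static from serving index.html for directory requests, so / falls through to the app's own index route; that is what lets the new index.spec.js assert no-cache headers on the index page while other static assets keep the long-lived cache headers set above.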