mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-17 17:00:15 +01:00
🔧 refactor: Consolidate Logging, Model Selection & Actions Optimizations, Minor Fixes (#6553)
* 🔧 feat: Enhance logging configuration for production and debug environments
* 🔒 feat: Implement encryption and decryption functions for sensitive values in ActionService with URL encoding/decoding
* refactor: optimize action service for agent tools
* refactor: optimize action processing for Assistants API
* fix: handle case where agent is not found in loadAgent function
* refactor: improve error handling in API calls by throwing new Error with logAxiosError output
* chore: bump @librechat/agents to 2.3.95, fixes "Invalid tool call structure: No preceding AIMessage with tool_call_ids"
* refactor: enhance error logging in logAxiosError function to include response status
* refactor: remove unused useModelSelection hook from Endpoint
* refactor: add support for assistants in useSelectorEffects hook
* refactor: replace string easing with imported easings in Landing component
* chore: remove duplicate translation
* refactor: update model selection logic and improve localization for UI elements
* refactor: replace endpoint value checks with helper functions for agents and assistants
* refactor: optimize display value logic and utilize useMemo for performance improvements
* refactor: clean up imports and optimize display/icon value logic in endpoint components, fix spec selection
* refactor: enhance error logging in axios utility to include stack traces for better debugging
* refactor: update logging configuration to use DEBUG_LOGGING and streamline log level handling
* refactor: adjust className for export menu button to improve layout consistency and remove unused title prop from ShareButton
* refactor: update import path for logAxiosError utility to improve module organization and clarity
* refactor: implement debounced search value setter in ModelSelectorContext for improved performance
This commit is contained in:
parent 801b602e27
commit 299cabd6ed
26 changed files with 970 additions and 1135 deletions
@@ -4,7 +4,11 @@ require('winston-daily-rotate-file');

const logDir = path.join(__dirname, '..', 'logs');

const { NODE_ENV } = process.env;
const { NODE_ENV, DEBUG_LOGGING = false } = process.env;

const useDebugLogging =
  (typeof DEBUG_LOGGING === 'string' && DEBUG_LOGGING?.toLowerCase() === 'true') ||
  DEBUG_LOGGING === true;

const levels = {
  error: 0,

@@ -36,9 +40,10 @@ const fileFormat = winston.format.combine(
  winston.format.splat(),
);

const logLevel = useDebugLogging ? 'debug' : 'error';
const transports = [
  new winston.transports.DailyRotateFile({
    level: 'debug',
    level: logLevel,
    filename: `${logDir}/meiliSync-%DATE%.log`,
    datePattern: 'YYYY-MM-DD',
    zippedArchive: true,

@@ -48,14 +53,6 @@ const transports = [
  }),
];

// if (NODE_ENV !== 'production') {
//   transports.push(
//     new winston.transports.Console({
//       format: winston.format.combine(winston.format.colorize(), winston.format.simple()),
//     }),
//   );
// }

const consoleFormat = winston.format.combine(
  winston.format.colorize({ all: true }),
  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
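The hunks above tie the meiliSync file transport's level to the DEBUG_LOGGING flag instead of always writing at 'debug'. A minimal standalone sketch of the same pattern (the 'logs/' path and logger shape here are illustrative, not the project's exact module):

const winston = require('winston');
require('winston-daily-rotate-file');

// Normalize the env flag: accept the string 'true' (any casing) or a literal boolean true
const { DEBUG_LOGGING = false } = process.env;
const useDebugLogging =
  (typeof DEBUG_LOGGING === 'string' && DEBUG_LOGGING.toLowerCase() === 'true') ||
  DEBUG_LOGGING === true;

const logger = winston.createLogger({
  transports: [
    new winston.transports.DailyRotateFile({
      // 'debug' only when DEBUG_LOGGING is enabled, otherwise errors only
      level: useDebugLogging ? 'debug' : 'error',
      filename: 'logs/meiliSync-%DATE%.log',
      datePattern: 'YYYY-MM-DD',
      zippedArchive: true,
    }),
  ],
});

logger.debug('written only when DEBUG_LOGGING=true');
logger.error('always written');

Running this with DEBUG_LOGGING=true in the environment makes the debug entry land in the rotated file; without it, only the error is recorded.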
@@ -5,7 +5,7 @@ const { redactFormat, redactMessage, debugTraverse, jsonTruncateFormat } = requi

const logDir = path.join(__dirname, '..', 'logs');

const { NODE_ENV, DEBUG_LOGGING = true, DEBUG_CONSOLE = false, CONSOLE_JSON = false } = process.env;
const { NODE_ENV, DEBUG_LOGGING = true, CONSOLE_JSON = false, DEBUG_CONSOLE = false } = process.env;

const useConsoleJson =
  (typeof CONSOLE_JSON === 'string' && CONSOLE_JSON?.toLowerCase() === 'true') ||

@@ -15,6 +15,10 @@ const useDebugConsole =
  (typeof DEBUG_CONSOLE === 'string' && DEBUG_CONSOLE?.toLowerCase() === 'true') ||
  DEBUG_CONSOLE === true;

const useDebugLogging =
  (typeof DEBUG_LOGGING === 'string' && DEBUG_LOGGING?.toLowerCase() === 'true') ||
  DEBUG_LOGGING === true;

const levels = {
  error: 0,
  warn: 1,

@@ -57,28 +61,9 @@ const transports = [
    maxFiles: '14d',
    format: fileFormat,
  }),
  // new winston.transports.DailyRotateFile({
  //   level: 'info',
  //   filename: `${logDir}/info-%DATE%.log`,
  //   datePattern: 'YYYY-MM-DD',
  //   zippedArchive: true,
  //   maxSize: '20m',
  //   maxFiles: '14d',
  // }),
];

// if (NODE_ENV !== 'production') {
//   transports.push(
//     new winston.transports.Console({
//       format: winston.format.combine(winston.format.colorize(), winston.format.simple()),
//     }),
//   );
// }

if (
  (typeof DEBUG_LOGGING === 'string' && DEBUG_LOGGING?.toLowerCase() === 'true') ||
  DEBUG_LOGGING === true
) {
if (useDebugLogging) {
  transports.push(
    new winston.transports.DailyRotateFile({
      level: 'debug',

@@ -107,10 +92,16 @@ const consoleFormat = winston.format.combine(
  }),
);

// Determine console log level
let consoleLogLevel = 'info';
if (useDebugConsole) {
  consoleLogLevel = 'debug';
}

if (useDebugConsole) {
  transports.push(
    new winston.transports.Console({
      level: 'debug',
      level: consoleLogLevel,
      format: useConsoleJson
        ? winston.format.combine(fileFormat, jsonTruncateFormat(), winston.format.json())
        : winston.format.combine(fileFormat, debugTraverse),

@@ -119,14 +110,14 @@ if (useDebugConsole) {
} else if (useConsoleJson) {
  transports.push(
    new winston.transports.Console({
      level: 'info',
      level: consoleLogLevel,
      format: winston.format.combine(fileFormat, jsonTruncateFormat(), winston.format.json()),
    }),
  );
} else {
  transports.push(
    new winston.transports.Console({
      level: 'info',
      level: consoleLogLevel,
      format: consoleFormat,
    }),
  );
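In the main logger, the DEBUG_LOGGING check is folded into a reusable useDebugLogging flag and all console transports now share a single consoleLogLevel instead of per-branch 'debug'/'info' literals. A simplified, standalone sketch of the console-transport decision (it omits the project's redact, traverse, and truncate formats):

const winston = require('winston');

const useDebugConsole = process.env.DEBUG_CONSOLE === 'true';
const useConsoleJson = process.env.CONSOLE_JSON === 'true';

// DEBUG_CONSOLE only raises the level; CONSOLE_JSON only switches the output format
const consoleLogLevel = useDebugConsole ? 'debug' : 'info';
const consoleTransport = new winston.transports.Console({
  level: consoleLogLevel,
  format: useConsoleJson
    ? winston.format.combine(winston.format.timestamp(), winston.format.json())
    : winston.format.combine(winston.format.colorize(), winston.format.simple()),
});

const logger = winston.createLogger({ transports: [consoleTransport] });
logger.info('always printed');
logger.debug('printed only when DEBUG_CONSOLE=true');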
@@ -46,6 +46,10 @@ const loadAgent = async ({ req, agent_id }) => {
    id: agent_id,
  });

  if (!agent) {
    return null;
  }

  if (agent.author.toString() === req.user.id) {
    return agent;
  }
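With the added guard, loadAgent now resolves to null for an unknown agent_id instead of throwing later when agent.author is read. A hypothetical caller sketch (getAgentOrThrow is not a project function, only an illustration of handling the null return):

async function getAgentOrThrow(loadAgent, req, agent_id) {
  const agent = await loadAgent({ req, agent_id });
  if (!agent) {
    // surface a clear error instead of a TypeError on agent.author
    throw new Error(`Agent "${agent_id}" not found`);
  }
  return agent;
}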
@@ -49,7 +49,7 @@
    "@langchain/google-genai": "^0.1.11",
    "@langchain/google-vertexai": "^0.2.2",
    "@langchain/textsplitters": "^0.1.0",
    "@librechat/agents": "^2.3.94",
    "@librechat/agents": "^2.3.95",
    "@librechat/data-schemas": "*",
    "@waylaidwanderer/fetch-event-source": "^3.0.1",
    "axios": "^1.8.2",
@@ -13,7 +13,6 @@ const {
  actionDomainSeparator,
} = require('librechat-data-provider');
const { refreshAccessToken } = require('~/server/services/TokenService');
const { isActionDomainAllowed } = require('~/server/services/domains');
const { logger, getFlowStateManager, sendEvent } = require('~/config');
const { encryptV2, decryptV2 } = require('~/server/utils/crypto');
const { getActions, deleteActions } = require('~/models/Action');

@@ -130,6 +129,7 @@ async function loadActionSets(searchParams) {
 * @param {string | undefined} [params.name] - The name of the tool.
 * @param {string | undefined} [params.description] - The description for the tool.
 * @param {import('zod').ZodTypeAny | undefined} [params.zodSchema] - The Zod schema for tool input validation/definition
 * @param {{ oauth_client_id?: string; oauth_client_secret?: string; }} params.encrypted - The encrypted values for the action.
 * @returns { Promise<typeof tool | { _call: (toolInput: Object | string) => unknown}> } An object with `_call` method to execute the tool input.
 */
async function createActionTool({

@@ -140,17 +140,8 @@ async function createActionTool({
  zodSchema,
  name,
  description,
  encrypted,
}) {
  const isDomainAllowed = await isActionDomainAllowed(action.metadata.domain);
  if (!isDomainAllowed) {
    return null;
  }
  const encrypted = {
    oauth_client_id: action.metadata.oauth_client_id,
    oauth_client_secret: action.metadata.oauth_client_secret,
  };
  action.metadata = await decryptMetadata(action.metadata);

  /** @type {(toolInput: Object | string, config: GraphRunnableConfig) => Promise<unknown>} */
  const _call = async (toolInput, config) => {
    try {

@@ -308,9 +299,8 @@ async function createActionTool({
      }
      return response.data;
    } catch (error) {
      const logMessage = `API call to ${action.metadata.domain} failed`;
      logAxiosError({ message: logMessage, error });
      throw error;
      const message = `API call to ${action.metadata.domain} failed:`;
      return logAxiosError({ message, error });
    }
  };

@@ -327,6 +317,27 @@ async function createActionTool({
  };
}

/**
 * Encrypts a sensitive value.
 * @param {string} value
 * @returns {Promise<string>}
 */
async function encryptSensitiveValue(value) {
  // Encode API key to handle special characters like ":"
  const encodedValue = encodeURIComponent(value);
  return await encryptV2(encodedValue);
}

/**
 * Decrypts a sensitive value.
 * @param {string} value
 * @returns {Promise<string>}
 */
async function decryptSensitiveValue(value) {
  const decryptedValue = await decryptV2(value);
  return decodeURIComponent(decryptedValue);
}

/**
 * Encrypts sensitive metadata values for an action.
 *

@@ -339,17 +350,19 @@ async function encryptMetadata(metadata) {
  // ServiceHttp
  if (metadata.auth && metadata.auth.type === AuthTypeEnum.ServiceHttp) {
    if (metadata.api_key) {
      encryptedMetadata.api_key = await encryptV2(metadata.api_key);
      encryptedMetadata.api_key = await encryptSensitiveValue(metadata.api_key);
    }
  }

  // OAuth
  else if (metadata.auth && metadata.auth.type === AuthTypeEnum.OAuth) {
    if (metadata.oauth_client_id) {
      encryptedMetadata.oauth_client_id = await encryptV2(metadata.oauth_client_id);
      encryptedMetadata.oauth_client_id = await encryptSensitiveValue(metadata.oauth_client_id);
    }
    if (metadata.oauth_client_secret) {
      encryptedMetadata.oauth_client_secret = await encryptV2(metadata.oauth_client_secret);
      encryptedMetadata.oauth_client_secret = await encryptSensitiveValue(
        metadata.oauth_client_secret,
      );
    }
  }

@@ -368,17 +381,19 @@ async function decryptMetadata(metadata) {
  // ServiceHttp
  if (metadata.auth && metadata.auth.type === AuthTypeEnum.ServiceHttp) {
    if (metadata.api_key) {
      decryptedMetadata.api_key = await decryptV2(metadata.api_key);
      decryptedMetadata.api_key = await decryptSensitiveValue(metadata.api_key);
    }
  }

  // OAuth
  else if (metadata.auth && metadata.auth.type === AuthTypeEnum.OAuth) {
    if (metadata.oauth_client_id) {
      decryptedMetadata.oauth_client_id = await decryptV2(metadata.oauth_client_id);
      decryptedMetadata.oauth_client_id = await decryptSensitiveValue(metadata.oauth_client_id);
    }
    if (metadata.oauth_client_secret) {
      decryptedMetadata.oauth_client_secret = await decryptV2(metadata.oauth_client_secret);
      decryptedMetadata.oauth_client_secret = await decryptSensitiveValue(
        metadata.oauth_client_secret,
      );
    }
  }
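The new helpers URL-encode before encrypting and decode after decrypting, so values containing characters such as ':' survive the round trip. A self-contained sketch with base64 standing in for the project's encryptV2/decryptV2 (assumption: the real helpers are async, string-in/string-out):

const encrypt = async (value) => Buffer.from(value, 'utf8').toString('base64'); // stand-in for encryptV2
const decrypt = async (value) => Buffer.from(value, 'base64').toString('utf8'); // stand-in for decryptV2

async function encryptSensitiveValue(value) {
  // Encode first so special characters like ":" round-trip cleanly
  return await encrypt(encodeURIComponent(value));
}

async function decryptSensitiveValue(value) {
  return decodeURIComponent(await decrypt(value));
}

(async () => {
  const secret = 'sk-abc:123/456';
  const stored = await encryptSensitiveValue(secret);
  console.log((await decryptSensitiveValue(stored)) === secret); // true
})();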
@@ -32,11 +32,12 @@ async function getCodeOutputDownloadStream(fileIdentifier, apiKey) {
    const response = await axios(options);
    return response;
  } catch (error) {
    logAxiosError({
      message: `Error downloading code environment file stream: ${error.message}`,
      error,
    });
    throw new Error(`Error downloading file: ${error.message}`);
    throw new Error(
      logAxiosError({
        message: `Error downloading code environment file stream: ${error.message}`,
        error,
      }),
    );
  }
}

@@ -89,11 +90,12 @@ async function uploadCodeEnvFile({ req, stream, filename, apiKey, entity_id = ''

    return `${fileIdentifier}?entity_id=${entity_id}`;
  } catch (error) {
    logAxiosError({
      message: `Error uploading code environment file: ${error.message}`,
      error,
    });
    throw new Error(`Error uploading code environment file: ${error.message}`);
    throw new Error(
      logAxiosError({
        message: `Error uploading code environment file: ${error.message}`,
        error,
      }),
    );
  }
}
@@ -5,7 +5,7 @@ const FormData = require('form-data');
const { FileSources, envVarRegex, extractEnvVariable } = require('librechat-data-provider');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { logger, createAxiosInstance } = require('~/config');
const { logAxiosError } = require('~/utils');
const { logAxiosError } = require('~/utils/axios');

const axios = createAxiosInstance();

@@ -194,8 +194,7 @@ const uploadMistralOCR = async ({ req, file, file_id, entity_id }) => {
    };
  } catch (error) {
    const message = 'Error uploading document to Mistral OCR API';
    logAxiosError({ error, message });
    throw new Error(message);
    throw new Error(logAxiosError({ error, message }));
  }
};
@@ -29,9 +29,6 @@ const mockAxios = {

jest.mock('axios', () => mockAxios);
jest.mock('fs');
jest.mock('~/utils', () => ({
  logAxiosError: jest.fn(),
}));
jest.mock('~/config', () => ({
  logger: {
    error: jest.fn(),

@@ -494,9 +491,6 @@ describe('MistralOCR Service', () => {
      }),
    ).rejects.toThrow('Error uploading document to Mistral OCR API');
    expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');

    const { logAxiosError } = require('~/utils');
    expect(logAxiosError).toHaveBeenCalled();
  });

  it('should handle single page documents without page numbering', async () => {
@@ -55,8 +55,7 @@ async function retrieveRun({ thread_id, run_id, timeout, openai }) {
    return response.data;
  } catch (error) {
    const message = '[retrieveRun] Failed to retrieve run data:';
    logAxiosError({ message, error });
    throw error;
    throw new Error(logAxiosError({ message, error }));
  }
}
@@ -93,11 +93,12 @@ const refreshAccessToken = async ({
    return response.data;
  } catch (error) {
    const message = 'Error refreshing OAuth tokens';
    logAxiosError({
      message,
      error,
    });
    throw new Error(message);
    throw new Error(
      logAxiosError({
        message,
        error,
      }),
    );
  }
};

@@ -156,11 +157,12 @@ const getAccessToken = async ({
    return response.data;
  } catch (error) {
    const message = 'Error exchanging OAuth code';
    logAxiosError({
      message,
      error,
    });
    throw new Error(message);
    throw new Error(
      logAxiosError({
        message,
        error,
      }),
    );
  }
};
@@ -15,9 +15,15 @@ const {
  AgentCapabilities,
  validateAndParseOpenAPISpec,
} = require('librechat-data-provider');
const {
  loadActionSets,
  createActionTool,
  decryptMetadata,
  domainParser,
} = require('./ActionService');
const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process');
const { createYouTubeTools, manifestToolMap, toolkits } = require('~/app/clients/tools');
const { loadActionSets, createActionTool, domainParser } = require('./ActionService');
const { isActionDomainAllowed } = require('~/server/services/domains');
const { getEndpointsConfig } = require('~/server/services/Config');
const { recordUsage } = require('~/server/services/Threads');
const { loadTools } = require('~/app/clients/tools/util');

@@ -315,58 +321,95 @@ async function processRequiredActions(client, requiredActions) {
    if (!tool) {
      // throw new Error(`Tool ${currentAction.tool} not found.`);

      // Load all action sets once if not already loaded
      if (!actionSets.length) {
        actionSets =
          (await loadActionSets({
            assistant_id: client.req.body.assistant_id,
          })) ?? [];

        // Process all action sets once
        // Map domains to their processed action sets
        const processedDomains = new Map();
        const domainMap = new Map();

        for (const action of actionSets) {
          const domain = await domainParser(client.req, action.metadata.domain, true);
          domainMap.set(domain, action);

          // Check if domain is allowed
          const isDomainAllowed = await isActionDomainAllowed(action.metadata.domain);
          if (!isDomainAllowed) {
            continue;
          }

          // Validate and parse OpenAPI spec
          const validationResult = validateAndParseOpenAPISpec(action.metadata.raw_spec);
          if (!validationResult.spec) {
            throw new Error(
              `Invalid spec: user: ${client.req.user.id} | thread_id: ${requiredActions[0].thread_id} | run_id: ${requiredActions[0].run_id}`,
            );
          }

          // Process the OpenAPI spec
          const { requestBuilders } = openapiToFunction(validationResult.spec);

          // Store encrypted values for OAuth flow
          const encrypted = {
            oauth_client_id: action.metadata.oauth_client_id,
            oauth_client_secret: action.metadata.oauth_client_secret,
          };

          // Decrypt metadata
          const decryptedAction = { ...action };
          decryptedAction.metadata = await decryptMetadata(action.metadata);

          processedDomains.set(domain, {
            action: decryptedAction,
            requestBuilders,
            encrypted,
          });

          // Store builders for reuse
          ActionBuildersMap[action.metadata.domain] = requestBuilders;
        }

        // Update actionSets reference to use the domain map
        actionSets = { domainMap, processedDomains };
      }

      let actionSet = null;
      // Find the matching domain for this tool
      let currentDomain = '';
      for (let action of actionSets) {
        const domain = await domainParser(client.req, action.metadata.domain, true);
      for (const domain of actionSets.domainMap.keys()) {
        if (currentAction.tool.includes(domain)) {
          currentDomain = domain;
          actionSet = action;
          break;
        }
      }

      if (!actionSet) {
      if (!currentDomain || !actionSets.processedDomains.has(currentDomain)) {
        // TODO: try `function` if no action set is found
        // throw new Error(`Tool ${currentAction.tool} not found.`);
        continue;
      }

      let builders = ActionBuildersMap[actionSet.metadata.domain];

      if (!builders) {
        const validationResult = validateAndParseOpenAPISpec(actionSet.metadata.raw_spec);
        if (!validationResult.spec) {
          throw new Error(
            `Invalid spec: user: ${client.req.user.id} | thread_id: ${requiredActions[0].thread_id} | run_id: ${requiredActions[0].run_id}`,
          );
        }
        const { requestBuilders } = openapiToFunction(validationResult.spec);
        ActionToolMap[actionSet.metadata.domain] = requestBuilders;
        builders = requestBuilders;
      }

      const { action, requestBuilders, encrypted } = actionSets.processedDomains.get(currentDomain);
      const functionName = currentAction.tool.replace(`${actionDelimiter}${currentDomain}`, '');

      const requestBuilder = builders[functionName];
      const requestBuilder = requestBuilders[functionName];

      if (!requestBuilder) {
        // throw new Error(`Tool ${currentAction.tool} not found.`);
        continue;
      }

      // We've already decrypted the metadata, so we can pass it directly
      tool = await createActionTool({
        req: client.req,
        res: client.res,
        action: actionSet,
        action,
        requestBuilder,
        // Note: intentionally not passing zodSchema, name, and description for assistants API
        encrypted, // Pass the encrypted values for OAuth flow
      });
      if (!tool) {
        logger.warn(

@@ -511,7 +554,62 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
    };
  }

  let actionSets = [];
  const actionSets = (await loadActionSets({ agent_id: agent.id })) ?? [];
  if (actionSets.length === 0) {
    if (_agentTools.length > 0 && agentTools.length === 0) {
      logger.warn(`No tools found for the specified tool calls: ${_agentTools.join(', ')}`);
    }
    return {
      tools: agentTools,
      toolContextMap,
    };
  }

  // Process each action set once (validate spec, decrypt metadata)
  const processedActionSets = new Map();
  const domainMap = new Map();

  for (const action of actionSets) {
    const domain = await domainParser(req, action.metadata.domain, true);
    domainMap.set(domain, action);

    // Check if domain is allowed (do this once per action set)
    const isDomainAllowed = await isActionDomainAllowed(action.metadata.domain);
    if (!isDomainAllowed) {
      continue;
    }

    // Validate and parse OpenAPI spec once per action set
    const validationResult = validateAndParseOpenAPISpec(action.metadata.raw_spec);
    if (!validationResult.spec) {
      continue;
    }

    const encrypted = {
      oauth_client_id: action.metadata.oauth_client_id,
      oauth_client_secret: action.metadata.oauth_client_secret,
    };

    // Decrypt metadata once per action set
    const decryptedAction = { ...action };
    decryptedAction.metadata = await decryptMetadata(action.metadata);

    // Process the OpenAPI spec once per action set
    const { requestBuilders, functionSignatures, zodSchemas } = openapiToFunction(
      validationResult.spec,
      true,
    );

    processedActionSets.set(domain, {
      action: decryptedAction,
      requestBuilders,
      functionSignatures,
      zodSchemas,
      encrypted,
    });
  }

  // Now map tools to the processed action sets
  const ActionToolMap = {};

  for (const toolName of _agentTools) {

@@ -519,55 +617,47 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
      continue;
    }

    if (!actionSets.length) {
      actionSets = (await loadActionSets({ agent_id: agent.id })) ?? [];
    }

    let actionSet = null;
    // Find the matching domain for this tool
    let currentDomain = '';
    for (let action of actionSets) {
      const domain = await domainParser(req, action.metadata.domain, true);
    for (const domain of domainMap.keys()) {
      if (toolName.includes(domain)) {
        currentDomain = domain;
        actionSet = action;
        break;
      }
    }

    if (!actionSet) {
    if (!currentDomain || !processedActionSets.has(currentDomain)) {
      continue;
    }

    const validationResult = validateAndParseOpenAPISpec(actionSet.metadata.raw_spec);
    if (validationResult.spec) {
      const { requestBuilders, functionSignatures, zodSchemas } = openapiToFunction(
        validationResult.spec,
        true,
      );
      const functionName = toolName.replace(`${actionDelimiter}${currentDomain}`, '');
      const functionSig = functionSignatures.find((sig) => sig.name === functionName);
      const requestBuilder = requestBuilders[functionName];
      const zodSchema = zodSchemas[functionName];
    const { action, encrypted, zodSchemas, requestBuilders, functionSignatures } =
      processedActionSets.get(currentDomain);
    const functionName = toolName.replace(`${actionDelimiter}${currentDomain}`, '');
    const functionSig = functionSignatures.find((sig) => sig.name === functionName);
    const requestBuilder = requestBuilders[functionName];
    const zodSchema = zodSchemas[functionName];

    if (requestBuilder) {
      const tool = await createActionTool({
        req,
        res,
        action: actionSet,
        requestBuilder,
        zodSchema,
        name: toolName,
        description: functionSig.description,
      });
      if (!tool) {
        logger.warn(
          `Invalid action: user: ${req.user.id} | agent_id: ${agent.id} | toolName: ${toolName}`,
        );
        throw new Error(`{"type":"${ErrorTypes.INVALID_ACTION}"}`);
      }
      agentTools.push(tool);
      ActionToolMap[toolName] = tool;
    if (requestBuilder) {
      const tool = await createActionTool({
        req,
        res,
        action,
        requestBuilder,
        zodSchema,
        encrypted,
        name: toolName,
        description: functionSig.description,
      });

      if (!tool) {
        logger.warn(
          `Invalid action: user: ${req.user.id} | agent_id: ${agent.id} | toolName: ${toolName}`,
        );
        throw new Error(`{"type":"${ErrorTypes.INVALID_ACTION}"}`);
      }

      agentTools.push(tool);
      ActionToolMap[toolName] = tool;
    }
  }
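Both the Assistants and the agent code paths above now parse, validate, and decrypt each action set exactly once, keyed by domain, and every tool name is then resolved against that cache instead of re-running the OpenAPI work per tool. A simplified, hypothetical sketch of the pattern (processActionSet stands in for the validate/decrypt/openapiToFunction step, and the entry shape is illustrative):

async function buildDomainCache(actionSets, processActionSet) {
  const processed = new Map();
  for (const action of actionSets) {
    // one pass per action set: validate spec, decrypt metadata, build request builders
    const { domain, entry } = await processActionSet(action);
    if (entry) {
      processed.set(domain, entry);
    }
  }
  return processed;
}

function resolveTool(toolName, processed) {
  // per-tool lookup is a Map scan instead of re-parsing the OpenAPI spec
  for (const [domain, entry] of processed) {
    if (toolName.includes(domain)) {
      return { domain, entry };
    }
  }
  return null;
}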
@@ -6,32 +6,41 @@ const { logger } = require('~/config');
 * @param {Object} options - The options object.
 * @param {string} options.message - The custom message to be logged.
 * @param {import('axios').AxiosError} options.error - The Axios error object.
 * @returns {string} The log message.
 */
const logAxiosError = ({ message, error }) => {
  let logMessage = message;
  try {
    const stack = error.stack || 'No stack trace available';

    if (error.response?.status) {
      const { status, headers, data } = error.response;
      logger.error(`${message} The server responded with status ${status}: ${error.message}`, {
      logMessage = `${message} The server responded with status ${status}: ${error.message}`;
      logger.error(logMessage, {
        status,
        headers,
        data,
        stack,
      });
    } else if (error.request) {
      const { method, url } = error.config || {};
      logger.error(
        `${message} No response received for ${method ? method.toUpperCase() : ''} ${url || ''}: ${error.message}`,
        { requestInfo: { method, url } },
      );
      logMessage = `${message} No response received for ${method ? method.toUpperCase() : ''} ${url || ''}: ${error.message}`;
      logger.error(logMessage, {
        requestInfo: { method, url },
        stack,
      });
    } else if (error?.message?.includes('Cannot read properties of undefined (reading \'status\')')) {
      logger.error(
        `${message} It appears the request timed out or was unsuccessful: ${error.message}`,
      );
      logMessage = `${message} It appears the request timed out or was unsuccessful: ${error.message}`;
      logger.error(logMessage, { stack });
    } else {
      logger.error(`${message} An error occurred while setting up the request: ${error.message}`);
      logMessage = `${message} An error occurred while setting up the request: ${error.message}`;
      logger.error(logMessage, { stack });
    }
  } catch (err) {
    logMessage = `Error in logAxiosError: ${err.message}`;
    logger.error(logMessage, { stack: err.stack || 'No stack trace available' });
  }
  return logMessage;
};

module.exports = { logAxiosError };
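Because logAxiosError now returns the composed message (and records the stack trace in the log metadata), the call sites updated throughout this commit can log and rethrow in one expression. A reduced, self-contained sketch of that call-site pattern (the logger and the stripped-down logAxiosError here are placeholders, not the project's implementations):

const logger = { error: (msg, meta) => console.error(msg, meta ?? '') };

// Reduced stand-in for the real logAxiosError: log, then return the message
const logAxiosError = ({ message, error }) => {
  const logMessage = `${message} ${error.message}`;
  logger.error(logMessage, { stack: error.stack });
  return logMessage;
};

async function fetchResource(doRequest) {
  try {
    return await doRequest();
  } catch (error) {
    // the pattern used in this commit: throw new Error(logAxiosError({ ... }))
    throw new Error(logAxiosError({ message: 'API call failed:', error }));
  }
}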