Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-09-22 08:12:00 +02:00

* 🔧 refactor: move `processMCPEnv` from `librechat-data-provider` to `@librechat/api`
* 🔧 refactor: Update resolveHeaders import paths
* 🔧 refactor: Enhance resolveHeaders to support user and custom variables
  - Updated resolveHeaders function to accept user and custom user variables for placeholder replacement.
  - Modified header resolution in multiple client and controller files to utilize the enhanced resolveHeaders functionality.
  - Added comprehensive tests for resolveHeaders to ensure correct processing of user and custom variables.
* 🔧 fix: Update user ID placeholder processing in env.ts
* 🔧 fix: Remove arguments passing this.user rather than req.user
  - Updated multiple client and controller files to call resolveHeaders without the user parameter
* 🔧 refactor: Enhance processUserPlaceholders to be more readable / less nested
* 🔧 refactor: Update processUserPlaceholders to pass all tests in mcp.spec.ts and env.spec.ts
* chore: remove legacy ChatGPTClient
* chore: remove LLM initialization code
* chore: initial deprecation removal of `gptPlugins`
* chore: remove cohere-ai dependency from package.json and package-lock.json
* chore: update brace-expansion to version 2.0.2 and add license information
* chore: remove PluginsClient test file
* chore: remove legacy
* ci: remove deprecated sendMessage/getCompletion/chatCompletion tests

---------

Co-authored-by: Dustin Healy <54083382+dustinhealy@users.noreply.github.com>
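The resolveHeaders change described above substitutes the user and custom user variables into header values before requests are sent. As a rough illustration only, a minimal sketch of that kind of placeholder replacement follows; the function name, option shape, and placeholder syntax here are assumptions made for the sketch, not the actual `@librechat/api` API.

// Hypothetical sketch of header placeholder resolution.
// The {{LIBRECHAT_USER_ID}} / {{MY_API_KEY}} placeholder names and the
// option shape are assumptions for illustration, not LibreChat's API.
function resolveHeadersSketch({ headers, user, customUserVars }) {
  const resolved = {};
  for (const [key, value] of Object.entries(headers ?? {})) {
    let next = String(value);
    // Replace the user ID placeholder when a user is available.
    if (user?.id != null) {
      next = next.replace(/\{\{LIBRECHAT_USER_ID\}\}/g, user.id);
    }
    // Replace each custom user variable, e.g. {{MY_API_KEY}}.
    for (const [name, varValue] of Object.entries(customUserVars ?? {})) {
      next = next.split(`{{${name}}}`).join(varValue);
    }
    resolved[key] = next;
  }
  return resolved;
}

// Example:
// resolveHeadersSketch({
//   headers: { 'X-User-Id': '{{LIBRECHAT_USER_ID}}', Authorization: 'Bearer {{MY_API_KEY}}' },
//   user: { id: 'abc123' },
//   customUserVars: { MY_API_KEY: 'secret' },
// });
// -> { 'X-User-Id': 'abc123', Authorization: 'Bearer secret' }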
101 lines
3.6 KiB
JavaScript
const { logger } = require('@librechat/data-schemas');
const {
  EndpointURLs,
  EModelEndpoint,
  isAgentsEndpoint,
  parseCompactConvo,
} = require('librechat-data-provider');
const azureAssistants = require('~/server/services/Endpoints/azureAssistants');
const assistants = require('~/server/services/Endpoints/assistants');
const { processFiles } = require('~/server/services/Files/process');
const anthropic = require('~/server/services/Endpoints/anthropic');
const bedrock = require('~/server/services/Endpoints/bedrock');
const openAI = require('~/server/services/Endpoints/openAI');
const agents = require('~/server/services/Endpoints/agents');
const custom = require('~/server/services/Endpoints/custom');
const google = require('~/server/services/Endpoints/google');
const { handleError } = require('~/server/utils');

const buildFunction = {
  [EModelEndpoint.openAI]: openAI.buildOptions,
  [EModelEndpoint.google]: google.buildOptions,
  [EModelEndpoint.custom]: custom.buildOptions,
  [EModelEndpoint.agents]: agents.buildOptions,
  [EModelEndpoint.bedrock]: bedrock.buildOptions,
  [EModelEndpoint.azureOpenAI]: openAI.buildOptions,
  [EModelEndpoint.anthropic]: anthropic.buildOptions,
  [EModelEndpoint.assistants]: assistants.buildOptions,
  [EModelEndpoint.azureAssistants]: azureAssistants.buildOptions,
};

async function buildEndpointOption(req, res, next) {
  const { endpoint, endpointType } = req.body;
  let parsedBody;
  try {
    parsedBody = parseCompactConvo({ endpoint, endpointType, conversation: req.body });
  } catch (error) {
    logger.warn(
      `Error parsing conversation for endpoint ${endpoint}${error?.message ? `: ${error.message}` : ''}`,
    );
    return handleError(res, { text: 'Error parsing conversation' });
  }

  if (req.app.locals.modelSpecs?.list && req.app.locals.modelSpecs?.enforce) {
    /** @type {{ list: TModelSpec[] }}*/
    const { list } = req.app.locals.modelSpecs;
    const { spec } = parsedBody;

    if (!spec) {
      return handleError(res, { text: 'No model spec selected' });
    }

    const currentModelSpec = list.find((s) => s.name === spec);
    if (!currentModelSpec) {
      return handleError(res, { text: 'Invalid model spec' });
    }

    if (endpoint !== currentModelSpec.preset.endpoint) {
      return handleError(res, { text: 'Model spec mismatch' });
    }

    try {
      currentModelSpec.preset.spec = spec;
      if (currentModelSpec.iconURL != null && currentModelSpec.iconURL !== '') {
        currentModelSpec.preset.iconURL = currentModelSpec.iconURL;
      }
      parsedBody = parseCompactConvo({
        endpoint,
        endpointType,
        conversation: currentModelSpec.preset,
      });
    } catch (error) {
      logger.error(`Error parsing model spec for endpoint ${endpoint}`, error);
      return handleError(res, { text: 'Error parsing model spec' });
    }
  }

  try {
    const isAgents =
      isAgentsEndpoint(endpoint) || req.baseUrl.startsWith(EndpointURLs[EModelEndpoint.agents]);
    const builder = isAgents
      ? (...args) => buildFunction[EModelEndpoint.agents](req, ...args)
      : buildFunction[endpointType ?? endpoint];

    // TODO: use object params
    req.body.endpointOption = await builder(endpoint, parsedBody, endpointType);

    if (req.body.files && !isAgents) {
      req.body.endpointOption.attachments = processFiles(req.body.files);
    }

    next();
  } catch (error) {
    logger.error(
      `Error building endpoint option for endpoint ${endpoint} with type ${endpointType}`,
      error,
    );
    return handleError(res, { text: 'Error building endpoint option' });
  }
}

module.exports = buildEndpointOption;
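For context, `buildEndpointOption` is an Express-style middleware: it validates and parses the request body, attaches the result to `req.body.endpointOption`, and either calls `next()` or responds with an error. A minimal sketch of how a route might mount it follows; the route path, require path, and downstream handler are assumptions for illustration, not LibreChat's actual routing.

// Hypothetical mounting of the middleware in an Express router.
// The route path and the require path (mirroring the '~/' alias used above)
// are illustrative assumptions.
const express = require('express');
const buildEndpointOption = require('~/server/middleware/buildEndpointOption');

const router = express.Router();

router.post('/ask/:endpoint', buildEndpointOption, (req, res) => {
  // By the time this handler runs, the middleware has attached the parsed
  // options to req.body.endpointOption (or has already responded with an error).
  // For non-agent requests with files, endpointOption.attachments holds the
  // pending promise returned by processFiles.
  res.json({ hasEndpointOption: req.body.endpointOption != null });
});

module.exports = router;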