LibreChat/api/server/middleware/buildEndpointOption.js
Commit 683702d555 by Danny Avila
🤖 refactor: Remove Default Model Params for All Endpoints (#3682)
* refactor: use parseCompactConvo in buildOptions, and generate no default values for the API to avoid weird model behavior with defaults

* refactor: OTHER - always show cursor when the markdown component is empty (preferable to not showing one)

* refactor(OpenAISettings): use config object for setting defaults app-wide

* refactor: Use removeNullishValues in buildOptions for ALL endpoints

* fix: add missing conversationId to title methods for transactions; refactor(GoogleClient): model options, set no default, add todo note for recording token usage

* fix: at minimum set a model default, as is required by API (edge case)
2024-08-18 06:00:03 -04:00
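
The refactor described in this commit removes hard-coded default model parameters from every endpoint's buildOptions: only values the user actually set are forwarded, and nullish fields are stripped so the provider API falls back to its own defaults. Below is a minimal sketch of that pattern; it assumes removeNullishValues is exported from librechat-data-provider alongside parseCompactConvo, and the option fields shown are illustrative rather than the exact shape of any one endpoint's builder.

const { removeNullishValues } = require('librechat-data-provider');

// Illustrative builder: unset fields are dropped instead of being replaced
// with hard-coded defaults, so the upstream API decides the behavior.
const buildExampleOptions = (endpoint, parsedBody) => {
  const { model, temperature, top_p, promptPrefix, ...rest } = parsedBody;
  return removeNullishValues({
    endpoint,
    promptPrefix,
    modelOptions: removeNullishValues({ model, temperature, top_p }),
    ...rest,
  });
};

// temperature and top_p were never set, so they do not appear in the output:
// { endpoint: 'openAI', modelOptions: { model: 'gpt-4o-mini' } }
console.log(buildExampleOptions('openAI', { model: 'gpt-4o-mini' }));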

const { parseCompactConvo, EModelEndpoint } = require('librechat-data-provider');
const { getModelsConfig } = require('~/server/controllers/ModelController');
const azureAssistants = require('~/server/services/Endpoints/azureAssistants');
const assistants = require('~/server/services/Endpoints/assistants');
const gptPlugins = require('~/server/services/Endpoints/gptPlugins');
const { processFiles } = require('~/server/services/Files/process');
const anthropic = require('~/server/services/Endpoints/anthropic');
const openAI = require('~/server/services/Endpoints/openAI');
const custom = require('~/server/services/Endpoints/custom');
const google = require('~/server/services/Endpoints/google');
const enforceModelSpec = require('./enforceModelSpec');
const { handleError } = require('~/server/utils');

/** Maps each supported endpoint (or endpoint type) to its options builder. */
const buildFunction = {
  [EModelEndpoint.openAI]: openAI.buildOptions,
  [EModelEndpoint.google]: google.buildOptions,
  [EModelEndpoint.custom]: custom.buildOptions,
  [EModelEndpoint.azureOpenAI]: openAI.buildOptions,
  [EModelEndpoint.anthropic]: anthropic.buildOptions,
  [EModelEndpoint.gptPlugins]: gptPlugins.buildOptions,
  [EModelEndpoint.assistants]: assistants.buildOptions,
  [EModelEndpoint.azureAssistants]: azureAssistants.buildOptions,
};

/**
 * Express middleware: parses the compact conversation payload, optionally
 * enforces a configured model spec, and attaches the endpoint-specific options
 * (plus models config and any file-processing promise) to `req.body.endpointOption`.
 */
async function buildEndpointOption(req, res, next) {
  const { endpoint, endpointType } = req.body;
  const parsedBody = parseCompactConvo({ endpoint, endpointType, conversation: req.body });

  // When model specs are enforced, the request must match a configured spec.
  if (req.app.locals.modelSpecs?.list && req.app.locals.modelSpecs?.enforce) {
    /** @type {{ list: TModelSpec[] }}*/
    const { list } = req.app.locals.modelSpecs;
    const { spec } = parsedBody;

    if (!spec) {
      return handleError(res, { text: 'No model spec selected' });
    }

    const currentModelSpec = list.find((s) => s.name === spec);
    if (!currentModelSpec) {
      return handleError(res, { text: 'Invalid model spec' });
    }

    if (endpoint !== currentModelSpec.preset.endpoint) {
      return handleError(res, { text: 'Model spec mismatch' });
    }

    if (
      currentModelSpec.preset.endpoint !== EModelEndpoint.gptPlugins &&
      currentModelSpec.preset.tools
    ) {
      return handleError(res, {
        text: `Only the "${EModelEndpoint.gptPlugins}" endpoint can have tools defined in the preset`,
      });
    }

    const isValidModelSpec = enforceModelSpec(currentModelSpec, parsedBody);
    if (!isValidModelSpec) {
      return handleError(res, { text: 'Model spec mismatch' });
    }
  }

  // Dispatch to the matching builder; endpointType takes precedence (e.g., custom-compatible endpoints).
  req.body.endpointOption = buildFunction[endpointType ?? endpoint](
    endpoint,
    parsedBody,
    endpointType,
  );

  const modelsConfig = await getModelsConfig(req);
  req.body.endpointOption.modelsConfig = modelsConfig;

  if (req.body.files) {
    // hold the promise (not awaited here)
    req.body.endpointOption.attachments = processFiles(req.body.files);
  }

  next();
}

module.exports = buildEndpointOption;
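
For context, here is a hypothetical sketch of how this middleware could be mounted ahead of a chat route; the route path and handler are assumptions for illustration only, since LibreChat's real routes are defined elsewhere under api/server/routes.

const express = require('express');
const buildEndpointOption = require('~/server/middleware/buildEndpointOption');

const router = express.Router();

// Hypothetical route: by the time the handler runs, the middleware has attached
// the parsed, nullish-free options (plus modelsConfig and an optional
// attachments promise) to req.body.endpointOption.
router.post('/ask', buildEndpointOption, (req, res) => {
  const { endpointOption } = req.body;
  res.json({ endpoint: req.body.endpoint, hasOptions: Boolean(endpointOption) });
});

module.exports = router;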