const { logger } = require('@librechat/data-schemas');
const { EModelEndpoint } = require('librechat-data-provider');
const {
  getAnthropicModels,
  getBedrockModels,
  getOpenAIModels,
  getGoogleModels,
} = require('~/server/services/ModelService');

/**
 * Loads the default models for the application.
 * @async
 * @function
 * @param {Express.Request} req - The Express request object, used to scope model fetches to the authenticated user.
 * @returns {Promise<Object>} A map of endpoint names to their available model lists.
 */
async function loadDefaultModels(req) {
  try {
    const [openAI, anthropic, azureOpenAI, assistants, azureAssistants, google, bedrock] =
      await Promise.all([
        getOpenAIModels({ user: req.user.id }).catch((error) => {
          logger.error('Error fetching OpenAI models:', error);
          return [];
        }),
        getAnthropicModels({ user: req.user.id }).catch((error) => {
          logger.error('Error fetching Anthropic models:', error);
          return [];
        }),
        getOpenAIModels({ user: req.user.id, azure: true }).catch((error) => {
          logger.error('Error fetching Azure OpenAI models:', error);
          return [];
        }),
        getOpenAIModels({ assistants: true }).catch((error) => {
          logger.error('Error fetching OpenAI Assistants API models:', error);
          return [];
        }),
        getOpenAIModels({ azureAssistants: true }).catch((error) => {
          logger.error('Error fetching Azure OpenAI Assistants API models:', error);
          return [];
        }),
        Promise.resolve(getGoogleModels()).catch((error) => {
          logger.error('Error getting Google models:', error);
          return [];
        }),
        Promise.resolve(getBedrockModels()).catch((error) => {
          logger.error('Error getting Bedrock models:', error);
          return [];
        }),
      ]);

    return {
      [EModelEndpoint.openAI]: openAI,
      [EModelEndpoint.google]: google,
      [EModelEndpoint.anthropic]: anthropic,
      [EModelEndpoint.azureOpenAI]: azureOpenAI,
      [EModelEndpoint.assistants]: assistants,
      [EModelEndpoint.azureAssistants]: azureAssistants,
      [EModelEndpoint.bedrock]: bedrock,
    };
  } catch (error) {
    logger.error('Error fetching default models:', error);
    throw new Error(`Failed to load default models: ${error.message}`);
  }
}

module.exports = loadDefaultModels;
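
/**
 * Example usage (sketch): in practice this loader would be called from a
 * models/config endpoint handler. The route wiring below is an assumption
 * for illustration only, not this project's actual handler.
 *
 *   const loadDefaultModels = require('./loadDefaultModels');
 *
 *   router.get('/models', async (req, res) => {
 *     // Each provider fetch falls back to [] on error, so a partial result is still returned.
 *     const modelsConfig = await loadDefaultModels(req);
 *     res.status(200).send(modelsConfig);
 *   });
 */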