LibreChat/api/server/routes/assistants/assistants.js
Danny Avila 5cd5c3bef8
🅰️ feat: Azure OpenAI Assistants API Support (#1992)
* chore: rename dir from `assistant` to plural

* feat: `assistants` field for azure config, spread options in AppService

* refactor: rename constructAzureURL param for azure as `azureOptions`

* chore: bump openai and bun

* chore(loadDefaultModels): change naming of assistant -> assistants

* feat: load azure settings with correct baseURL for assistants' initializeClient

* refactor: add `assistants` flags to groups and model configs, add mapGroupToAzureConfig

* feat(loadConfigEndpoints): initialize assistants endpoint if azure flag `assistants` is enabled

* feat(AppService): determine assistant models on startup, throw Error if none

* refactor(useDeleteAssistantMutation): send model along with assistant id for delete mutations

* feat: support listing and deleting assistants with azure

* feat: add model query to assistant avatar upload

* feat: add azure support for retrieveRun method

* refactor: update OpenAIClient initialization

* chore: update README

* fix(ci): tests passing

* refactor(uploadOpenAIFile): improve logging and use more efficient REST API method

* refactor(useFileHandling): add model to metadata to target Azure region compatible with current model

* chore(files): add azure naming pattern for valid file id recognition

* fix(assistants): initialize openai with first available assistant model if none provided

* refactor(uploadOpenAIFile): add content type for azure, initialize formdata before azure options

* refactor(sleep): move sleep function out of Runs and into `~/server/utils`

* fix(azureOpenAI/assistants): make sure to only overwrite models with assistant models if `assistants` flag is enabled

* refactor(uploadOpenAIFile): revert to old method

* chore(uploadOpenAIFile): use enum for file purpose

* docs: azureOpenAI update guide with more info, examples

* feat: enable/disable assistant capabilities and specify retrieval models

* refactor: optional chain conditional statement in loadConfigModels.js

* docs: add assistants examples

* chore: update librechat.example.yaml

* docs(azure): update note of file upload behavior in Azure OpenAI Assistants

* chore: update docs and add descriptive message about assistant errors

* fix: prevent message submission with invalid assistant or if files loading

* style: update Landing icon & text when assistant is not selected

* chore: bump librechat-data-provider to 0.4.8

* fix(assistants/azure): assign req.body.model for proper azure init to abort runs
2024-03-14 17:21:42 -04:00

const multer = require('multer');
const express = require('express');
const { FileContext, EModelEndpoint } = require('librechat-data-provider');
const {
initializeClient,
listAssistantsForAzure,
listAssistants,
} = require('~/server/services/Endpoints/assistants');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { uploadImageBuffer } = require('~/server/services/Files/process');
const { updateAssistant, getAssistants } = require('~/models/Assistant');
const { deleteFileByFilter } = require('~/models/File');
const { logger } = require('~/config');
const actions = require('./actions');
const tools = require('./tools');
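// Multer with default in-memory storage; used by the avatar upload route below.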
const upload = multer();
const router = express.Router();

/**
* Assistant actions route.
* @route GET|POST /assistants/actions
*/
router.use('/actions', actions);

/**
* Returns a list of available tools for assistants.
* @route GET /assistants/tools
* @returns {TPlugin[]} 200 - application/json
*/
router.use('/tools', tools);

/**
* Create an assistant.
* @route POST /assistants
* @param {AssistantCreateParams} req.body - The assistant creation parameters.
* @returns {Assistant} 201 - success response - application/json
*/
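// Illustrative request body (hypothetical values); entries in `tools` may be full
// tool objects or names of tools registered in req.app.locals.availableTools:
// { "name": "Helper", "model": "gpt-4-turbo", "instructions": "You answer questions.", "tools": [{ "type": "code_interpreter" }] }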
router.post('/', async (req, res) => {
try {
/** @type {{ openai: OpenAI }} */
const { openai } = await initializeClient({ req, res });
const { tools = [], ...assistantData } = req.body;
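// Resolve tool names to their full definitions from app locals; unresolved names are dropped.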
assistantData.tools = tools
.map((tool) => {
if (typeof tool !== 'string') {
return tool;
}
return req.app.locals.availableTools[tool];
})
.filter((tool) => tool);
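// Azure OpenAI expects the deployment name where the OpenAI API expects a model name.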
if (openai.locals?.azureOptions) {
assistantData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName;
}
const assistant = await openai.beta.assistants.create(assistantData);
logger.debug('/assistants/', assistant);
res.status(201).json(assistant);
} catch (error) {
logger.error('[/assistants] Error creating assistant', error);
res.status(500).json({ error: error.message });
}
});

/**
* Retrieves an assistant.
* @route GET /assistants/:id
* @param {string} req.params.id - Assistant identifier.
* @returns {Assistant} 200 - success response - application/json
*/
router.get('/:id', async (req, res) => {
try {
/** @type {{ openai: OpenAI }} */
const { openai } = await initializeClient({ req, res });
const assistant_id = req.params.id;
const assistant = await openai.beta.assistants.retrieve(assistant_id);
res.json(assistant);
} catch (error) {
logger.error('[/assistants/:id] Error retrieving assistant', error);
res.status(500).json({ error: error.message });
}
});

/**
* Modifies an assistant.
* @route PATCH /assistants/:id
* @param {string} req.params.id - Assistant identifier.
* @param {AssistantUpdateParams} req.body - The assistant update parameters.
* @returns {Assistant} 200 - success response - application/json
*/
router.patch('/:id', async (req, res) => {
try {
/** @type {{ openai: OpenAI }} */
const { openai } = await initializeClient({ req, res });
const assistant_id = req.params.id;
const updateData = req.body;
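// Resolve tool names to their full definitions, as on create; unresolved names are dropped.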
updateData.tools = (updateData.tools ?? [])
.map((tool) => {
if (typeof tool !== 'string') {
return tool;
}
return req.app.locals.availableTools[tool];
})
.filter((tool) => tool);
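// For Azure, substitute the deployment name only when a model change was requested.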
if (openai.locals?.azureOptions && updateData.model) {
updateData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName;
}
const updatedAssistant = await openai.beta.assistants.update(assistant_id, updateData);
res.json(updatedAssistant);
} catch (error) {
logger.error('[/assistants/:id] Error updating assistant', error);
res.status(500).json({ error: error.message });
}
});

/**
* Deletes an assistant.
* @route DELETE /assistants/:id
* @param {string} req.params.id - Assistant identifier.
* @returns {object} 200 - deletion status - application/json
*/
router.delete('/:id', async (req, res) => {
try {
/** @type {{ openai: OpenAI }} */
const { openai } = await initializeClient({ req, res });
const assistant_id = req.params.id;
const deletionStatus = await openai.beta.assistants.del(assistant_id);
res.json(deletionStatus);
} catch (error) {
logger.error('[/assistants/:id] Error deleting assistant', error);
res.status(500).json({ error: 'Error deleting assistant' });
}
});

/**
* Returns a list of assistants.
* @route GET /assistants
* @param {AssistantListParams} req.query - The assistant list parameters for pagination and sorting.
* @returns {AssistantListResponse} 200 - success response - application/json
*/
router.get('/', async (req, res) => {
try {
const { limit, order, after, before } = req.query;
const query = { limit, order, after, before };
const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];
/** @type {AssistantListResponse} */
let body;
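// Use the Azure-specific listing when the Azure config enables assistants; otherwise list via the standard client.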
if (azureConfig?.assistants) {
body = await listAssistantsForAzure({ req, res, azureConfig, query });
} else {
({ body } = await listAssistants({ req, res, query }));
}
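// Apply the optional allow list (supportedIds) or block list (excludedIds) from the assistants endpoint config.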
if (req.app.locals?.[EModelEndpoint.assistants]) {
/** @type {Partial<TAssistantEndpoint>} */
const assistantsConfig = req.app.locals[EModelEndpoint.assistants];
const { supportedIds, excludedIds } = assistantsConfig;
if (supportedIds?.length) {
body.data = body.data.filter((assistant) => supportedIds.includes(assistant.id));
} else if (excludedIds?.length) {
body.data = body.data.filter((assistant) => !excludedIds.includes(assistant.id));
}
}
res.json(body);
} catch (error) {
logger.error('[/assistants] Error listing assistants', error);
res.status(500).json({ message: 'Error listing assistants' });
}
});

/**
* Returns a list of the user's assistant documents (metadata saved to database).
* @route GET /assistants/documents
* @returns {AssistantDocument[]} 200 - success response - application/json
*/
router.get('/documents', async (req, res) => {
try {
res.json(await getAssistants({ user: req.user.id }));
} catch (error) {
logger.error('[/assistants/documents] Error listing assistant documents', error);
res.status(500).json({ error: error.message });
}
});

/**
* Uploads and updates an avatar for a specific assistant.
* @route POST /avatar/:assistant_id
* @param {string} req.params.assistant_id - The ID of the assistant.
* @param {Express.Multer.File} req.file - The avatar image file.
* @param {string} [req.body.metadata] - Optional metadata for the assistant's avatar.
* @returns {Object} 201 - success response - application/json
*/
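// Expects multipart/form-data with a `file` field (the image) and an optional
// `metadata` field containing the assistant's current metadata as a JSON string.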
router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) => {
try {
const { assistant_id } = req.params;
if (!assistant_id) {
return res.status(400).json({ message: 'Assistant ID is required' });
}
let { metadata: _metadata = '{}' } = req.body;
/** @type {{ openai: OpenAI }} */
const { openai } = await initializeClient({ req, res });
const image = await uploadImageBuffer({ req, context: FileContext.avatar });
try {
_metadata = JSON.parse(_metadata);
} catch (error) {
logger.error('[/avatar/:assistant_id] Error parsing metadata', error);
_metadata = {};
}
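// If a previous avatar exists, delete the old file and its database record before saving the new one.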
if (_metadata.avatar && _metadata.avatar_source) {
const { deleteFile } = getStrategyFunctions(_metadata.avatar_source);
try {
await deleteFile(req, { filepath: _metadata.avatar });
await deleteFileByFilter({ filepath: _metadata.avatar });
} catch (error) {
logger.error('[/avatar/:assistant_id] Error deleting old avatar', error);
}
}
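// Merge the existing metadata with the new avatar's location and storage source.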
const metadata = {
..._metadata,
avatar: image.filepath,
avatar_source: req.app.locals.fileStrategy,
};
const promises = [];
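// Update the local assistant document and the assistant's metadata on the provider in parallel.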
promises.push(
updateAssistant(
{ assistant_id, user: req.user.id },
{
avatar: {
filepath: image.filepath,
source: req.app.locals.fileStrategy,
},
},
),
);
promises.push(openai.beta.assistants.update(assistant_id, { metadata }));
const resolved = await Promise.all(promises);
res.status(201).json(resolved[1]);
} catch (error) {
const message = 'An error occurred while updating the Assistant Avatar';
logger.error(message, error);
res.status(500).json({ message });
}
});

module.exports = router;