LibreChat/api/server/services/Files/OpenAI/crud.js
Danny Avila 5cd5c3bef8
🅰️ feat: Azure OpenAI Assistants API Support (#1992)
* chore: rename dir from `assistant` to plural

* feat: `assistants` field for azure config, spread options in AppService

* refactor: rename constructAzureURL param for azure as `azureOptions`

* chore: bump openai and bun

* chore(loadDefaultModels): change naming of assistant -> assistants

* feat: load azure settings with currect baseURL for assistants' initializeClient

* refactor: add `assistants` flags to groups and model configs, add mapGroupToAzureConfig

* feat(loadConfigEndpoints): initialize assistants endpoint if azure flag `assistants` is enabled

* feat(AppService): determine assistant models on startup, throw Error if none

* refactor(useDeleteAssistantMutation): send model along with assistant id for delete mutations

* feat: support listing and deleting assistants with azure

* feat: add model query to assistant avatar upload

* feat: add azure support for retrieveRun method

* refactor: update OpenAIClient initialization

* chore: update README

* fix(ci): tests passing

* refactor(uploadOpenAIFile): improve logging and use more efficient REST API method

* refactor(useFileHandling): add model to metadata to target Azure region compatible with current model

* chore(files): add azure naming pattern for valid file id recognition

* fix(assistants): initialize openai with first available assistant model if none provided

* refactor(uploadOpenAIFile): add content type for azure, initialize formdata before azure options

* refactor(sleep): move sleep function out of Runs and into `~/server/utils`

* fix(azureOpenAI/assistants): make sure to only overwrite models with assistant models if `assistants` flag is enabled

* refactor(uploadOpenAIFile): revert to old method

* chore(uploadOpenAIFile): use enum for file purpose

* docs: azureOpenAI update guide with more info, examples

* feat: enable/disable assistant capabilities and specify retrieval models

* refactor: optional chain conditional statement in loadConfigModels.js

* docs: add assistants examples

* chore: update librechat.example.yaml

* docs(azure): update note of file upload behavior in Azure OpenAI Assistants

* chore: update docs and add descriptive message about assistant errors

* fix: prevent message submission with invalid assistant or if files loading

* style: update Landing icon & text when assistant is not selected

* chore: bump librechat-data-provider to 0.4.8

* fix(assistants/azure): assign req.body.model for proper azure init to abort runs
2024-03-14 17:21:42 -04:00


const fs = require('fs');
const { FilePurpose } = require('librechat-data-provider');
const { sleep } = require('~/server/utils');
const { logger } = require('~/config');
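// `sleep(ms)` from `~/server/utils` is assumed to be a simple promise-based delay
// (setTimeout wrapped in a Promise); it is used below while a file finishes processing.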

/**
 * Uploads a file that can be used across various OpenAI services.
 *
 * @param {Express.Request} req - The request object from Express. It should have a `user` property with an `id`
 *                                representing the user.
 * @param {Express.Multer.File} file - The file uploaded to the server via multer.
 * @param {OpenAIClient} openai - The initialized OpenAI client.
 * @returns {Promise<OpenAIFile>}
 */
async function uploadOpenAIFile(req, file, openai) {
  const uploadedFile = await openai.files.create({
    file: fs.createReadStream(file.path),
    purpose: FilePurpose.Assistants,
  });

  logger.debug(
    `[uploadOpenAIFile] User ${req.user.id} successfully uploaded file to OpenAI`,
    uploadedFile,
  );
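
  // The upload may still be processing (more likely with Azure); wait once for a fixed
  // interval before returning, without re-checking the file's status.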
  if (uploadedFile.status !== 'processed') {
    const sleepTime = 2500;
    logger.debug(
      `[uploadOpenAIFile] File ${
        uploadedFile.id
      } is not yet processed. Waiting for it to be processed (${sleepTime / 1000}s)...`,
    );
    await sleep(sleepTime);
  }

  return uploadedFile;
}

/**
 * Deletes a file previously uploaded to OpenAI.
 *
 * @param {Express.Request} req - The request object from Express.
 * @param {MongoFile} file - The database representation of the uploaded file.
 * @param {OpenAI} openai - The initialized OpenAI client.
 * @returns {Promise<void>}
 */
async function deleteOpenAIFile(req, file, openai) {
  try {
    const res = await openai.files.del(file.file_id);
    if (!res.deleted) {
      throw new Error('OpenAI returned `false` for deleted status');
    }
    logger.debug(
      `[deleteOpenAIFile] User ${req.user.id} successfully deleted ${file.file_id} from OpenAI`,
    );
  } catch (error) {
    logger.error('[deleteOpenAIFile] Error deleting file from OpenAI: ' + error.message);
    throw error;
  }
}

module.exports = { uploadOpenAIFile, deleteOpenAIFile };
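
For context, a minimal sketch of how these helpers could be wired into an Express route. The route paths, the multer setup, and the `getOpenAIClient` initializer are illustrative assumptions for this example, not LibreChat's actual file routes:

// Hypothetical wiring — route paths, multer config, and getOpenAIClient are assumptions.
const express = require('express');
const multer = require('multer');
const { uploadOpenAIFile, deleteOpenAIFile } = require('./crud');
const { getOpenAIClient } = require('./helpers'); // assumed: returns an initialized OpenAI/Azure client

const router = express.Router();
const upload = multer({ dest: '/tmp/librechat-uploads' });

// Upload a file to the OpenAI (or Azure OpenAI) Files API.
router.post('/files/openai', upload.single('file'), async (req, res) => {
  try {
    const openai = await getOpenAIClient(req);
    const uploaded = await uploadOpenAIFile(req, req.file, openai);
    res.status(201).json(uploaded);
  } catch (error) {
    res.status(500).json({ message: error.message });
  }
});

// Delete a previously uploaded file by its OpenAI file id.
router.delete('/files/openai/:file_id', async (req, res) => {
  try {
    const openai = await getOpenAIClient(req);
    await deleteOpenAIFile(req, { file_id: req.params.file_id }, openai);
    res.json({ deleted: true });
  } catch (error) {
    res.status(500).json({ message: error.message });
  }
});

module.exports = router;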