⬇️ feat: Assistant File Downloads (#2234)

* WIP: basic route for file downloads and file strategy for generating a readable stream to pipe as the response

* chore(DALLE3): add typing for OpenAI client

* chore: add `CONSOLE_JSON` notes to dotenv.md

* WIP: first pass OpenAI Assistants File Output handling

* feat: first pass assistants output file download from openai

* chore: add `yml` variation of `librechat.yaml` to .gitignore

* refactor(retrieveAndProcessFile): remove redundancies

* fix(syncMessages): explicit sort of apiMessages to fix message order on abort

* chore: add logs for warnings and errors, show toast on frontend

* chore: add logger where console was still being used
Danny Avila 2024-03-29 08:23:38 -04:00 committed by GitHub
parent 7945fea0f9
commit a00756c469
27 changed files with 555 additions and 248 deletions

.gitignore

@@ -50,6 +50,7 @@ bower_components/
 #config file
 librechat.yaml
+librechat.yml
 # Environment
 .npmrc


@@ -1,5 +1,6 @@
 const axios = require('axios');
 const { isEnabled } = require('~/server/utils');
+const { logger } = require('~/config');

 const footer = `Use the context as your learned knowledge to better answer the user.
@@ -55,7 +56,7 @@ function createContextHandlers(req, userMessageContent) {
       processedFiles.push(file);
       processedIds.add(file.file_id);
     } catch (error) {
-      console.error(`Error processing file ${file.filename}:`, error);
+      logger.error(`Error processing file ${file.filename}:`, error);
     }
   }
 };
@@ -144,8 +145,8 @@ function createContextHandlers(req, userMessageContent) {
     return prompt;
   } catch (error) {
-    console.error('Error creating context:', error);
-    throw error; // Re-throw the error to propagate it to the caller
+    logger.error('Error creating context:', error);
+    throw error;
   }
 };


@@ -43,6 +43,7 @@ class DALLE3 extends Tool {
       config.httpAgent = new HttpsProxyAgent(process.env.PROXY);
     }

+    /** @type {OpenAI} */
     this.openai = new OpenAI(config);
     this.name = 'dalle';
     this.description = `Use DALLE to create images from text descriptions.


@@ -1,5 +1,6 @@
 const { getUserPluginAuthValue } = require('~/server/services/PluginService');
 const { availableTools } = require('../');
+const { logger } = require('~/config');

 /**
  * Loads a suite of tools with authentication values for a given user, supporting alternate authentication fields.
@@ -30,7 +31,7 @@ const loadToolSuite = async ({ pluginKey, tools, user, options = {} }) => {
         return value;
       }
     } catch (err) {
-      console.error(`Error fetching plugin auth value for ${field}: ${err.message}`);
+      logger.error(`Error fetching plugin auth value for ${field}: ${err.message}`);
     }
   }
   return null;
@@ -41,7 +42,7 @@ const loadToolSuite = async ({ pluginKey, tools, user, options = {} }) => {
     if (authValue !== null) {
       authValues[auth.authField] = authValue;
     } else {
-      console.warn(`No auth value found for ${auth.authField}`);
+      logger.warn(`[loadToolSuite] No auth value found for ${auth.authField}`);
     }
   }


@@ -2,6 +2,7 @@ const mongoose = require('mongoose');
 const { isEnabled } = require('../server/utils/handleText');
 const transactionSchema = require('./schema/transaction');
 const { getMultiplier } = require('./tx');
+const { logger } = require('~/config');
 const Balance = require('./Balance');

 const cancelRate = 1.15;
@@ -64,7 +65,7 @@ async function getTransactions(filter) {
   try {
     return await Transaction.find(filter).lean();
   } catch (error) {
-    console.error('Error querying transactions:', error);
+    logger.error('Error querying transactions:', error);
     throw error;
   }
 }


@@ -15,7 +15,9 @@ const mongoose = require('mongoose');
  * @property {'file'} object - Type of object, always 'file'
  * @property {string} type - Type of file
  * @property {number} usage - Number of uses of the file
+ * @property {string} [context] - Context of the file origin
  * @property {boolean} [embedded] - Whether or not the file is embedded in vector db
+ * @property {string} [model] - The model to identify the group region of the file (for Azure OpenAI hosting)
  * @property {string} [source] - The source of the file
  * @property {number} [width] - Optional width of the file
  * @property {number} [height] - Optional height of the file
@@ -82,6 +84,9 @@ const fileSchema = mongoose.Schema(
       type: String,
       default: FileSources.local,
     },
+    model: {
+      type: String,
+    },
     width: Number,
     height: Number,
     expiresAt: {


@@ -1,5 +1,6 @@
 const axios = require('axios');
 const denyRequest = require('./denyRequest');
+const { logger } = require('~/config');

 async function moderateText(req, res, next) {
   if (process.env.OPENAI_MODERATION === 'true') {
@@ -28,7 +29,7 @@ async function moderateText(req, res, next) {
       return await denyRequest(req, res, errorMessage);
     }
   } catch (error) {
-    console.error('Error in moderateText:', error);
+    logger.error('Error in moderateText:', error);
     const errorMessage = 'error in moderation check';
     return await denyRequest(req, res, errorMessage);
   }


@@ -597,7 +597,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
     /** @type {ResponseMessage} */
     const responseMessage = {
-      ...response.finalMessage,
+      ...(response.responseMessage ?? response.finalMessage),
       parentMessageId: userMessageId,
       conversationId,
       user: req.user.id,


@@ -1,12 +1,13 @@
-const axios = require('axios');
 const fs = require('fs').promises;
 const express = require('express');
-const { isUUID } = require('librechat-data-provider');
+const { isUUID, FileSources } = require('librechat-data-provider');
 const {
   filterFile,
   processFileUpload,
   processDeleteRequest,
 } = require('~/server/services/Files/process');
+const { initializeClient } = require('~/server/services/Endpoints/assistants');
+const { getStrategyFunctions } = require('~/server/services/Files/strategies');
 const { getFiles } = require('~/models/File');
 const { logger } = require('~/config');
@@ -65,28 +66,63 @@ router.delete('/', async (req, res) => {
   }
 });

-router.get('/download/:fileId', async (req, res) => {
+router.get('/download/:userId/:filepath', async (req, res) => {
   try {
-    const { fileId } = req.params;
-    const options = {
-      headers: {
-        // TODO: Client initialization for OpenAI API Authentication
-        Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
-      },
-      responseType: 'stream',
-    };
-
-    const fileResponse = await axios.get(`https://api.openai.com/v1/files/${fileId}`, {
-      headers: options.headers,
-    });
-    const { filename } = fileResponse.data;
-
-    const response = await axios.get(`https://api.openai.com/v1/files/${fileId}/content`, options);
-    res.setHeader('Content-Disposition', `attachment; filename="${filename}"`);
-    response.data.pipe(res);
+    const { userId, filepath } = req.params;
+    if (userId !== req.user.id) {
+      logger.warn(`${errorPrefix} forbidden: ${file_id}`);
+      return res.status(403).send('Forbidden');
+    }
+
+    const parts = filepath.split('/');
+    const file_id = parts[2];
+    const [file] = await getFiles({ file_id });
+    const errorPrefix = `File download requested by user ${userId}`;
+
+    if (!file) {
+      logger.warn(`${errorPrefix} not found: ${file_id}`);
+      return res.status(404).send('File not found');
+    }
+
+    if (!file.filepath.includes(userId)) {
+      logger.warn(`${errorPrefix} forbidden: ${file_id}`);
+      return res.status(403).send('Forbidden');
+    }
+
+    if (file.source === FileSources.openai && !file.model) {
+      logger.warn(`${errorPrefix} has no associated model: ${file_id}`);
+      return res.status(400).send('The model used when creating this file is not available');
+    }
+
+    const { getDownloadStream } = getStrategyFunctions(file.source);
+    if (!getDownloadStream) {
+      logger.warn(`${errorPrefix} has no stream method implemented: ${file.source}`);
+      return res.status(501).send('Not Implemented');
+    }
+
+    const setHeaders = () => {
+      res.setHeader('Content-Disposition', `attachment; filename="${file.filename}"`);
+      res.setHeader('Content-Type', 'application/octet-stream');
+    };
+
+    /** @type {{ body: import('stream').PassThrough } | undefined} */
+    let passThrough;
+    /** @type {ReadableStream | undefined} */
+    let fileStream;
+
+    if (file.source === FileSources.openai) {
+      req.body = { model: file.model };
+      const { openai } = await initializeClient({ req, res });
+      passThrough = await getDownloadStream(file_id, openai);
+      setHeaders();
+      passThrough.body.pipe(res);
+    } else {
+      fileStream = getDownloadStream(file_id);
+      setHeaders();
+      fileStream.pipe(res);
+    }
   } catch (error) {
-    console.error('Error downloading file:', error);
+    logger.error('Error downloading file:', error);
     res.status(500).send('Error downloading file');
   }
 });
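With this route in place, an assistant-generated file is served by the LibreChat API itself instead of being fetched from OpenAI directly in the browser. A minimal sketch of calling the endpoint from a Node script; the user ID, file path, cookie header, and port below are illustrative assumptions, and the `filepath` segment mirrors the `files/<userId>/<file_id>/<filename>` pattern the frontend extracts from message links:

```js
const fs = require('fs');
const axios = require('axios');

// Illustrative values only: the userId must match the authenticated user,
// and the filepath is URL-encoded before being passed as a single route param.
const userId = '65f0c0ffee0000000000abcd';
const filepath = encodeURIComponent(`files/${userId}/file-abc123/chart.png`);

async function downloadAssistantFile() {
  const response = await axios.get(
    `http://localhost:3080/api/files/download/${userId}/${filepath}`,
    {
      responseType: 'stream',
      // Assumed auth mechanism: reuse a logged-in session cookie.
      headers: { Cookie: process.env.LIBRECHAT_COOKIE ?? '' },
    },
  );
  // The route sets Content-Disposition and Content-Type; here we just save the bytes.
  response.data.pipe(fs.createWriteStream('chart.png'));
}

downloadAssistantFile().catch(console.error);
```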


@@ -2,9 +2,10 @@ const fs = require('fs');
 const path = require('path');
 const axios = require('axios');
 const fetch = require('node-fetch');
-const { ref, uploadBytes, getDownloadURL, deleteObject } = require('firebase/storage');
+const { ref, uploadBytes, getDownloadURL, getStream, deleteObject } = require('firebase/storage');
 const { getBufferMetadata } = require('~/server/utils');
 const { getFirebaseStorage } = require('./initialize');
+const { logger } = require('~/config');

 /**
  * Deletes a file from Firebase Storage.
@@ -15,7 +16,7 @@ const { getFirebaseStorage } = require('./initialize');
 async function deleteFile(basePath, fileName) {
   const storage = getFirebaseStorage();
   if (!storage) {
-    console.error('Firebase is not initialized. Cannot delete file from Firebase Storage.');
+    logger.error('Firebase is not initialized. Cannot delete file from Firebase Storage.');
     throw new Error('Firebase is not initialized');
   }
@@ -23,9 +24,9 @@ async function deleteFile(basePath, fileName) {
   try {
     await deleteObject(storageRef);
-    console.log('File deleted successfully from Firebase Storage');
+    logger.debug('File deleted successfully from Firebase Storage');
   } catch (error) {
-    console.error('Error deleting file from Firebase Storage:', error.message);
+    logger.error('Error deleting file from Firebase Storage:', error.message);
     throw error;
   }
 }
@@ -51,7 +52,7 @@ async function deleteFile(basePath, fileName) {
 async function saveURLToFirebase({ userId, URL, fileName, basePath = 'images' }) {
   const storage = getFirebaseStorage();
   if (!storage) {
-    console.error('Firebase is not initialized. Cannot save file to Firebase Storage.');
+    logger.error('Firebase is not initialized. Cannot save file to Firebase Storage.');
     return null;
   }
@@ -63,7 +64,7 @@ async function saveURLToFirebase({ userId, URL, fileName, basePath = 'images' })
     await uploadBytes(storageRef, buffer);
     return await getBufferMetadata(buffer);
   } catch (error) {
-    console.error('Error uploading file to Firebase Storage:', error.message);
+    logger.error('Error uploading file to Firebase Storage:', error.message);
     return null;
   }
 }
@@ -87,7 +88,7 @@ async function saveURLToFirebase({ userId, URL, fileName, basePath = 'images' })
 async function getFirebaseURL({ fileName, basePath = 'images' }) {
   const storage = getFirebaseStorage();
   if (!storage) {
-    console.error('Firebase is not initialized. Cannot get image URL from Firebase Storage.');
+    logger.error('Firebase is not initialized. Cannot get image URL from Firebase Storage.');
     return null;
   }
@@ -96,7 +97,7 @@ async function getFirebaseURL({ fileName, basePath = 'images' }) {
   try {
     return await getDownloadURL(storageRef);
   } catch (error) {
-    console.error('Error fetching file URL from Firebase Storage:', error.message);
+    logger.error('Error fetching file URL from Firebase Storage:', error.message);
     return null;
   }
 }
@@ -212,6 +213,26 @@ async function uploadFileToFirebase({ req, file, file_id }) {
   return { filepath: downloadURL, bytes };
 }

+/**
+ * Retrieves a readable stream for a file from Firebase storage.
+ *
+ * @param {string} filepath - The filepath.
+ * @returns {ReadableStream} A readable stream of the file.
+ */
+function getFirebaseFileStream(filepath) {
+  try {
+    const storage = getFirebaseStorage();
+    if (!storage) {
+      throw new Error('Firebase is not initialized');
+    }
+    const fileRef = ref(storage, filepath);
+    return getStream(fileRef);
+  } catch (error) {
+    logger.error('Error getting Firebase file stream:', error);
+    throw error;
+  }
+}
+
 module.exports = {
   deleteFile,
   getFirebaseURL,
@@ -219,4 +240,5 @@ module.exports = {
   deleteFirebaseFile,
   uploadFileToFirebase,
   saveBufferToFirebase,
+  getFirebaseFileStream,
 };
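The Firebase strategy can now hand back a readable stream instead of only a download URL. A rough usage sketch, assuming Firebase has been initialized and that the stored path follows the `images/<userId>/<file_id>` layout used by the upload helpers (the handler and path below are illustrative); the commit relies on `getStream`, which the Firebase JS SDK provides for Node environments:

```js
const { getFirebaseFileStream } = require('./Firebase'); // path as used by the strategy index

// Hypothetical Express handler that streams a stored file back to the client.
function sendFirebaseFile(req, res) {
  const filepath = `images/${req.user.id}/${req.params.file_id}.webp`; // illustrative path
  const stream = getFirebaseFileStream(filepath);
  res.setHeader('Content-Type', 'application/octet-stream');
  stream.pipe(res);
}
```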


@@ -255,6 +255,21 @@ async function uploadLocalFile({ req, file, file_id }) {
   return { filepath, bytes };
 }

+/**
+ * Retrieves a readable stream for a file from local storage.
+ *
+ * @param {string} filepath - The filepath.
+ * @returns {ReadableStream} A readable stream of the file.
+ */
+function getLocalFileStream(filepath) {
+  try {
+    return fs.createReadStream(filepath);
+  } catch (error) {
+    logger.error('Error getting local file stream:', error);
+    throw error;
+  }
+}
+
 module.exports = {
   saveLocalFile,
   saveLocalImage,
@@ -263,4 +278,5 @@ module.exports = {
   getLocalFileURL,
   deleteLocalFile,
   uploadLocalFile,
+  getLocalFileStream,
 };


@@ -60,4 +60,20 @@ async function deleteOpenAIFile(req, file, openai) {
   }
 }

-module.exports = { uploadOpenAIFile, deleteOpenAIFile };
+/**
+ * Retrieves a readable stream for a file from local storage.
+ *
+ * @param {string} file_id - The file_id.
+ * @param {OpenAI} openai - The initialized OpenAI client.
+ * @returns {Promise<ReadableStream>} A readable stream of the file.
+ */
+async function getOpenAIFileStream(file_id, openai) {
+  try {
+    return await openai.files.content(file_id);
+  } catch (error) {
+    logger.error('Error getting OpenAI file download stream:', error);
+    throw error;
+  }
+}
+
+module.exports = { uploadOpenAIFile, deleteOpenAIFile, getOpenAIFileStream };
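`openai.files.content()` in the v4 SDK resolves to a fetch-style `Response`, which is why the download route above pipes `passThrough.body` rather than the return value itself. A small isolated sketch of the same pattern, assuming an already-initialized client and an Express `res`:

```js
// Sketch: stream an OpenAI-hosted file to an Express response.
async function streamOpenAIFile(openai, file_id, res) {
  const response = await getOpenAIFileStream(file_id, openai); // fetch-style Response
  res.setHeader('Content-Type', 'application/octet-stream');
  response.body.pipe(res); // `.body` is the underlying readable stream
}
```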


@@ -3,6 +3,7 @@ const path = require('path');
 const sharp = require('sharp');
 const { resizeImageBuffer } = require('./resize');
 const { getStrategyFunctions } = require('../strategies');
+const { logger } = require('~/config');

 /**
  * Converts an image file or buffer to WebP format with specified resolution.
@@ -61,7 +62,7 @@ async function convertToWebP(req, file, resolution = 'high', basename = '') {
     const bytes = Buffer.byteLength(outputBuffer);
     return { filepath: savedFilePath, bytes, width, height };
   } catch (err) {
-    console.error(err);
+    logger.error(err);
     throw err;
   }
 }


@@ -1,6 +1,6 @@
 const path = require('path');
-const mime = require('mime');
 const { v4 } = require('uuid');
+const mime = require('mime/lite');
 const {
   isUUID,
   megabyte,
@@ -13,13 +13,11 @@ const {
 const { convertToWebP, resizeAndConvert } = require('~/server/services/Files/images');
 const { initializeClient } = require('~/server/services/Endpoints/assistants');
 const { createFile, updateFileUsage, deleteFiles } = require('~/models/File');
-const { isEnabled, determineFileType } = require('~/server/utils');
 const { LB_QueueAsyncCall } = require('~/server/utils/queue');
 const { getStrategyFunctions } = require('./strategies');
+const { determineFileType } = require('~/server/utils');
 const { logger } = require('~/config');

-const { GPTS_DOWNLOAD_IMAGES = 'true' } = process.env;
-
 const processFiles = async (files) => {
   const promises = [];
   for (let file of files) {
@@ -293,9 +291,10 @@ const processFileUpload = async ({ req, res, file, metadata }) => {
     file_id: id ?? file_id,
     temp_file_id,
     bytes,
-    filepath: isAssistantUpload ? `${openai.baseURL}/files/${id}` : filepath,
     filename: filename ?? file.originalname,
+    filepath: isAssistantUpload ? `${openai.baseURL}/files/${id}` : filepath,
     context: isAssistantUpload ? FileContext.assistants : FileContext.message_attachment,
+    model: isAssistantUpload ? req.body.model : undefined,
     type: file.mimetype,
     embedded,
     source,
@@ -305,6 +304,77 @@ const processFileUpload = async ({ req, res, file, metadata }) => {
   res.status(200).json({ message: 'File uploaded and processed successfully', ...result });
 };
/**
* @param {object} params - The params object.
* @param {OpenAI} params.openai - The OpenAI client instance.
* @param {string} params.file_id - The ID of the file to retrieve.
* @param {string} params.userId - The user ID.
* @param {string} params.filename - The name of the file.
* @param {boolean} [params.saveFile=false] - Whether to save the file metadata to the database.
* @param {boolean} [params.updateUsage=false] - Whether to update file usage in database.
*/
const processOpenAIFile = async ({
openai,
file_id,
userId,
filename,
saveFile = false,
updateUsage = false,
}) => {
const _file = await openai.files.retrieve(file_id);
const filepath = `${openai.baseURL}/files/${userId}/${file_id}/${filename}`;
const file = {
..._file,
file_id,
filepath,
usage: 1,
filename,
user: userId,
source: FileSources.openai,
model: openai.req.body.model,
type: mime.getType(filename),
context: FileContext.assistants_output,
};
if (saveFile) {
await createFile(file, true);
} else if (updateUsage) {
try {
await updateFileUsage({ file_id });
} catch (error) {
logger.error('Error updating file usage', error);
}
}
return file;
};
/**
* Process OpenAI image files, convert to webp, save and return file metadata.
* @param {object} params - The params object.
* @param {Express.Request} params.req - The Express request object.
* @param {Buffer} params.buffer - The image buffer.
* @param {string} params.file_id - The file ID.
* @param {string} params.filename - The filename.
* @param {string} params.fileExt - The file extension.
* @returns {Promise<MongoFile>} The file metadata.
*/
const processOpenAIImageOutput = async ({ req, buffer, file_id, filename, fileExt }) => {
const _file = await convertToWebP(req, buffer, 'high', `${file_id}${fileExt}`);
const file = {
..._file,
file_id,
usage: 1,
filename,
user: req.user.id,
type: 'image/webp',
source: req.app.locals.fileStrategy,
context: FileContext.assistants_output,
};
createFile(file, true);
return file;
};
/**
 * Retrieves and processes an OpenAI file based on its type.
 *
@ -328,107 +398,69 @@ async function retrieveAndProcessFile({
return null; return null;
} }
if (client.attachedFileIds?.has(file_id)) { if (client.attachedFileIds?.has(file_id) || client.processedFileIds?.has(file_id)) {
return { return processOpenAIFile({ ...processArgs, updateUsage: true });
file_id,
// filepath: TODO: local source filepath?,
source: FileSources.openai,
};
} }
let basename = _basename; let basename = _basename;
const downloadImages = isEnabled(GPTS_DOWNLOAD_IMAGES); const fileExt = path.extname(basename);
const processArgs = { openai, file_id, filename: basename, userId: client.req.user.id };
/** /**
* @param {string} file_id - The ID of the file to retrieve. * @returns {Promise<Buffer>} The file data buffer.
* @param {boolean} [save] - Whether to save the file metadata to the database.
*/ */
const retrieveFile = async (file_id, save = false) => { const getDataBuffer = async () => {
const _file = await openai.files.retrieve(file_id);
const filepath = `/api/files/download/${file_id}`;
const file = {
..._file,
type: mime.getType(_file.filename),
filepath,
usage: 1,
file_id,
context: _file.purpose ?? FileContext.message_attachment,
source: FileSources.openai,
};
if (save) {
await createFile(file, true);
} else {
try {
await updateFileUsage({ file_id });
} catch (error) {
logger.error('Error updating file usage', error);
}
}
return file;
};
// If image downloads are not enabled or no basename provided, return only the file metadata
if (!downloadImages || (!basename && !downloadImages)) {
return await retrieveFile(file_id, true);
}
let data;
try {
const response = await openai.files.content(file_id); const response = await openai.files.content(file_id);
data = await response.arrayBuffer(); const arrayBuffer = await response.arrayBuffer();
} catch (error) { return Buffer.from(arrayBuffer);
logger.error('Error downloading file from OpenAI:', error);
return await retrieveFile(file_id);
}
if (!data) {
return await retrieveFile(file_id);
}
const dataBuffer = Buffer.from(data);
/**
* @param {Buffer} dataBuffer
* @param {string} fileExt
*/
const processAsImage = async (dataBuffer, fileExt) => {
// Logic to process image files, convert to webp, etc.
const _file = await convertToWebP(client.req, dataBuffer, 'high', `${file_id}${fileExt}`);
const file = {
..._file,
type: 'image/webp',
usage: 1,
file_id,
source: FileSources.openai,
};
createFile(file, true);
return file;
}; };
/** @param {Buffer} dataBuffer */ // If no basename provided, return only the file metadata
const processOtherFileTypes = async (dataBuffer) => { if (!basename) {
// Logic to handle other file types return await processOpenAIFile({ ...processArgs, saveFile: true });
logger.debug('[retrieveAndProcessFile] Non-image file type detected'); }
return { filepath: `/api/files/download/${file_id}`, bytes: dataBuffer.length };
}; let dataBuffer;
if (unknownType || !fileExt || imageExtRegex.test(basename)) {
try {
dataBuffer = await getDataBuffer();
} catch (error) {
logger.error('Error downloading file from OpenAI:', error);
dataBuffer = null;
}
}
if (!dataBuffer) {
return await processOpenAIFile({ ...processArgs, saveFile: true });
}
// If the filetype is unknown, inspect the file // If the filetype is unknown, inspect the file
if (unknownType || !path.extname(basename)) { if (dataBuffer && (unknownType || !fileExt)) {
const detectedExt = await determineFileType(dataBuffer); const detectedExt = await determineFileType(dataBuffer);
if (detectedExt && imageExtRegex.test('.' + detectedExt)) { const isImageOutput = detectedExt && imageExtRegex.test('.' + detectedExt);
return await processAsImage(dataBuffer, detectedExt);
} else {
return await processOtherFileTypes(dataBuffer);
}
}
// Existing logic for processing known image types if (!isImageOutput) {
if (downloadImages && basename && path.extname(basename) && imageExtRegex.test(basename)) { return await processOpenAIFile({ ...processArgs, saveFile: true });
return await processAsImage(dataBuffer, path.extname(basename)); }
return await processOpenAIImageOutput({
file_id,
req: client.req,
buffer: dataBuffer,
filename: basename,
fileExt: detectedExt,
});
} else if (dataBuffer && imageExtRegex.test(basename)) {
return await processOpenAIImageOutput({
file_id,
req: client.req,
buffer: dataBuffer,
filename: basename,
fileExt,
});
} else { } else {
logger.debug('[retrieveAndProcessFile] Not an image or invalid extension: ', basename); logger.debug(`[retrieveAndProcessFile] Non-image file type detected: ${basename}`);
return await processOtherFileTypes(dataBuffer); return await processOpenAIFile({ ...processArgs, saveFile: true });
} }
} }
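The refactor above collapses the old `retrieveFile`, `processAsImage`, and `processOtherFileTypes` helpers into `processOpenAIFile` and `processOpenAIImageOutput`, and drops the `GPTS_DOWNLOAD_IMAGES` toggle. Condensed to its control flow (the buffer-sniffing branch that calls `determineFileType` for unknown types is omitted here), the new logic is roughly:

```js
// Condensed sketch of the new retrieveAndProcessFile flow; not a drop-in replacement.
async function retrieveAndProcessFileSketch({ openai, client, file_id, basename, unknownType }) {
  const processArgs = { openai, file_id, filename: basename, userId: client.req.user.id };

  // Previously attached or processed files only get a usage update.
  if (client.attachedFileIds?.has(file_id) || client.processedFileIds?.has(file_id)) {
    return processOpenAIFile({ ...processArgs, updateUsage: true });
  }

  // Without a basename there is nothing to inspect: save the metadata record only.
  if (!basename) {
    return processOpenAIFile({ ...processArgs, saveFile: true });
  }

  // Download bytes only when the file could be an image or its type is unknown.
  const fileExt = path.extname(basename);
  let dataBuffer = null;
  if (unknownType || !fileExt || imageExtRegex.test(basename)) {
    const response = await openai.files.content(file_id);
    dataBuffer = Buffer.from(await response.arrayBuffer());
  }

  // Image outputs are converted to webp and stored via the configured file strategy;
  // everything else is saved as a downloadable OpenAI-sourced file record.
  if (dataBuffer && imageExtRegex.test(basename)) {
    return processOpenAIImageOutput({
      req: client.req,
      buffer: dataBuffer,
      file_id,
      filename: basename,
      fileExt,
    });
  }
  return processOpenAIFile({ ...processArgs, saveFile: true });
}
```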


@@ -7,6 +7,7 @@ const {
   saveBufferToFirebase,
   uploadImageToFirebase,
   processFirebaseAvatar,
+  getFirebaseFileStream,
 } = require('./Firebase');
 const {
   getLocalFileURL,
@@ -16,8 +17,9 @@ const {
   uploadLocalImage,
   prepareImagesLocal,
   processLocalAvatar,
+  getLocalFileStream,
 } = require('./Local');
-const { uploadOpenAIFile, deleteOpenAIFile } = require('./OpenAI');
+const { uploadOpenAIFile, deleteOpenAIFile, getOpenAIFileStream } = require('./OpenAI');
 const { uploadVectors, deleteVectors } = require('./VectorDB');

 /**
@@ -35,6 +37,7 @@ const firebaseStrategy = () => ({
   prepareImagePayload: prepareImageURL,
   processAvatar: processFirebaseAvatar,
   handleImageUpload: uploadImageToFirebase,
+  getDownloadStream: getFirebaseFileStream,
 });

 /**
@@ -51,6 +54,7 @@ const localStrategy = () => ({
   processAvatar: processLocalAvatar,
   handleImageUpload: uploadLocalImage,
   prepareImagePayload: prepareImagesLocal,
+  getDownloadStream: getLocalFileStream,
 });

 /**
@@ -70,6 +74,8 @@ const vectorStrategy = () => ({
   handleImageUpload: null,
   /** @type {typeof prepareImagesLocal | null} */
   prepareImagePayload: null,
+  /** @type {typeof getLocalFileStream | null} */
+  getDownloadStream: null,
   handleFileUpload: uploadVectors,
   deleteFile: deleteVectors,
 });
@@ -94,6 +100,7 @@ const openAIStrategy = () => ({
   prepareImagePayload: null,
   deleteFile: deleteOpenAIFile,
   handleFileUpload: uploadOpenAIFile,
+  getDownloadStream: getOpenAIFileStream,
 });

 // Strategy Selector
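With `getDownloadStream` registered per source, the download route only needs the strategy selector to stay storage-agnostic. A brief sketch of resolving it for a given source (values are illustrative; sources without a stream method, such as `vectordb`, return `null` and the route answers 501):

```js
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { FileSources } = require('librechat-data-provider');

const { getDownloadStream } = getStrategyFunctions(FileSources.local);
if (getDownloadStream) {
  // For the local strategy this is a plain fs read stream of the stored filepath.
  const stream = getDownloadStream('/uploads/example-user/example-file.txt');
  stream.pipe(process.stdout);
}
```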


@@ -102,18 +102,20 @@ class StreamRunManager {
    * @returns {Promise<void>}
    */
   async addContentData(data) {
-    const { type, index } = data;
-    this.finalMessage.content[index] = { type, [type]: data[type] };
+    const { type, index, edited } = data;
+    /** @type {ContentPart} */
+    const contentPart = data[type];
+    this.finalMessage.content[index] = { type, [type]: contentPart };

-    if (type === ContentTypes.TEXT) {
-      this.text += data[type].value;
+    if (type === ContentTypes.TEXT && !edited) {
+      this.text += contentPart.value;
       return;
     }

     const contentData = {
       index,
       type,
-      [type]: data[type],
+      [type]: contentPart,
       thread_id: this.thread_id,
       messageId: this.finalMessage.messageId,
       conversationId: this.finalMessage.conversationId,
@@ -593,7 +595,7 @@
    */
   async handleMessageEvent(event) {
     if (event.event === AssistantStreamEvents.ThreadMessageCompleted) {
-      this.messageCompleted(event);
+      await this.messageCompleted(event);
     }
   }
@@ -613,6 +615,7 @@
     this.addContentData({
       [ContentTypes.TEXT]: { value: result.text },
       type: ContentTypes.TEXT,
+      edited: result.edited,
       index,
     });
     this.messages.push(message);


@@ -2,10 +2,9 @@ const path = require('path');
 const { v4 } = require('uuid');
 const {
   Constants,
-  FilePurpose,
   ContentTypes,
-  imageExtRegex,
   EModelEndpoint,
+  AnnotationTypes,
   defaultOrderQuery,
 } = require('librechat-data-provider');
 const { retrieveAndProcessFile } = require('~/server/services/Files/process');
@@ -434,13 +433,15 @@ async function checkMessageGaps({ openai, latestMessageId, thread_id, run_id, co
   }

   let addedCurrentMessage = false;
-  const apiMessages = response.data.map((msg) => {
-    if (msg.id === currentMessage.id) {
-      addedCurrentMessage = true;
-      return currentMessage;
-    }
-    return msg;
-  });
+  const apiMessages = response.data
+    .map((msg) => {
+      if (msg.id === currentMessage.id) {
+        addedCurrentMessage = true;
+        return currentMessage;
+      }
+      return msg;
+    })
+    .sort((a, b) => new Date(a.created_at) - new Date(b.created_at));

   if (!addedCurrentMessage) {
     apiMessages.push(currentMessage);
@@ -496,6 +497,44 @@ const recordUsage = async ({
   );
 };
/**
* Safely replaces the annotated text within the specified range denoted by start_index and end_index,
* after verifying that the text within that range matches the given annotation text.
* Proceeds with the replacement even if a mismatch is found, but logs a warning.
*
* @param {string} originalText The original text content.
* @param {number} start_index The starting index where replacement should begin.
* @param {number} end_index The ending index where replacement should end.
* @param {string} expectedText The text expected to be found in the specified range.
* @param {string} replacementText The text to insert in place of the existing content.
* @returns {string} The text with the replacement applied, regardless of text match.
*/
function replaceAnnotation(originalText, start_index, end_index, expectedText, replacementText) {
if (start_index < 0 || end_index > originalText.length || start_index > end_index) {
logger.warn(`Invalid range specified for annotation replacement.
Attempting replacement with \`replace\` method instead...
length: ${originalText.length}
start_index: ${start_index}
end_index: ${end_index}`);
return originalText.replace(originalText, replacementText);
}
const actualTextInRange = originalText.substring(start_index, end_index);
if (actualTextInRange !== expectedText) {
logger.warn(`The text within the specified range does not match the expected annotation text.
Attempting replacement with \`replace\` method instead...
Expected: ${expectedText}
Actual: ${actualTextInRange}`);
return originalText.replace(originalText, replacementText);
}
const beforeText = originalText.substring(0, start_index);
const afterText = originalText.substring(end_index);
return beforeText + replacementText + afterText;
}
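A quick illustration of what the helper does with a typical sandbox-link annotation, assuming the indices reported by the API line up with the message text (all values below are made up):

```js
const originalText = 'Your file is ready: sandbox:/mnt/data/report.csv';
const annotation = {
  text: 'sandbox:/mnt/data/report.csv',
  start_index: 20,
  end_index: 48,
};

// Swap the sandbox link for the filepath saved on the processed file record.
const updated = replaceAnnotation(
  originalText,
  annotation.start_index,
  annotation.end_index,
  annotation.text,
  'https://api.openai.com/v1/files/example-user/file-abc123/report.csv',
);
// updated === 'Your file is ready: https://api.openai.com/v1/files/example-user/file-abc123/report.csv'
```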
/**
 * Sorts, processes, and flattens messages to a single string.
 *
@ -509,89 +548,90 @@ async function processMessages({ openai, client, messages = [] }) {
const sorted = messages.sort((a, b) => a.created_at - b.created_at); const sorted = messages.sort((a, b) => a.created_at - b.created_at);
let text = ''; let text = '';
let edited = false;
for (const message of sorted) { for (const message of sorted) {
message.files = []; message.files = [];
for (const content of message.content) { for (const content of message.content) {
const processImageFile = const type = content.type;
content.type === 'image_file' && !client.processedFileIds.has(content.image_file?.file_id); const contentType = content[type];
if (processImageFile) { const currentFileId = contentType?.file_id;
const { file_id } = content.image_file;
if (type === ContentTypes.IMAGE_FILE && !client.processedFileIds.has(currentFileId)) {
const file = await retrieveAndProcessFile({ const file = await retrieveAndProcessFile({
openai, openai,
client, client,
file_id, file_id: currentFileId,
basename: `${file_id}.png`, basename: `${currentFileId}.png`,
}); });
client.processedFileIds.add(file_id);
client.processedFileIds.add(currentFileId);
message.files.push(file); message.files.push(file);
continue; continue;
} }
text += (content.text?.value ?? '') + ' '; let currentText = contentType?.value ?? '';
logger.debug('[processMessages] Processing message:', { value: text });
/** @type {{ annotations: Annotation[] }} */
const { annotations } = contentType ?? {};
// Process annotations if they exist // Process annotations if they exist
if (!content.text?.annotations?.length) { if (!annotations?.length) {
text += currentText + ' ';
continue; continue;
} }
logger.debug('[processMessages] Processing annotations:', content.text.annotations); logger.debug('[processMessages] Processing annotations:', annotations);
for (const annotation of content.text.annotations) { for (const annotation of annotations) {
logger.debug('Current annotation:', annotation);
let file; let file;
const processFilePath = const type = annotation.type;
annotation.file_path && !client.processedFileIds.has(annotation.file_path?.file_id); const annotationType = annotation[type];
const file_id = annotationType?.file_id;
const alreadyProcessed = client.processedFileIds.has(file_id);
if (processFilePath) { if (alreadyProcessed) {
const basename = imageExtRegex.test(annotation.text) const { file_id } = annotationType || {};
? path.basename(annotation.text) file = await retrieveAndProcessFile({ openai, client, file_id, unknownType: true });
: null; } else if (type === AnnotationTypes.FILE_PATH) {
const basename = path.basename(annotation.text);
file = await retrieveAndProcessFile({ file = await retrieveAndProcessFile({
openai, openai,
client, client,
file_id: annotation.file_path.file_id, file_id,
basename, basename,
}); });
client.processedFileIds.add(annotation.file_path.file_id); } else if (type === AnnotationTypes.FILE_CITATION) {
}
const processFileCitation =
annotation.file_citation &&
!client.processedFileIds.has(annotation.file_citation?.file_id);
if (processFileCitation) {
file = await retrieveAndProcessFile({ file = await retrieveAndProcessFile({
openai, openai,
client, client,
file_id: annotation.file_citation.file_id, file_id,
unknownType: true, unknownType: true,
}); });
client.processedFileIds.add(annotation.file_citation.file_id);
} }
if (!file && (annotation.file_path || annotation.file_citation)) { if (file.filepath) {
const { file_id } = annotation.file_citation || annotation.file_path || {}; currentText = replaceAnnotation(
file = await retrieveAndProcessFile({ openai, client, file_id, unknownType: true }); currentText,
client.processedFileIds.add(file_id); annotation.start_index,
annotation.end_index,
annotation.text,
file.filepath,
);
edited = true;
} }
text += currentText + ' ';
if (!file) { if (!file) {
continue; continue;
} }
if (file.purpose && file.purpose === FilePurpose.Assistants) { client.processedFileIds.add(file_id);
text = text.replace(annotation.text, file.filename);
} else if (file.filepath) {
text = text.replace(annotation.text, file.filepath);
}
message.files.push(file); message.files.push(file);
} }
} }
} }
return { messages: sorted, text }; return { messages: sorted, text, edited };
} }
module.exports = { module.exports = {
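In short, `processMessages` now walks each annotation by its type (`file_path` or `file_citation`), retrieves and records the referenced file, and rewrites the annotated span in place via `replaceAnnotation`, flagging the result as `edited` so the stream handler does not append the text a second time. A compressed sketch of that per-annotation loop, assuming the surrounding variables from the diff above:

```js
// Compressed sketch of the new per-annotation handling (illustrative only).
for (const annotation of annotations) {
  const type = annotation.type;
  const file_id = annotation[type]?.file_id;

  const file = client.processedFileIds.has(file_id)
    ? await retrieveAndProcessFile({ openai, client, file_id, unknownType: true })
    : await retrieveAndProcessFile({
        openai,
        client,
        file_id,
        // file_path annotations carry a filename; citations are treated as unknown types.
        ...(type === AnnotationTypes.FILE_PATH
          ? { basename: path.basename(annotation.text) }
          : { unknownType: true }),
      });

  if (file?.filepath) {
    currentText = replaceAnnotation(
      currentText,
      annotation.start_index,
      annotation.end_index,
      annotation.text,
      file.filepath,
    );
    edited = true;
  }

  if (file) {
    client.processedFileIds.add(file_id);
    message.files.push(file);
  }
}
```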


@@ -436,7 +436,13 @@
 /**
  * @exports ThreadMessage
- * @typedef {import('openai').OpenAI.Beta.Threads.ThreadMessage} ThreadMessage
+ * @typedef {import('openai').OpenAI.Beta.Threads.Message} ThreadMessage
+ * @memberof typedefs
+ */
+
+/**
+ * @exports Annotation
+ * @typedef {import('openai').OpenAI.Beta.Threads.Messages.Annotation} Annotation
  * @memberof typedefs
  */


@@ -1,4 +1,4 @@
-import { memo } from 'react';
+import React, { memo, useMemo } from 'react';
 import remarkGfm from 'remark-gfm';
 import rehypeRaw from 'rehype-raw';
 import remarkMath from 'remark-math';
@@ -9,9 +9,11 @@ import ReactMarkdown from 'react-markdown';
 import rehypeHighlight from 'rehype-highlight';
 import type { TMessage } from 'librechat-data-provider';
 import type { PluggableList } from 'unified';
-import CodeBlock from '~/components/Messages/Content/CodeBlock';
 import { cn, langSubset, validateIframe, processLaTeX } from '~/utils';
-import { useChatContext } from '~/Providers';
+import CodeBlock from '~/components/Messages/Content/CodeBlock';
+import { useChatContext, useToastContext } from '~/Providers';
+import { useFileDownload } from '~/data-provider';
+import useLocalize from '~/hooks/useLocalize';
 import store from '~/store';

 type TCodeProps = {
@@ -37,6 +39,70 @@ export const code = memo(({ inline, className, children }: TCodeProps) => {
   }
 });
export const a = memo(({ href, children }: { href: string; children: React.ReactNode }) => {
const user = useRecoilValue(store.user);
const { showToast } = useToastContext();
const localize = useLocalize();
const { filepath, filename } = useMemo(() => {
const pattern = new RegExp(`(?:files|outputs)/${user?.id}/([^\\s]+)`);
const match = href.match(pattern);
if (match && match[0]) {
const path = match[0];
const name = path.split('/').pop();
return { filepath: path, filename: name };
}
return { filepath: '', filename: '' };
}, [user?.id, href]);
const { refetch: downloadFile } = useFileDownload(user?.id ?? '', filepath);
const props: { target?: string; onClick?: React.MouseEventHandler } = { target: '_new' };
if (!filepath || !filename) {
return (
<a href={href} {...props}>
{children}
</a>
);
}
const handleDownload = async (event: React.MouseEvent<HTMLAnchorElement>) => {
event.preventDefault();
try {
const stream = await downloadFile();
if (!stream.data) {
console.error('Error downloading file: No data found');
showToast({
status: 'error',
message: localize('com_ui_download_error'),
});
return;
}
const link = document.createElement('a');
link.href = stream.data;
link.setAttribute('download', filename);
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
window.URL.revokeObjectURL(stream.data);
} catch (error) {
console.error('Error downloading file:', error);
}
};
props.onClick = handleDownload;
props.target = '_blank';
return (
<a
href={filepath.startsWith('files/') ? `/api/${filepath}` : `/api/files/${filepath}`}
{...props}
>
{children}
</a>
);
});
export const p = memo(({ children }: { children: React.ReactNode }) => {
  return <p className="mb-2 whitespace-pre-wrap">{children}</p>;
});

@@ -98,6 +164,7 @@ const Markdown = memo(({ content, message, showCursor }: TContentProps) => {
       components={
         {
           code,
+          a,
           p,
         } as {
           [nodeType: string]: React.ElementType;


@@ -7,7 +7,7 @@ import ReactMarkdown from 'react-markdown';
 import rehypeHighlight from 'rehype-highlight';
 import type { PluggableList } from 'unified';
 import { langSubset } from '~/utils';
-import { code, p } from './Markdown';
+import { code, a, p } from './Markdown';

 const MarkdownLite = memo(({ content = '' }: { content?: string }) => {
   const rehypePlugins: PluggableList = [
@@ -30,6 +30,7 @@ const MarkdownLite = memo(({ content = '' }: { content?: string }) => {
       components={
         {
           code,
+          a,
           p,
         } as {
           [nodeType: string]: React.ElementType;


@@ -324,3 +324,21 @@ export const useGetAssistantDocsQuery = (
    },
  );
};
export const useFileDownload = (userId: string, filepath: string): QueryObserverResult<string> => {
return useQuery(
[QueryKeys.fileDownload, filepath],
async () => {
if (!userId) {
console.warn('No user ID provided for file download');
}
const blob = await dataService.getFileDownload(userId, filepath);
const downloadUrl = window.URL.createObjectURL(blob);
return downloadUrl;
},
{
enabled: false,
retry: false,
},
);
};


@@ -1,9 +1,10 @@
 import { ContentTypes } from 'librechat-data-provider';
 import type {
-  TSubmission,
+  Text,
   TMessage,
-  TContentData,
+  TSubmission,
   ContentPart,
+  TContentData,
   TMessageContentParts,
 } from 'librechat-data-provider';
 import { useCallback, useMemo } from 'react';
@@ -46,9 +47,9 @@ export default function useContentHandler({ setMessages, getMessages }: TUseCont
     }

     // TODO: handle streaming for non-text
-    const part: ContentPart = data[ContentTypes.TEXT]
-      ? { value: data[ContentTypes.TEXT] }
-      : data[type];
+    const textPart: Text | string = data[ContentTypes.TEXT];
+    const part: ContentPart =
+      textPart && typeof textPart === 'string' ? { value: textPart } : data[type];

     /* spreading the content array to avoid mutation */
     response.content = [...(response.content ?? [])];


@@ -46,6 +46,7 @@ export default {
   com_assistants_update_error: 'There was an error updating your assistant.',
   com_assistants_create_success: 'Successfully created',
   com_assistants_create_error: 'There was an error creating your assistant.',
+  com_ui_download_error: 'Error downloading file. The file may have been deleted.',
   com_ui_attach_error_type: 'Unsupported file type for endpoint:',
   com_ui_attach_error_size: 'File size limit exceeded for endpoint:',
   com_ui_attach_error:


@@ -94,16 +94,27 @@ LibreChat has built-in central logging, see [Logging System](../../features/logg
 - Keep debug logs active by default or disable them by setting `DEBUG_LOGGING=false` in the environment variable.
 - For more information about this feature, read our docs: **[Logging System](../../features/logging_system.md)**

+- Enable verbose file logs with `DEBUG_LOGGING=TRUE`.
+  - Note: can be used with either `DEBUG_CONSOLE` or `CONSOLE_JSON` but not both.
+
 ```bash
 DEBUG_LOGGING=true
 ```

-- Enable verbose server output in the console with `DEBUG_CONSOLE=TRUE`, though it's not recommended due to high verbosity.
+- Enable verbose console/stdout logs with `DEBUG_CONSOLE=TRUE` in the same format as file debug logs.
+  - Note: can be used in conjunction with `DEBUG_LOGGING` but not `CONSOLE_JSON`.

 ```bash
 DEBUG_CONSOLE=false
 ```

+- Enable verbose JSON console/stdout logs suitable for cloud deployments like GCP/AWS.
+  - Note: can be used in conjunction with `DEBUG_LOGGING` but not `DEBUG_CONSOLE`.
+
+```bash
+CONSOLE_JSON=false
+```
+
 This is not recommended, however, as the outputs can be quite verbose, and so it's disabled by default.

 ### Permission


@ -7,10 +7,6 @@ import * as s from './schemas';
import request from './request'; import request from './request';
import * as endpoints from './api-endpoints'; import * as endpoints from './api-endpoints';
export function getConversations(pageNumber: string): Promise<t.TGetConversationsResponse> {
return request.get(endpoints.conversations(pageNumber));
}
export function abortRequestWithMessage( export function abortRequestWithMessage(
endpoint: string, endpoint: string,
abortKey: string, abortKey: string,
@ -19,15 +15,6 @@ export function abortRequestWithMessage(
return request.post(endpoints.abortRequest(endpoint), { arg: { abortKey, message } }); return request.post(endpoints.abortRequest(endpoint), { arg: { abortKey, message } });
} }
export function deleteConversation(payload: t.TDeleteConversationRequest) {
//todo: this should be a DELETE request
return request.post(endpoints.deleteConversation(), { arg: payload });
}
export function clearAllConversations(): Promise<unknown> {
return request.post(endpoints.deleteConversation(), { arg: {} });
}
export function revokeUserKey(name: string): Promise<unknown> { export function revokeUserKey(name: string): Promise<unknown> {
return request.delete(endpoints.revokeUserKey(name)); return request.delete(endpoints.revokeUserKey(name));
} }
@ -43,20 +30,6 @@ export function getMessagesByConvoId(conversationId: string): Promise<s.TMessage
return request.get(endpoints.messages(conversationId)); return request.get(endpoints.messages(conversationId));
} }
export function getConversationById(id: string): Promise<s.TConversation> {
return request.get(endpoints.conversationById(id));
}
export function updateConversation(
payload: t.TUpdateConversationRequest,
): Promise<t.TUpdateConversationResponse> {
return request.post(endpoints.updateConversation(), { arg: payload });
}
export function genTitle(payload: m.TGenTitleRequest): Promise<m.TGenTitleResponse> {
return request.post(endpoints.genTitle(), payload);
}
export function updateMessage(payload: t.TUpdateMessageRequest): Promise<unknown> { export function updateMessage(payload: t.TUpdateMessageRequest): Promise<unknown> {
const { conversationId, messageId, text } = payload; const { conversationId, messageId, text } = payload;
if (!conversationId) { if (!conversationId) {
@ -103,13 +76,6 @@ export function getUserBalance(): Promise<string> {
return request.get(endpoints.balance()); return request.get(endpoints.balance());
} }
export const searchConversations = async (
q: string,
pageNumber: string,
): Promise<t.TSearchResults> => {
return request.get(endpoints.search(q, pageNumber));
};
export const updateTokenCount = (text: string) => { export const updateTokenCount = (text: string) => {
return request.post(endpoints.tokenizer(), { arg: text }); return request.post(endpoints.tokenizer(), { arg: text });
}; };
@ -196,6 +162,10 @@ export const listAssistants = (
return request.get(endpoints.assistants(), { params }); return request.get(endpoints.assistants(), { params });
}; };
export function getAssistantDocs(): Promise<a.AssistantDocument[]> {
return request.get(endpoints.assistants('documents'));
}
/* Tools */ /* Tools */
export const getAvailableTools = (): Promise<s.TPlugin[]> => { export const getAvailableTools = (): Promise<s.TPlugin[]> => {
@@ -231,19 +201,13 @@ export const uploadAssistantAvatar = (data: m.AssistantAvatarVariables): Promise
   );
 };

-export const updateAction = (data: m.UpdateActionVariables): Promise<m.UpdateActionResponse> => {
-  const { assistant_id, ...body } = data;
-  return request.post(endpoints.assistants(`actions/${assistant_id}`), body);
-};
-
-export function getActions(): Promise<a.Action[]> {
-  return request.get(endpoints.assistants('actions'));
-}
-
-export function getAssistantDocs(): Promise<a.AssistantDocument[]> {
-  return request.get(endpoints.assistants('documents'));
-}
+export const getFileDownload = async (userId: string, filepath: string): Promise<Blob> => {
+  const encodedFilePath = encodeURIComponent(filepath);
+  return request.get(`${endpoints.files()}/download/${userId}/${encodedFilePath}`, {
+    responseType: 'blob',
+  });
+};
export const deleteFiles = async ( export const deleteFiles = async (
files: f.BatchFile[], files: f.BatchFile[],
assistant_id?: string, assistant_id?: string,
@ -252,8 +216,35 @@ export const deleteFiles = async (
data: { files, assistant_id }, data: { files, assistant_id },
}); });
/* actions */
export const updateAction = (data: m.UpdateActionVariables): Promise<m.UpdateActionResponse> => {
const { assistant_id, ...body } = data;
return request.post(endpoints.assistants(`actions/${assistant_id}`), body);
};
export function getActions(): Promise<a.Action[]> {
return request.get(endpoints.assistants('actions'));
}
export const deleteAction = async (
assistant_id: string,
action_id: string,
model: string,
): Promise<void> =>
request.delete(endpoints.assistants(`actions/${assistant_id}/${action_id}/${model}`));
/* conversations */ /* conversations */
export function deleteConversation(payload: t.TDeleteConversationRequest) {
//todo: this should be a DELETE request
return request.post(endpoints.deleteConversation(), { arg: payload });
}
export function clearAllConversations(): Promise<unknown> {
return request.post(endpoints.deleteConversation(), { arg: {} });
}
export const listConversations = ( export const listConversations = (
params?: q.ConversationListParams, params?: q.ConversationListParams,
): Promise<q.ConversationListResponse> => { ): Promise<q.ConversationListResponse> => {
@ -275,9 +266,27 @@ export const listConversationsByQuery = (
} }
}; };
export const deleteAction = async ( export const searchConversations = async (
assistant_id: string, q: string,
action_id: string, pageNumber: string,
model: string, ): Promise<t.TSearchResults> => {
): Promise<void> => return request.get(endpoints.search(q, pageNumber));
request.delete(endpoints.assistants(`actions/${assistant_id}/${action_id}/${model}`)); };
export function getConversations(pageNumber: string): Promise<t.TGetConversationsResponse> {
return request.get(endpoints.conversations(pageNumber));
}
export function getConversationById(id: string): Promise<s.TConversation> {
return request.get(endpoints.conversationById(id));
}
export function updateConversation(
payload: t.TUpdateConversationRequest,
): Promise<t.TUpdateConversationResponse> {
return request.post(endpoints.updateConversation(), { arg: payload });
}
export function genTitle(payload: m.TGenTitleRequest): Promise<m.TGenTitleResponse> {
return request.post(endpoints.genTitle(), payload);
}


@@ -22,6 +22,7 @@ export enum QueryKeys {
   tools = 'tools',
   actions = 'actions',
   assistantDocs = 'assistantDocs',
+  fileDownload = 'fileDownload',
 }

 export enum MutationKeys {


@@ -182,6 +182,11 @@ export type Text = {
   value: string;
 };

+export enum AnnotationTypes {
+  FILE_CITATION = 'file_citation',
+  FILE_PATH = 'file_path',
+}
+
 export enum ContentTypes {
   TEXT = 'text',
   TOOL_CALL = 'tool_call',
@@ -246,7 +251,10 @@ export type TMessageContentParts =
   | { type: ContentTypes.IMAGE_FILE; image_file: ImageFile & PartMetadata };

 export type StreamContentData = TMessageContentParts & {
+  /** The index of the current content part */
   index: number;
+  /** The current text content was already served but edited to replace elements therein */
+  edited?: boolean;
 };

 export type TContentData = StreamContentData & {