feat: Vision Support + New UI (#1203)

* feat: add timer duration to showToast, show toast for preset selection

* refactor: replace old /chat/ route with /c/. e2e tests will fail here

* refactor: move typedefs to root of /api/ and add a few to assistant types in TS

* refactor: reorganize data-provider imports, fix dependency cycle, strategize a new plan to separate react-dependent packages

* feat: add dataService for uploading images

* feat(data-provider): add mutation keys

* feat: file resizing and upload

* WIP: initial API image handling

* fix: catch JSON.parse of localStorage tools

* chore: experimental: use module-alias for absolute imports

* refactor: change temp_file_id strategy

* fix: update files state using a Map and define react-query callbacks so they persist through component unmount; initial delete handling

* feat: properly handle file deletion

* refactor: unexpose complete filepath and resize from server for higher fidelity

* fix: make sure resized height, width is saved, catch bad requests

* refactor: use absolute imports

* fix: prevent setOptions from being called more than once for OpenAIClient; noted the same fix is needed for PluginsClient

* refactor: import supportsFiles and models vars from schemas

* fix: correctly replace temp file id

* refactor(BaseClient): use absolute imports, pass message 'opts' to buildMessages method, count tokens for nested objects/arrays

* feat: add validateVisionModel to determine if model has vision capabilities

* chore(checkBalance): update jsdoc

* feat: formatVisionMessage: change message content format depending on the role and image_urls passed

* refactor: add usage to File schema, add createFile and updateFile, correctly set and remove TTL

* feat: working vision support
TODO: file size, type, amount validations, making sure they are styled right, and making sure you can add images from the clipboard/dragging

* feat: clipboard support for uploading images

* feat: handle files on drop to screen, refactor top-level view code into a Presentation component so the useDragHelpers hook has ChatContext

* fix(Images): replace uploaded images in place

* feat: add filepath validation to protect sensitive files

* fix: ensure correct file_ids are pushed and not the Map key values

* fix(ToastContext): type issue

* feat: add basic file validation

* fix(useDragHelpers): correct context issue with `files` dependency

* refactor: consolidate setErrors logic to setError

* feat: add dialog Image overlay on image click

* fix: close endpoints menu on click

* chore: set detail to auto, make note for configuration

* fix: react warning (button descendant of button)

* refactor: optimize filepath handling, pass file_ids to images for easier re-use

* refactor: optimize image file handling, allow re-using files in regen, pass more file metadata in messages

* feat: lazy loading images including use of upload preview

* fix: SetKeyDialog closing, stopPropagation on Dialog content click

* style(EndpointMenuItem): tighten up the style, fix dark theme showing in light mode, make menu more UX-friendly

* style: change max-height of all settings textareas to 138px from 300px

* style: better styling for textarea and enclosing buttons

* refactor(PresetItems): swap back edit and delete icons

* feat: make textarea placeholder dynamic to endpoint

* style: show user hover buttons only on hover when message is streaming

* fix: ordered list not going past 9, fix css

* feat: add User/AI labels; style: hide loading spinner

* feat: add back custom footer, change original footer text

* feat: dynamic landing icons based on endpoint

* chore: comment out assistants route

* fix: autoScroll to newest on /c/ view

* fix: Export Conversation on new UI

* style: match message style of official more closely

* ci: fix api jest unit tests, comment out e2e tests for now as they will fail until addressed

* feat: more file validation and use blob in preview field, not filepath, to fix temp deletion

* feat: fileFilter for multer

* feat: better AI labels based on custom name, model, and endpoint instead of `ChatGPT`
Committed by Danny Avila on 2023-11-21 20:12:48 -05:00 (via GitHub)
commit 317cdd3f77 (parent 345f4b2e85)
113 changed files with 2680 additions and 675 deletions


@@ -8,7 +8,7 @@ const {
userProvidedOpenAI,
palmKey,
openAI,
assistant,
// assistant,
azureOpenAI,
bingAI,
chatGPTBrowser,
@@ -57,7 +57,7 @@ async function endpointController(req, res) {
res.send(
JSON.stringify({
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.assistant]: assistant,
// [EModelEndpoint.assistant]: assistant,
[EModelEndpoint.azureOpenAI]: azureOpenAI,
[EModelEndpoint.google]: google,
[EModelEndpoint.bingAI]: bingAI,


@@ -1,12 +1,15 @@
const express = require('express');
const mongoSanitize = require('express-mongo-sanitize');
const { connectDb, indexSync } = require('../lib/db');
const path = require('path');
require('module-alias')({ base: path.resolve(__dirname, '..') });
const cors = require('cors');
const routes = require('./routes');
const errorController = require('./controllers/ErrorController');
const express = require('express');
const passport = require('passport');
const mongoSanitize = require('express-mongo-sanitize');
const errorController = require('./controllers/ErrorController');
const configureSocialLogins = require('./socialLogins');
const { connectDb, indexSync } = require('../lib/db');
const config = require('../config');
const routes = require('./routes');
const { PORT, HOST, ALLOW_SOCIAL_LOGIN } = process.env ?? {};
const port = Number(PORT) || 3080;
@@ -20,6 +23,7 @@ const startServer = async () => {
await indexSync();
const app = express();
app.locals.config = config;
// Middleware
app.use(errorController);
@@ -65,6 +69,7 @@ const startServer = async () => {
app.use('/api/plugins', routes.plugins);
app.use('/api/config', routes.config);
app.use('/api/assistants', routes.assistants);
app.use('/api/files', routes.files);
// Static files
app.get('/*', function (req, res) {
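
The `~` prefix used by the absolute imports throughout this diff resolves via module-alias, registered above with `base` pointing at the api folder; module-alias reads the alias map from that folder's package.json (under _moduleAliases), so the snippet below is only an assumed programmatic equivalent:

// Assumed equivalent of declaring "_moduleAliases": { "~": "." } in the api package.json
const path = require('path');
const moduleAlias = require('module-alias');
moduleAlias.addAlias('~', path.resolve(__dirname, '..')); // '~/models' -> '<api>/models', etc.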


@@ -1,19 +1,24 @@
const openAI = require('../routes/endpoints/openAI');
const gptPlugins = require('../routes/endpoints/gptPlugins');
const anthropic = require('../routes/endpoints/anthropic');
const { parseConvo } = require('../routes/endpoints/schemas');
const openAI = require('~/server/routes/endpoints/openAI');
const gptPlugins = require('~/server/routes/endpoints/gptPlugins');
const anthropic = require('~/server/routes/endpoints/anthropic');
const { parseConvo, EModelEndpoint } = require('~/server/routes/endpoints/schemas');
const { processFiles } = require('~/server/services/Files');
const buildFunction = {
openAI: openAI.buildOptions,
azureOpenAI: openAI.buildOptions,
gptPlugins: gptPlugins.buildOptions,
anthropic: anthropic.buildOptions,
[EModelEndpoint.openAI]: openAI.buildOptions,
[EModelEndpoint.azureOpenAI]: openAI.buildOptions,
[EModelEndpoint.gptPlugins]: gptPlugins.buildOptions,
[EModelEndpoint.anthropic]: anthropic.buildOptions,
};
function buildEndpointOption(req, res, next) {
const { endpoint } = req.body;
const parsedBody = parseConvo(endpoint, req.body);
req.body.endpointOption = buildFunction[endpoint](endpoint, parsedBody);
if (req.body.files) {
// hold the promise
req.body.endpointOption.attachments = processFiles(req.body.files);
}
next();
}
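
The middleware intentionally holds the pending processFiles promise on endpointOption.attachments instead of awaiting it, so the file-usage updates run while the rest of the request is set up; a minimal sketch of how a downstream consumer might resolve it (illustrative only — the consuming client code is not in this hunk):

// Hypothetical consumer inside the client's message-building path
async function resolveAttachments(endpointOption) {
  if (!endpointOption.attachments) {
    return [];
  }
  // cheap if the promise already settled while other setup ran
  return await endpointOption.attachments;
}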


@@ -1,9 +1,9 @@
const express = require('express');
const router = express.Router();
const { getResponseSender } = require('../endpoints/schemas');
const { sendMessage, createOnProgress } = require('../../utils');
const { addTitle, initializeClient } = require('../endpoints/openAI');
const { saveMessage, getConvoTitle, getConvo } = require('../../../models');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { saveMessage, getConvoTitle, getConvo } = require('~/models');
const { getResponseSender } = require('~/server/routes/endpoints/schemas');
const { addTitle, initializeClient } = require('~/server/routes/endpoints/openAI');
const {
handleAbort,
createAbortController,
@@ -11,7 +11,7 @@ const {
setHeaders,
validateEndpoint,
buildEndpointOption,
} = require('../../middleware');
} = require('~/server/middleware');
router.post('/abort', handleAbort());
@@ -93,8 +93,7 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
try {
const { client } = await initializeClient({ req, res, endpointOption });
let response = await client.sendMessage(text, {
const messageOptions = {
user,
parentMessageId,
conversationId,
@@ -108,7 +107,9 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
text,
parentMessageId: overrideParentMessageId || userMessageId,
}),
});
};
let response = await client.sendMessage(text, messageOptions);
if (overrideParentMessageId) {
response.parentMessageId = overrideParentMessageId;
@@ -118,7 +119,10 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
response = { ...response, ...metadata };
}
await saveMessage({ ...response, user });
if (client.options.attachments) {
userMessage.files = client.options.attachments;
delete userMessage.image_urls;
}
sendMessage(res, {
title: await getConvoTitle(user, conversationId),
@@ -129,6 +133,9 @@ router.post('/', validateEndpoint, buildEndpointOption, setHeaders, async (req,
});
res.end();
await saveMessage({ ...response, user });
await saveMessage(userMessage);
if (parentMessageId === '00000000-0000-0000-0000-000000000000' && newConvo) {
addTitle(req, {
text,


@@ -1,7 +1,7 @@
const { OpenAIClient } = require('../../../../app');
const { isEnabled } = require('../../../utils');
const { getAzureCredentials } = require('../../../../utils');
const { getUserKey, checkUserKeyExpiry } = require('../../../services/UserService');
const { OpenAIClient } = require('~/app');
const { isEnabled } = require('~/server/utils');
const { getAzureCredentials } = require('~/utils');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const initializeClient = async ({ req, res, endpointOption }) => {
const {


@@ -11,6 +11,41 @@ const EModelEndpoint = {
assistant: 'assistant',
};
const alternateName = {
[EModelEndpoint.openAI]: 'OpenAI',
[EModelEndpoint.assistant]: 'Assistants',
[EModelEndpoint.azureOpenAI]: 'Azure OpenAI',
[EModelEndpoint.bingAI]: 'Bing',
[EModelEndpoint.chatGPTBrowser]: 'ChatGPT',
[EModelEndpoint.gptPlugins]: 'Plugins',
[EModelEndpoint.google]: 'PaLM',
[EModelEndpoint.anthropic]: 'Anthropic',
};
const supportsFiles = {
[EModelEndpoint.openAI]: true,
[EModelEndpoint.assistant]: true,
};
const openAIModels = [
'gpt-3.5-turbo-16k-0613',
'gpt-3.5-turbo-16k',
'gpt-4-1106-preview',
'gpt-3.5-turbo',
'gpt-3.5-turbo-1106',
'gpt-4-vision-preview',
'gpt-4',
'gpt-3.5-turbo-instruct-0914',
'gpt-3.5-turbo-0613',
'gpt-3.5-turbo-0301',
'gpt-3.5-turbo-instruct',
'gpt-4-0613',
'text-davinci-003',
'gpt-4-0314',
];
const visionModels = ['gpt-4-vision', 'llava-13b'];
const eModelEndpointSchema = z.nativeEnum(EModelEndpoint);
const tPluginAuthConfigSchema = z.object({
@@ -321,7 +356,7 @@ const parseConvo = (endpoint, conversation, possibleValues) => {
};
const getResponseSender = (endpointOption) => {
const { endpoint, chatGptLabel, modelLabel, jailbreak } = endpointOption;
const { model, endpoint, chatGptLabel, modelLabel, jailbreak } = endpointOption;
if (
[
@@ -331,7 +366,14 @@ const getResponseSender = (endpointOption) => {
EModelEndpoint.chatGPTBrowser,
].includes(endpoint)
) {
return chatGptLabel ?? 'ChatGPT';
if (chatGptLabel) {
return chatGptLabel;
} else if (model && model.includes('gpt-3')) {
return 'GPT-3.5';
} else if (model && model.includes('gpt-4')) {
return 'GPT-4';
}
return alternateName[endpoint] ?? 'ChatGPT';
}
if (endpoint === EModelEndpoint.bingAI) {
@@ -353,4 +395,8 @@ module.exports = {
parseConvo,
getResponseSender,
EModelEndpoint,
supportsFiles,
openAIModels,
visionModels,
alternateName,
};
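
A few illustrative results of the label resolution above, assuming the openAI endpoint is among those checked in the elided include list:

getResponseSender({ endpoint: EModelEndpoint.openAI, chatGptLabel: 'My Bot' }); // 'My Bot'
getResponseSender({ endpoint: EModelEndpoint.openAI, model: 'gpt-3.5-turbo' }); // 'GPT-3.5'
getResponseSender({ endpoint: EModelEndpoint.openAI, model: 'gpt-4-vision-preview' }); // 'GPT-4'
getResponseSender({ endpoint: EModelEndpoint.azureOpenAI }); // 'Azure OpenAI' (alternateName fallback)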


@@ -0,0 +1,58 @@
const { z } = require('zod');
const fs = require('fs').promises;
const express = require('express');
const { deleteFiles } = require('~/models');
const path = require('path');
const router = express.Router();
const isUUID = z.string().uuid();
const isValidPath = (base, subfolder, filepath) => {
const normalizedBase = path.resolve(base, subfolder, 'temp');
const normalizedFilepath = path.resolve(filepath);
return normalizedFilepath.startsWith(normalizedBase);
};
const deleteFile = async (req, file) => {
const { publicPath } = req.app.locals.config;
const parts = file.filepath.split(path.sep);
const subfolder = parts[1];
const filepath = path.join(publicPath, file.filepath);
if (!isValidPath(publicPath, subfolder, filepath)) {
throw new Error('Invalid file path');
}
await fs.unlink(filepath);
};
router.delete('/', async (req, res) => {
try {
const { files: _files } = req.body;
const files = _files.filter((file) => {
if (!file.file_id) {
return false;
}
if (!file.filepath) {
return false;
}
return isUUID.safeParse(file.file_id).success;
});
const file_ids = files.map((file) => file.file_id);
const promises = [];
promises.push(await deleteFiles(file_ids));
for (const file of files) {
promises.push(deleteFile(req, file));
}
await Promise.all(promises);
res.status(200).json({ message: 'Files deleted successfully' });
} catch (error) {
console.error('Error deleting files:', error);
res.status(400).json({ message: 'Error in request', error: error.message });
}
});
module.exports = router;
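
A minimal sketch of the matching client call, assuming the router is mounted at /api/files as shown in the server setup above (the UUID and filepath values are illustrative):

// Hypothetical client-side call; file_id must be a UUID and filepath must resolve under the public temp folder
await fetch('/api/files', {
  method: 'DELETE',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    files: [
      {
        file_id: '550e8400-e29b-41d4-a716-446655440000',
        filepath: '/images/temp/img-550e8400-e29b-41d4-a716-446655440000.webp',
      },
    ],
  }),
});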


@@ -0,0 +1,58 @@
const { z } = require('zod');
const fs = require('fs').promises;
const express = require('express');
const upload = require('./multer');
const { localStrategy } = require('~/server/services/Files');
const router = express.Router();
router.post('/', upload.single('file'), async (req, res) => {
const file = req.file;
const metadata = req.body;
// TODO: add file size/type validation
const uuidSchema = z.string().uuid();
try {
if (!file) {
throw new Error('No file provided');
}
if (!metadata.file_id) {
throw new Error('No file_id provided');
}
if (!metadata.width) {
throw new Error('No width provided');
}
if (!metadata.height) {
throw new Error('No height provided');
}
/* parse to validate api call */
uuidSchema.parse(metadata.file_id);
metadata.temp_file_id = metadata.file_id;
metadata.file_id = req.file_id;
await localStrategy({ res, file, metadata });
} catch (error) {
console.error('Error processing file:', error);
try {
await fs.unlink(file.path);
} catch (error) {
console.error('Error deleting file:', error);
}
res.status(500).json({ message: 'Error processing file' });
}
// do this if strategy is not local
// finally {
// try {
// // await fs.unlink(file.path);
// } catch (error) {
// console.error('Error deleting file:', error);
// }
// }
});
module.exports = router;


@@ -0,0 +1,22 @@
const express = require('express');
const router = express.Router();
const {
uaParser,
checkBan,
requireJwtAuth,
// concurrentLimiter,
// messageIpLimiter,
// messageUserLimiter,
} = require('../../middleware');
const files = require('./files');
const images = require('./images');
router.use(requireJwtAuth);
router.use(checkBan);
router.use(uaParser);
router.use('/', files);
router.use('/images', images);
module.exports = router;
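
With these mounts and the /api/files mount in the server setup, image uploads presumably land on POST /api/files/images as multipart form data; a minimal client-side sketch matching the fields the upload handler reads (resizedBlob, width, and height are illustrative names from the client-side resize step):

const form = new FormData();
form.append('file', resizedBlob, 'image.webp'); // resized image blob produced on the client
form.append('file_id', crypto.randomUUID());    // temporary client id; the server swaps in its own file_id
form.append('width', String(width));
form.append('height', String(height));
await fetch('/api/files/images', { method: 'POST', body: form });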


@@ -0,0 +1,41 @@
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
const multer = require('multer');
const supportedTypes = ['image/jpeg', 'image/jpg', 'image/png', 'image/webp'];
const sizeLimit = 20 * 1024 * 1024; // 20 MB
const storage = multer.diskStorage({
destination: function (req, file, cb) {
const outputPath = path.join(req.app.locals.config.imageOutput, 'temp');
if (!fs.existsSync(outputPath)) {
fs.mkdirSync(outputPath, { recursive: true });
}
cb(null, outputPath);
},
filename: function (req, file, cb) {
req.file_id = crypto.randomUUID();
const fileExt = path.extname(file.originalname);
cb(null, `img-${req.file_id}${fileExt}`);
},
});
const fileFilter = (req, file, cb) => {
if (!supportedTypes.includes(file.mimetype)) {
return cb(
new Error('Unsupported file type. Only JPEG, JPG, PNG, and WEBP files are allowed.'),
false,
);
}
if (file.size > sizeLimit) {
return cb(new Error(`File size exceeds ${sizeLimit / 1024 / 1024} MB.`), false);
}
cb(null, true);
};
const upload = multer({ storage, fileFilter });
module.exports = upload;


@@ -16,6 +16,7 @@ const plugins = require('./plugins');
const user = require('./user');
const config = require('./config');
const assistants = require('./assistants');
const files = require('./files');
module.exports = {
search,
@@ -36,4 +37,5 @@ module.exports = {
plugins,
config,
assistants,
files,
};


@@ -1,21 +1,5 @@
const RunManager = require('./Runs/RunMananger');
/**
* @typedef {import('openai').OpenAI} OpenAI
* @typedef {import('openai').OpenAI.Beta.Threads.ThreadMessage} ThreadMessage
* @typedef {import('openai').OpenAI.Beta.Threads.RequiredActionFunctionToolCall} RequiredActionFunctionToolCall
* @typedef {import('./Runs/RunManager').RunManager} RunManager
*/
/**
* @typedef {Object} Thread
* @property {string} id - The identifier of the thread.
* @property {string} object - The object type, always 'thread'.
* @property {number} created_at - The Unix timestamp (in seconds) for when the thread was created.
* @property {Object} [metadata] - Optional metadata associated with the thread.
* @property {Message[]} [messages] - An array of messages associated with the thread.
*/
/**
* @typedef {Object} Message
* @property {string} id - The identifier of the message.
@@ -247,27 +231,6 @@ async function waitForRun({ openai, run_id, thread_id, runManager, pollIntervalM
return run;
}
/**
* @typedef {Object} AgentAction
* @property {string} tool - The name of the tool used.
* @property {string} toolInput - The input provided to the tool.
* @property {string} log - A log or message associated with the action.
*/
/**
* @typedef {Object} AgentFinish
* @property {Record<string, any>} returnValues - The return values of the agent's execution.
* @property {string} log - A log or message associated with the finish.
*/
/**
* @typedef {AgentFinish & { run_id: string; thread_id: string; }} OpenAIAssistantFinish
*/
/**
* @typedef {AgentAction & { toolCallId: string; run_id: string; thread_id: string; }} OpenAIAssistantAction
*/
/**
* Retrieves the response from an OpenAI run.
*


@@ -0,0 +1,17 @@
const path = require('path');
const sharp = require('sharp');
const fs = require('fs').promises;
const { resizeImage } = require('./resize');
async function convertToWebP(inputFilePath, resolution = 'high') {
const { buffer: resizedBuffer, width, height } = await resizeImage(inputFilePath, resolution);
const outputFilePath = inputFilePath.replace(/\.[^/.]+$/, '') + '.webp';
const data = await sharp(resizedBuffer).toFormat('webp').toBuffer();
await fs.writeFile(outputFilePath, data);
const bytes = Buffer.byteLength(data);
const filepath = path.posix.join('/', 'images', 'temp', path.basename(outputFilePath));
await fs.unlink(inputFilePath);
return { filepath, bytes, width, height };
}
module.exports = { convertToWebP };


@@ -0,0 +1,80 @@
const fs = require('fs');
const path = require('path');
const { updateFile } = require('~/models');
function encodeImage(imagePath) {
return new Promise((resolve, reject) => {
fs.readFile(imagePath, (err, data) => {
if (err) {
reject(err);
} else {
resolve(data.toString('base64'));
}
});
});
}
async function encodeAndMove(req, file) {
const { publicPath, imageOutput } = req.app.locals.config;
const userPath = path.join(imageOutput, req.user.id);
if (!fs.existsSync(userPath)) {
fs.mkdirSync(userPath, { recursive: true });
}
const filepath = path.join(publicPath, file.filepath);
if (!filepath.includes('temp')) {
const base64 = await encodeImage(filepath);
return [file, base64];
}
const newPath = path.join(userPath, path.basename(file.filepath));
await fs.promises.rename(filepath, newPath);
const newFilePath = path.posix.join('/', 'images', req.user.id, path.basename(file.filepath));
const promises = [];
promises.push(updateFile({ file_id: file.file_id, filepath: newFilePath }));
promises.push(encodeImage(newPath));
return await Promise.all(promises);
}
async function encodeAndFormat(req, files) {
const promises = [];
for (let file of files) {
promises.push(encodeAndMove(req, file));
}
// TODO: make detail configurable, as of now resizing is done
// to prefer "high" but "low" may be used if the image is small enough
const detail = req.body.detail ?? 'auto';
const encodedImages = await Promise.all(promises);
const result = {
files: [],
image_urls: [],
};
for (const [file, base64] of encodedImages) {
result.image_urls.push({
type: 'image_url',
image_url: {
url: `data:image/webp;base64,${base64}`,
detail,
},
});
result.files.push({
file_id: file.file_id,
filepath: file.filepath,
filename: file.filename,
type: file.type,
height: file.height,
width: file.width,
});
}
return result;
}
module.exports = {
encodeImage,
encodeAndFormat,
};
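
The image_urls entries built above already follow the OpenAI vision content-part shape, so the consuming client can presumably assemble a user message by mixing a text part with them (a sketch only; the consuming code is not part of this hunk, and `text` is illustrative):

const { files, image_urls } = await encodeAndFormat(req, attachments);
const userContent = [
  { type: 'text', text }, // the user's prompt text
  ...image_urls,          // data-URL image parts with the chosen detail level
];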


@@ -0,0 +1,11 @@
const convert = require('./convert');
const encode = require('./encode');
const resize = require('./resize');
const validate = require('./validate');
module.exports = {
...convert,
...encode,
...resize,
...validate,
};


@@ -0,0 +1,52 @@
const sharp = require('sharp');
async function resizeImage(inputFilePath, resolution) {
const maxLowRes = 512;
const maxShortSideHighRes = 768;
const maxLongSideHighRes = 2000;
let newWidth, newHeight;
let resizeOptions = { fit: 'inside', withoutEnlargement: true };
if (resolution === 'low') {
resizeOptions.width = maxLowRes;
resizeOptions.height = maxLowRes;
} else if (resolution === 'high') {
const metadata = await sharp(inputFilePath).metadata();
const isWidthShorter = metadata.width < metadata.height;
if (isWidthShorter) {
// Width is the shorter side
newWidth = Math.min(metadata.width, maxShortSideHighRes);
// Calculate new height to maintain aspect ratio
newHeight = Math.round((metadata.height / metadata.width) * newWidth);
// Ensure the long side does not exceed the maximum allowed
if (newHeight > maxLongSideHighRes) {
newHeight = maxLongSideHighRes;
newWidth = Math.round((metadata.width / metadata.height) * newHeight);
}
} else {
// Height is the shorter side
newHeight = Math.min(metadata.height, maxShortSideHighRes);
// Calculate new width to maintain aspect ratio
newWidth = Math.round((metadata.width / metadata.height) * newHeight);
// Ensure the long side does not exceed the maximum allowed
if (newWidth > maxLongSideHighRes) {
newWidth = maxLongSideHighRes;
newHeight = Math.round((metadata.height / metadata.width) * newWidth);
}
}
resizeOptions.width = newWidth;
resizeOptions.height = newHeight;
} else {
throw new Error('Invalid resolution parameter');
}
const resizedBuffer = await sharp(inputFilePath).resize(resizeOptions).toBuffer();
const resizedMetadata = await sharp(resizedBuffer).metadata();
return { buffer: resizedBuffer, width: resizedMetadata.width, height: resizedMetadata.height };
}
module.exports = { resizeImage };
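
A worked example of the 'high' path above: for a 4000x1000 source, the height (1000) is the shorter side and is capped at 768, implying a width of 3072; that exceeds the 2000 long-side cap, so the final output is 2000x500 (the input filename is illustrative):

const { width, height } = await resizeImage('wide-banner.png', 'high');
// width === 2000, height === 500 for the 4000x1000 example described above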


@@ -0,0 +1,13 @@
const { visionModels } = require('~/server/routes/endpoints/schemas');
function validateVisionModel(model) {
if (!model) {
return false;
}
return visionModels.some((visionModel) => model.includes(visionModel));
}
module.exports = {
validateVisionModel,
};
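
Matching is by substring against the visionModels list, so versioned model names pass as well:

validateVisionModel('gpt-4-vision-preview'); // true ('gpt-4-vision' is a substring)
validateVisionModel('llava-13b');            // true
validateVisionModel('gpt-4-0613');           // false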


@@ -0,0 +1,9 @@
const localStrategy = require('./localStrategy');
const process = require('./process');
const save = require('./save');
module.exports = {
...save,
...process,
localStrategy,
};


@@ -0,0 +1,34 @@
const { createFile } = require('~/models');
const { convertToWebP } = require('./images/convert');
/**
* Applies the local strategy for image uploads.
* Saves file metadata to the database with an expiry TTL.
* Files must be deleted from the server filesystem manually.
*
* @param {Object} params - The parameters object.
* @param {Express.Response} params.res - The Express response object.
* @param {Express.Multer.File} params.file - The uploaded file.
* @param {ImageMetadata} params.metadata - Additional metadata for the file.
* @returns {Promise<void>}
*/
const localStrategy = async ({ res, file, metadata }) => {
const { file_id, temp_file_id } = metadata;
const { filepath, bytes, width, height } = await convertToWebP(file.path);
const result = await createFile(
{
file_id,
temp_file_id,
bytes,
filepath,
filename: file.originalname,
type: 'image/webp',
width,
height,
},
true,
);
res.status(200).json({ message: 'File uploaded and processed successfully', ...result });
};
module.exports = localStrategy;


@@ -0,0 +1,29 @@
const { updateFileUsage } = require('~/models');
// const mapImageUrls = (files, detail) => {
// return files
// .filter((file) => file.type.includes('image'))
// .map((file) => ({
// type: 'image_url',
// image_url: {
// /* Temporarily set to path to encode later */
// url: file.filepath,
// detail,
// },
// }));
// };
const processFiles = async (files) => {
const promises = [];
for (let file of files) {
const { file_id } = file;
promises.push(updateFileUsage({ file_id }));
}
// TODO: calculate token cost when image is first uploaded
return await Promise.all(promises);
};
module.exports = {
processFiles,
};


@@ -0,0 +1,47 @@
const fs = require('fs');
const path = require('path');
/**
* Saves a file to a specified output path with a new filename.
*
* @param {Express.Multer.File} file - The file object to be saved. Should contain properties like 'originalname' and 'path'.
* @param {string} outputPath - The path where the file should be saved.
* @param {string} outputFilename - The new filename for the saved file (without extension).
* @returns {Promise<string>} The full path of the saved file.
* @throws Will throw an error if the file saving process fails.
*/
async function saveFile(file, outputPath, outputFilename) {
try {
if (!fs.existsSync(outputPath)) {
fs.mkdirSync(outputPath, { recursive: true });
}
const fileExtension = path.extname(file.originalname);
const filenameWithExt = outputFilename + fileExtension;
const outputFilePath = path.join(outputPath, filenameWithExt);
fs.copyFileSync(file.path, outputFilePath);
fs.unlinkSync(file.path);
return outputFilePath;
} catch (error) {
console.error('Error while saving the file:', error);
throw error;
}
}
/**
* Saves an uploaded image file to a specified directory based on the user's ID and a filename.
*
* @param {Express.Request} req - The Express request object, containing the user's information and app configuration.
* @param {Express.Multer.File} file - The uploaded file object.
* @param {string} filename - The new filename to assign to the saved image (without extension).
* @returns {Promise<void>}
* @throws Will throw an error if the image saving process fails.
*/
const saveLocalImage = async (req, file, filename) => {
const imagePath = req.app.locals.config.imageOutput;
const outputPath = path.join(imagePath, req.user.id ?? '');
await saveFile(file, outputPath, filename);
};
module.exports = { saveFile, saveLocalImage };