Mirror of https://github.com/danny-avila/LibreChat.git, synced 2026-01-05 01:58:50 +01:00
🚀 Feat: Streamline File Strategies & GPT-4-Vision Settings (#1535)
* chore: fix `endpoint` typescript issues and a typo in a console info message
* feat(api): files GET endpoint; save only file_id references to messages
* refactor(client): `useGetFiles` query hook, updated file types, optimistic update of filesQuery on file upload
* refactor(buildTree): use a params object and accept fileMap
* feat: map files to messages; refactor(ChatView): render messages only after files are fetched
* fix: fetch files only when authenticated
* feat(api): AppService - rename app.locals.configs to app.locals.paths, load the custom config, and set fileStrategy from the YAML config in app.locals
* refactor: separate Firebase and Local strategies, called based on config
* refactor: modularize file strategies and employ them for DALL-E
* refactor(librechat.yaml): add fileStrategy field
* feat: add source to the MongoFile schema, as well as BatchFile and ExtendedFile types
* feat: employ file strategies for file upload/deletion
* refactor(deleteFirebaseFile): add user id validation for Firebase file deletion
* chore(deleteFirebaseFile): update jsdocs
* feat: employ strategies for vision requests
* fix(client): handle messages with deleted files
* fix(client): ensure `filesToDelete` always saves/sends `file.source`
* feat(openAI): configurable `resendImages` and `imageDetail`
* refactor(getTokenCountForMessage): recursively process only arrays of objects, counting only their values (not keys) and skipping `image_url` types
* feat(OpenAIClient): calculateImageTokenCost
* chore: remove comment
* refactor(uploadAvatar): employ fileStrategy for avatars, whether from social logins or user upload
* docs: update docs on how to configure fileStrategy
* fix(ci): mock winston and winston-related modules; update DALLE3.spec.js for these changes
* refactor(redis): change terminal message to reflect current development state
* fix(DALL-E-2): pass fileStrategy to dall-e
This commit is contained in:
parent 28a6807176 · commit d20970f5c5
81 changed files with 1729 additions and 855 deletions
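The unifying idea behind the streamlined file strategies in this commit is a small dispatch layer: every storage source (local disk, Firebase) exposes the same set of functions, and callers look them up by a file's `source`. The following plain-JavaScript sketch illustrates that pattern; the names `FileSources` and `getStrategyFunctions` mirror identifiers that appear later in this diff, but the bodies here are simplified assumptions, not the project's actual implementation.

// Minimal sketch of the strategy dispatch introduced by this commit (assumed shapes).
const FileSources = { local: 'local', firebase: 'firebase' };

const strategies = {
  [FileSources.local]: {
    // deleteFile(req, file): remove the file from the local public path
    deleteFile: async (req, file) => console.log('local delete', file.file_id),
  },
  [FileSources.firebase]: {
    // deleteFile(req, file): remove the file from Firebase Storage
    deleteFile: async (req, file) => console.log('firebase delete', file.file_id),
  },
};

// Callers resolve the functions once per source, as the files DELETE route does later in this diff.
function getStrategyFunctions(source) {
  const strategy = strategies[source];
  if (!strategy) {
    throw new Error(`No file strategy implemented for source "${source}"`);
  }
  return strategy;
}

// Usage: delete a batch of files, each according to its own source.
async function deleteAll(req, files) {
  await Promise.all(
    files.map((file) => getStrategyFunctions(file.source ?? FileSources.local).deleteFile(req, file)),
  );
}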
@@ -46,6 +46,10 @@ class BaseClient {
logger.debug('`[BaseClient] recordTokenUsage` not implemented.', response);
}

async addPreviousAttachments(messages) {
return messages;
}

async recordTokenUsage({ promptTokens, completionTokens }) {
logger.debug('`[BaseClient] recordTokenUsage` not implemented.', {
promptTokens,

@@ -484,20 +488,22 @@ class BaseClient {
mapMethod = this.getMessageMapMethod();
}

const orderedMessages = this.constructor.getMessagesForConversation({
let _messages = this.constructor.getMessagesForConversation({
messages,
parentMessageId,
mapMethod,
});

_messages = await this.addPreviousAttachments(_messages);

if (!this.shouldSummarize) {
return orderedMessages;
return _messages;
}

// Find the latest message with a 'summary' property
for (let i = orderedMessages.length - 1; i >= 0; i--) {
if (orderedMessages[i]?.summary) {
this.previous_summary = orderedMessages[i];
for (let i = _messages.length - 1; i >= 0; i--) {
if (_messages[i]?.summary) {
this.previous_summary = _messages[i];
break;
}
}

@@ -512,7 +518,7 @@ class BaseClient {
});
}

return orderedMessages;
return _messages;
}

async saveMessageToDatabase(message, endpointOptions, user = null) {

@@ -618,6 +624,11 @@ class BaseClient {
* An additional 3 tokens need to be added for assistant label priming after all messages have been counted.
* In our implementation, this is accounted for in the getMessagesWithinTokenLimit method.
*
* The content parts example was adapted from the following example:
* https://github.com/openai/openai-cookbook/pull/881/files
*
* Note: image token calculation is to be done elsewhere where we have access to the image metadata
*
* @param {Object} message
*/
getTokenCountForMessage(message) {

@@ -631,11 +642,18 @@ class BaseClient {
}

const processValue = (value) => {
if (typeof value === 'object' && value !== null) {
for (let [nestedKey, nestedValue] of Object.entries(value)) {
if (nestedKey === 'image_url' || nestedValue === 'image_url') {
if (Array.isArray(value)) {
for (let item of value) {
if (!item || !item.type || item.type === 'image_url') {
continue;
}

const nestedValue = item[item.type];

if (!nestedValue) {
continue;
}

processValue(nestedValue);
}
} else {
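For context on the `getTokenCountForMessage` refactor above: with content-parts messages, only the values of text-like parts are tokenized, while `image_url` parts are skipped and costed separately from their dimensions (see `calculateImageTokenCost` further down). A small illustrative sketch follows; `countTokens` is a stand-in heuristic, not the client's actual tiktoken-based counter.

// Illustrative only: countTokens is a rough stand-in for the real tokenizer.
const countTokens = (text) => Math.ceil(text.length / 4);

function tokensForContentParts(content) {
  let total = 0;
  for (const part of content) {
    if (!part || !part.type || part.type === 'image_url') {
      continue; // image parts are billed separately from width/height/detail
    }
    const value = part[part.type];
    if (typeof value === 'string') {
      total += countTokens(value);
    }
  }
  return total;
}

// Example: only the text part of a vision-style message contributes here.
tokensForContentParts([
  { type: 'text', text: 'describe what is in this image?' },
  { type: 'image_url', image_url: { url: 'https://example.com/image.png', detail: 'high' } },
]);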
@@ -1,6 +1,6 @@
const OpenAI = require('openai');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { getResponseSender } = require('librechat-data-provider');
const { getResponseSender, ImageDetailCost, ImageDetail } = require('librechat-data-provider');
const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
const { encodeAndFormat, validateVisionModel } = require('~/server/services/Files/images');
const { getModelMaxTokens, genAzureChatCompletion, extractBaseURL } = require('~/utils');

@@ -8,8 +8,9 @@ const { truncateText, formatMessage, CUT_OFF_PROMPT } = require('./prompts');
const { handleOpenAIErrors } = require('./tools/util');
const spendTokens = require('~/models/spendTokens');
const { createLLM, RunManager } = require('./llm');
const { isEnabled } = require('~/server/utils');
const ChatGPTClient = require('./ChatGPTClient');
const { isEnabled } = require('~/server/utils');
const { getFiles } = require('~/models/File');
const { summaryBuffer } = require('./memory');
const { runTitleChain } = require('./chains');
const { tokenSplit } = require('./document');

@@ -76,16 +77,7 @@ class OpenAIClient extends BaseClient {
};
}

this.isVisionModel = validateVisionModel(this.modelOptions.model);

if (this.options.attachments && !this.isVisionModel) {
this.modelOptions.model = 'gpt-4-vision-preview';
this.isVisionModel = true;
}

if (this.isVisionModel) {
delete this.modelOptions.stop;
}
this.checkVisionRequest(this.options.attachments);

const { OPENROUTER_API_KEY, OPENAI_FORCE_PROMPT } = process.env ?? {};
if (OPENROUTER_API_KEY && !this.azure) {

@@ -204,6 +196,27 @@ class OpenAIClient extends BaseClient {
return this;
}

/**
*
* Checks if the model is a vision model based on request attachments and sets the appropriate options:
* - Sets `this.modelOptions.model` to `gpt-4-vision-preview` if the request is a vision request.
* - Sets `this.isVisionModel` to `true` if vision request.
* - Deletes `this.modelOptions.stop` if vision request.
* @param {Array<Promise<MongoFile[]> | MongoFile[]> | Record<string, MongoFile[]>} attachments
*/
checkVisionRequest(attachments) {
this.isVisionModel = validateVisionModel(this.modelOptions.model);

if (attachments && !this.isVisionModel) {
this.modelOptions.model = 'gpt-4-vision-preview';
this.isVisionModel = true;
}

if (this.isVisionModel) {
delete this.modelOptions.stop;
}
}

setupTokens() {
if (this.isChatCompletion) {
this.startToken = '||>';

@@ -288,7 +301,11 @@ class OpenAIClient extends BaseClient {
tokenizerCallsCount++;
}

// Returns the token count of a given text. It also checks and resets the tokenizers if necessary.
/**
* Returns the token count of a given text. It also checks and resets the tokenizers if necessary.
* @param {string} text - The text to get the token count for.
* @returns {number} The token count of the given text.
*/
getTokenCount(text) {
this.resetTokenizersIfNecessary();
try {
@@ -301,10 +318,33 @@ class OpenAIClient extends BaseClient {
}
}

/**
* Calculate the token cost for an image based on its dimensions and detail level.
*
* @param {Object} image - The image object.
* @param {number} image.width - The width of the image.
* @param {number} image.height - The height of the image.
* @param {'low'|'high'|string|undefined} [image.detail] - The detail level ('low', 'high', or other).
* @returns {number} The calculated token cost.
*/
calculateImageTokenCost({ width, height, detail }) {
if (detail === 'low') {
return ImageDetailCost.LOW;
}

// Calculate the number of 512px squares
const numSquares = Math.ceil(width / 512) * Math.ceil(height / 512);

// Default to high detail cost calculation
return numSquares * ImageDetailCost.HIGH + ImageDetailCost.ADDITIONAL;
}

getSaveOptions() {
return {
chatGptLabel: this.options.chatGptLabel,
promptPrefix: this.options.promptPrefix,
resendImages: this.options.resendImages,
imageDetail: this.options.imageDetail,
...this.modelOptions,
};
}
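A worked example of the image token cost above: the `ImageDetailCost` constants come from librechat-data-provider and are not shown in this diff, so the values below (which mirror OpenAI's published GPT-4 Vision pricing of a flat low-detail cost plus a per-512px-tile cost) are assumptions for illustration only.

// Assumed constant values; the real ones live in librechat-data-provider.
const ImageDetailCost = { LOW: 85, HIGH: 170, ADDITIONAL: 85 };

function calculateImageTokenCost({ width, height, detail }) {
  if (detail === 'low') {
    return ImageDetailCost.LOW;
  }
  const numSquares = Math.ceil(width / 512) * Math.ceil(height / 512);
  return numSquares * ImageDetailCost.HIGH + ImageDetailCost.ADDITIONAL;
}

// A 1024x1024 image at high detail spans 2 x 2 = 4 tiles: 4 * 170 + 85 = 765 tokens.
// The same image at low detail costs a flat 85 tokens.
calculateImageTokenCost({ width: 1024, height: 1024, detail: 'high' }); // 765
calculateImageTokenCost({ width: 1024, height: 1024, detail: 'low' }); // 85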
@@ -317,6 +357,69 @@ class OpenAIClient extends BaseClient {
};
}

/**
*
* @param {TMessage[]} _messages
* @returns {TMessage[]}
*/
async addPreviousAttachments(_messages) {
if (!this.options.resendImages) {
return _messages;
}

/**
*
* @param {TMessage} message
*/
const processMessage = async (message) => {
if (!this.message_file_map) {
/** @type {Record<string, MongoFile[]> */
this.message_file_map = {};
}

const fileIds = message.files.map((file) => file.file_id);
const files = await getFiles({
file_id: { $in: fileIds },
});

await this.addImageURLs(message, files);

this.message_file_map[message.messageId] = files;
return message;
};

const promises = [];

for (const message of _messages) {
if (!message.files) {
promises.push(message);
continue;
}

promises.push(processMessage(message));
}

const messages = await Promise.all(promises);

this.checkVisionRequest(this.message_file_map);
return messages;
}

/**
*
* Adds image URLs to the message object and returns the files
*
* @param {TMessage[]} messages
* @param {MongoFile[]} files
* @returns {Promise<MongoFile[]>}
*/
async addImageURLs(message, attachments) {
const { files, image_urls } = await encodeAndFormat(this.options.req, attachments);

message.image_urls = image_urls;
return files;
}

async buildMessages(
messages,
parentMessageId,

@@ -355,13 +458,23 @@ class OpenAIClient extends BaseClient {
}

if (this.options.attachments) {
const attachments = await this.options.attachments;
const { files, image_urls } = await encodeAndFormat(
this.options.req,
attachments.filter((file) => file.type.includes('image')),
const attachments = (await this.options.attachments).filter((file) =>
file.type.includes('image'),
);

if (this.message_file_map) {
this.message_file_map[orderedMessages[orderedMessages.length - 1].messageId] = attachments;
} else {
this.message_file_map = {
[orderedMessages[orderedMessages.length - 1].messageId]: attachments,
};
}

const files = await this.addImageURLs(
orderedMessages[orderedMessages.length - 1],
attachments,
);

orderedMessages[orderedMessages.length - 1].image_urls = image_urls;
this.options.attachments = files;
}

@@ -372,10 +485,25 @@ class OpenAIClient extends BaseClient {
assistantName: this.options?.chatGptLabel,
});

if (this.contextStrategy && !orderedMessages[i].tokenCount) {
const needsTokenCount = this.contextStrategy && !orderedMessages[i].tokenCount;

/* If tokens were never counted, or, is a Vision request and the message has files, count again */
if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) {
orderedMessages[i].tokenCount = this.getTokenCountForMessage(formattedMessage);
}

/* If message has files, calculate image token cost */
if (this.message_file_map && this.message_file_map[message.messageId]) {
const attachments = this.message_file_map[message.messageId];
for (const file of attachments) {
orderedMessages[i].tokenCount += this.calculateImageTokenCost({
width: file.width,
height: file.height,
detail: this.options.imageDetail ?? ImageDetail.auto,
});
}
}

return formattedMessage;
});

@@ -780,7 +908,6 @@ ${convo}
if (this.isChatCompletion) {
modelOptions.messages = payload;
} else {
// TODO: unreachable code. Need to implement completions call for non-chat models
modelOptions.prompt = payload;
}

@@ -916,6 +1043,8 @@ ${convo}
clientOptions.addMetadata({ finish_reason });
}

logger.debug('[OpenAIClient] chatCompletion response', chatCompletion);

return message.content;
} catch (err) {
if (
@@ -112,7 +112,7 @@ class PluginsClient extends OpenAIClient {
signal: this.abortController.signal,
openAIApiKey: this.openAIApiKey,
conversationId: this.conversationId,
debug: this.options?.debug,
fileStrategy: this.options.req.app.locals.fileStrategy,
message,
},
});
@@ -546,6 +546,39 @@ describe('OpenAIClient', () => {
expect(totalTokens).toBe(testCase.expected);
});
});

const vision_request = [
{
role: 'user',
content: [
{
type: 'text',
text: 'describe what is in this image?',
},
{
type: 'image_url',
image_url: {
url: 'https://venturebeat.com/wp-content/uploads/2019/03/openai-1.png',
detail: 'high',
},
},
],
},
];

const expectedTokens = 14;
const visionModel = 'gpt-4-vision-preview';

it(`should return ${expectedTokens} tokens for model ${visionModel} (Vision Request)`, () => {
client.modelOptions.model = visionModel;
client.selectTokenizer();
// 3 tokens for assistant label
let totalTokens = 3;
for (let message of vision_request) {
totalTokens += client.getTokenCountForMessage(message);
}
expect(totalTokens).toBe(expectedTokens);
});
});

describe('sendMessage/getCompletion/chatCompletion', () => {
@@ -1,20 +1,13 @@
// From https://platform.openai.com/docs/api-reference/images/create
// To use this tool, you must pass in a configured OpenAIApi object.
const fs = require('fs');
const path = require('path');
const OpenAI = require('openai');
// const { genAzureEndpoint } = require('~/utils/genAzureEndpoints');
const { v4: uuidv4 } = require('uuid');
const { Tool } = require('langchain/tools');
const { HttpsProxyAgent } = require('https-proxy-agent');
const {
saveImageToFirebaseStorage,
getFirebaseStorageImageUrl,
getFirebaseStorage,
} = require('~/server/services/Files/Firebase');
const { getImageBasename } = require('~/server/services/Files/images');
const { processFileURL } = require('~/server/services/Files/process');
const extractBaseURL = require('~/utils/extractBaseURL');
const saveImageFromUrl = require('./saveImageFromUrl');
const { logger } = require('~/config');

const { DALLE_REVERSE_PROXY, PROXY } = process.env;

@@ -23,6 +16,7 @@ class OpenAICreateImage extends Tool {
super();

this.userId = fields.userId;
this.fileStrategy = fields.fileStrategy;
let apiKey = fields.DALLE_API_KEY || this.getApiKey();

const config = { apiKey };

@@ -82,12 +76,8 @@ Guidelines:
.trim();
}

getMarkdownImageUrl(imageName) {
const imageUrl = path
.join(this.relativeImageUrl, imageName)
.replace(/\\/g, '/')
.replace('public/', '');
return ``;
wrapInMarkdown(imageUrl) {
return `![generated image](${imageUrl})`;
}

async _call(input) {

@@ -118,45 +108,21 @@ Guidelines:
});
}

this.outputPath = path.resolve(
__dirname,
'..',
'..',
'..',
'..',
'client',
'public',
'images',
this.userId,
);
try {
const result = await processFileURL({
fileStrategy: this.fileStrategy,
userId: this.userId,
URL: theImageUrl,
fileName: imageName,
basePath: 'images',
});

const appRoot = path.resolve(__dirname, '..', '..', '..', '..', 'client');
this.relativeImageUrl = path.relative(appRoot, this.outputPath);

// Check if directory exists, if not create it
if (!fs.existsSync(this.outputPath)) {
fs.mkdirSync(this.outputPath, { recursive: true });
this.result = this.wrapInMarkdown(result);
} catch (error) {
logger.error('Error while saving the image:', error);
this.result = `Failed to save the image locally. ${error.message}`;
}

const storage = getFirebaseStorage();
if (storage) {
try {
await saveImageToFirebaseStorage(this.userId, theImageUrl, imageName);
this.result = await getFirebaseStorageImageUrl(`${this.userId}/${imageName}`);
logger.debug('[DALL-E] result: ' + this.result);
} catch (error) {
logger.error('Error while saving the image to Firebase Storage:', error);
this.result = `Failed to save the image to Firebase Storage. ${error.message}`;
}
} else {
try {
await saveImageFromUrl(theImageUrl, this.outputPath, imageName);
this.result = this.getMarkdownImageUrl(imageName);
} catch (error) {
logger.error('Error while saving the image locally:', error);
this.result = `Failed to save the image locally. ${error.message}`;
}
}
return this.result;
}
}
@@ -1,46 +0,0 @@
const fs = require('fs');
const path = require('path');
const axios = require('axios');
const { logger } = require('~/config');

async function saveImageFromUrl(url, outputPath, outputFilename) {
try {
// Fetch the image from the URL
const response = await axios({
url,
responseType: 'stream',
});

// Get the content type from the response headers
const contentType = response.headers['content-type'];
let extension = contentType.split('/').pop();

// Check if the output directory exists, if not, create it
if (!fs.existsSync(outputPath)) {
fs.mkdirSync(outputPath, { recursive: true });
}

// Replace or append the correct extension
const extRegExp = new RegExp(path.extname(outputFilename) + '$');
outputFilename = outputFilename.replace(extRegExp, `.${extension}`);
if (!path.extname(outputFilename)) {
outputFilename += `.${extension}`;
}

// Create a writable stream for the output path
const outputFilePath = path.join(outputPath, outputFilename);
const writer = fs.createWriteStream(outputFilePath);

// Pipe the response data to the output file
response.data.pipe(writer);

return new Promise((resolve, reject) => {
writer.on('finish', resolve);
writer.on('error', reject);
});
} catch (error) {
logger.error('[saveImageFromUrl] Error while saving the image:', error);
}
}

module.exports = saveImageFromUrl;
@@ -1,20 +1,13 @@
// From https://platform.openai.com/docs/guides/images/usage?context=node
// To use this tool, you must pass in a configured OpenAIApi object.
const fs = require('fs');
const path = require('path');
const { z } = require('zod');
const OpenAI = require('openai');
const { v4: uuidv4 } = require('uuid');
const { Tool } = require('langchain/tools');
const { HttpsProxyAgent } = require('https-proxy-agent');
const {
saveImageToFirebaseStorage,
getFirebaseStorageImageUrl,
getFirebaseStorage,
} = require('~/server/services/Files/Firebase');
const { getImageBasename } = require('~/server/services/Files/images');
const { processFileURL } = require('~/server/services/Files/process');
const extractBaseURL = require('~/utils/extractBaseURL');
const saveImageFromUrl = require('../saveImageFromUrl');
const { logger } = require('~/config');

const { DALLE3_SYSTEM_PROMPT, DALLE_REVERSE_PROXY, PROXY } = process.env;

@@ -23,6 +16,7 @@ class DALLE3 extends Tool {
super();

this.userId = fields.userId;
this.fileStrategy = fields.fileStrategy;
let apiKey = fields.DALLE_API_KEY || this.getApiKey();
const config = { apiKey };
if (DALLE_REVERSE_PROXY) {

@@ -91,12 +85,8 @@ class DALLE3 extends Tool {
.trim();
}

getMarkdownImageUrl(imageName) {
const imageUrl = path
.join(this.relativeImageUrl, imageName)
.replace(/\\/g, '/')
.replace('public/', '');
return ``;
wrapInMarkdown(imageUrl) {
return `![generated image](${imageUrl})`;
}

async _call(data) {

@@ -143,43 +133,19 @@ Error Message: ${error.message}`;
});
}

this.outputPath = path.resolve(
__dirname,
'..',
'..',
'..',
'..',
'..',
'client',
'public',
'images',
this.userId,
);
const appRoot = path.resolve(__dirname, '..', '..', '..', '..', '..', 'client');
this.relativeImageUrl = path.relative(appRoot, this.outputPath);
try {
const result = await processFileURL({
fileStrategy: this.fileStrategy,
userId: this.userId,
URL: theImageUrl,
fileName: imageName,
basePath: 'images',
});

// Check if directory exists, if not create it
if (!fs.existsSync(this.outputPath)) {
fs.mkdirSync(this.outputPath, { recursive: true });
}
const storage = getFirebaseStorage();
if (storage) {
try {
await saveImageToFirebaseStorage(this.userId, theImageUrl, imageName);
this.result = await getFirebaseStorageImageUrl(`${this.userId}/${imageName}`);
logger.debug('[DALL-E-3] result: ' + this.result);
} catch (error) {
logger.error('Error while saving the image to Firebase Storage:', error);
this.result = `Failed to save the image to Firebase Storage. ${error.message}`;
}
} else {
try {
await saveImageFromUrl(theImageUrl, this.outputPath, imageName);
this.result = this.getMarkdownImageUrl(imageName);
} catch (error) {
logger.error('Error while saving the image locally:', error);
this.result = `Failed to save the image locally. ${error.message}`;
}
this.result = this.wrapInMarkdown(result);
} catch (error) {
logger.error('Error while saving the image:', error);
this.result = `Failed to save the image locally. ${error.message}`;
}

return this.result;
@@ -1,20 +1,13 @@
const fs = require('fs');
const path = require('path');
const OpenAI = require('openai');
const DALLE3 = require('../DALLE3');
const {
getFirebaseStorage,
saveImageToFirebaseStorage,
} = require('~/server/services/Files/Firebase');
const saveImageFromUrl = require('../../saveImageFromUrl');
const { processFileURL } = require('~/server/services/Files/process');

const { logger } = require('~/config');

jest.mock('openai');

jest.mock('~/server/services/Files/Firebase', () => ({
getFirebaseStorage: jest.fn(),
saveImageToFirebaseStorage: jest.fn(),
getFirebaseStorageImageUrl: jest.fn(),
jest.mock('~/server/services/Files/process', () => ({
processFileURL: jest.fn(),
}));

jest.mock('~/server/services/Files/images', () => ({

@@ -50,10 +43,6 @@ jest.mock('fs', () => {
};
});

jest.mock('../../saveImageFromUrl', () => {
return jest.fn();
});

jest.mock('path', () => {
return {
resolve: jest.fn(),

@@ -99,10 +88,8 @@ describe('DALLE3', () => {

it('should generate markdown image URL correctly', () => {
const imageName = 'test.png';
path.join.mockReturnValue('images/test.png');
path.relative.mockReturnValue('images/test.png');
const markdownImage = dalle.getMarkdownImageUrl(imageName);
expect(markdownImage).toBe('');
const markdownImage = dalle.wrapInMarkdown(imageName);
expect(markdownImage).toBe('![generated image](test.png)');
});

it('should call OpenAI API with correct parameters', async () => {

@@ -122,11 +109,7 @@ describe('DALLE3', () => {
};

generate.mockResolvedValue(mockResponse);
saveImageFromUrl.mockResolvedValue(true);
fs.existsSync.mockReturnValue(true);
path.resolve.mockReturnValue('/fakepath/images');
path.join.mockReturnValue('/fakepath/images/img-test.png');
path.relative.mockReturnValue('images/img-test.png');
processFileURL.mockResolvedValue('http://example.com/img-test.png');

const result = await dalle._call(mockData);

@@ -138,6 +121,7 @@ describe('DALLE3', () => {
prompt: mockData.prompt,
n: 1,
});

expect(result).toContain('![generated image]');
});

@@ -184,23 +168,6 @@ describe('DALLE3', () => {
});
});

it('should create the directory if it does not exist', async () => {
const mockData = {
prompt: 'A test prompt',
};
const mockResponse = {
data: [
{
url: 'http://example.com/img-test.png',
},
],
};
generate.mockResolvedValue(mockResponse);
fs.existsSync.mockReturnValue(false); // Simulate directory does not exist
await dalle._call(mockData);
expect(fs.mkdirSync).toHaveBeenCalledWith(expect.any(String), { recursive: true });
});

it('should log an error and return the image URL if there is an error saving the image', async () => {
const mockData = {
prompt: 'A test prompt',

@@ -214,31 +181,12 @@ describe('DALLE3', () => {
};
const error = new Error('Error while saving the image');
generate.mockResolvedValue(mockResponse);
saveImageFromUrl.mockRejectedValue(error);
processFileURL.mockRejectedValue(error);
const result = await dalle._call(mockData);
expect(logger.error).toHaveBeenCalledWith('Error while saving the image locally:', error);
expect(logger.error).toHaveBeenCalledWith('Error while saving the image:', error);
expect(result).toBe('Failed to save the image locally. Error while saving the image');
});

it('should save image to Firebase Storage if Firebase is initialized', async () => {
const mockData = {
prompt: 'A test prompt',
};
const mockImageUrl = 'http://example.com/img-test.png';
const mockResponse = { data: [{ url: mockImageUrl }] };
generate.mockResolvedValue(mockResponse);
getFirebaseStorage.mockReturnValue({}); // Simulate Firebase being initialized

await dalle._call(mockData);

expect(getFirebaseStorage).toHaveBeenCalled();
expect(saveImageToFirebaseStorage).toHaveBeenCalledWith(
undefined,
mockImageUrl,
expect.any(String),
);
});

it('should handle error when saving image to Firebase Storage fails', async () => {
const mockData = {
prompt: 'A test prompt',

@@ -247,17 +195,11 @@ describe('DALLE3', () => {
const mockResponse = { data: [{ url: mockImageUrl }] };
const error = new Error('Error while saving to Firebase');
generate.mockResolvedValue(mockResponse);
getFirebaseStorage.mockReturnValue({}); // Simulate Firebase being initialized
saveImageToFirebaseStorage.mockRejectedValue(error);
processFileURL.mockRejectedValue(error);

const result = await dalle._call(mockData);

expect(logger.error).toHaveBeenCalledWith(
'Error while saving the image to Firebase Storage:',
error,
);
expect(result).toBe(
'Failed to save the image to Firebase Storage. Error while saving to Firebase',
);
expect(logger.error).toHaveBeenCalledWith('Error while saving the image:', error);
expect(result).toContain('Failed to save the image');
});
});
@@ -170,6 +170,8 @@ const loadTools = async ({

const toolOptions = {
serpapi: { location: 'Austin,Texas,United States', hl: 'en', gl: 'us' },
dalle: { fileStrategy: options.fileStrategy },
'dall-e': { fileStrategy: options.fileStrategy },
};

const toolAuthFields = {};
5 api/cache/keyvRedis.js (vendored)

@@ -10,10 +10,11 @@ if (REDIS_URI && isEnabled(USE_REDIS)) {
keyvRedis = new KeyvRedis(REDIS_URI, { useRedisSets: false });
keyvRedis.on('error', (err) => logger.error('KeyvRedis connection error:', err));
keyvRedis.setMaxListeners(20);
} else {
logger.info(
'`REDIS_URI` not provided, or `USE_REDIS` not set. Redis module will not be initialized.',
'[Optional] Redis initialized. Note: Redis support is experimental. If you have issues, disable it. Cache needs to be flushed for values to refresh.',
);
} else {
logger.info('[Optional] Redis not initialized. Note: Redis support is experimental.');
}

module.exports = keyvRedis;
@@ -1,6 +1,7 @@
const path = require('path');

module.exports = {
dist: path.resolve(__dirname, '..', '..', 'client', 'dist'),
publicPath: path.resolve(__dirname, '..', '..', 'client', 'public'),
imageOutput: path.resolve(__dirname, '..', '..', 'client', 'public', 'images'),
};
@@ -84,6 +84,12 @@ const conversationPreset = {
type: String,
// default: null,
},
resendImages: {
type: Boolean,
},
imageDetail: {
type: String,
},
};

const agentOptions = {
@@ -1,3 +1,4 @@
const { FileSources } = require('librechat-data-provider');
const mongoose = require('mongoose');

/**

@@ -12,6 +13,7 @@ const mongoose = require('mongoose');
* @property {'file'} object - Type of object, always 'file'
* @property {string} type - Type of file
* @property {number} usage - Number of uses of the file
* @property {string} [source] - The source of the file
* @property {number} [width] - Optional width of the file
* @property {number} [height] - Optional height of the file
* @property {Date} [expiresAt] - Optional height of the file

@@ -42,11 +44,6 @@ const fileSchema = mongoose.Schema(
type: Number,
required: true,
},
usage: {
type: Number,
required: true,
default: 0,
},
filename: {
type: String,
required: true,

@@ -64,6 +61,15 @@ const fileSchema = mongoose.Schema(
type: String,
required: true,
},
usage: {
type: Number,
required: true,
default: 0,
},
source: {
type: String,
default: FileSources.local,
},
width: Number,
height: Number,
expiresAt: {
@@ -5,33 +5,28 @@ const cors = require('cors');
const express = require('express');
const passport = require('passport');
const mongoSanitize = require('express-mongo-sanitize');
const { initializeFirebase } = require('~/server/services/Files/Firebase/initialize');
const loadCustomConfig = require('~/server/services/Config/loadCustomConfig');
const errorController = require('~/server/controllers/ErrorController');
const configureSocialLogins = require('~/server/socialLogins');
const noIndex = require('~/server/middleware/noIndex');
const errorController = require('./controllers/ErrorController');
const { jwtLogin, passportLogin } = require('~/strategies');
const configureSocialLogins = require('./socialLogins');
const { connectDb, indexSync } = require('~/lib/db');
const AppService = require('./services/AppService');
const noIndex = require('./middleware/noIndex');
const { logger } = require('~/config');

const routes = require('~/server/routes');
const paths = require('~/config/paths');
const routes = require('./routes');

const { PORT, HOST, ALLOW_SOCIAL_LOGIN } = process.env ?? {};

const port = Number(PORT) || 3080;
const host = HOST || 'localhost';
const projectPath = path.join(__dirname, '..', '..', 'client');
const { jwtLogin, passportLogin } = require('~/strategies');

const startServer = async () => {
await connectDb();
logger.info('Connected to MongoDB');
await loadCustomConfig();
initializeFirebase();
await indexSync();

const app = express();
app.locals.config = paths;
await AppService(app);

// Middleware
app.use(noIndex);

@@ -39,14 +34,14 @@ const startServer = async () => {
app.use(express.json({ limit: '3mb' }));
app.use(mongoSanitize());
app.use(express.urlencoded({ extended: true, limit: '3mb' }));
app.use(express.static(path.join(projectPath, 'dist')));
app.use(express.static(path.join(projectPath, 'public')));
app.use(express.static(app.locals.paths.dist));
app.use(express.static(app.locals.paths.publicPath));
app.set('trust proxy', 1); // trust first proxy
app.use(cors());

if (!ALLOW_SOCIAL_LOGIN) {
console.warn(
'Social logins are disabled. Set Envrionment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.',
'Social logins are disabled. Set Environment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.',
);
}

@@ -81,7 +76,7 @@ const startServer = async () => {
app.use('/api/files', routes.files);

app.use((req, res) => {
res.status(404).sendFile(path.join(projectPath, 'dist', 'index.html'));
res.status(404).sendFile(path.join(app.locals.paths.dist, 'index.html'));
});

app.listen(port, host, () => {
@@ -1,10 +1,10 @@
const { processFiles } = require('~/server/services/Files');
const { parseConvo, EModelEndpoint } = require('librechat-data-provider');
const { processFiles } = require('~/server/services/Files/process');
const gptPlugins = require('~/server/services/Endpoints/gptPlugins');
const anthropic = require('~/server/services/Endpoints/anthropic');
const openAI = require('~/server/services/Endpoints/openAI');
const custom = require('~/server/services/Endpoints/custom');
const google = require('~/server/services/Endpoints/google');
const anthropic = require('~/server/services/Endpoints/anthropic');
const gptPlugins = require('~/server/services/Endpoints/gptPlugins');
const { parseConvo, EModelEndpoint } = require('librechat-data-provider');

const buildFunction = {
[EModelEndpoint.openAI]: openAI.buildOptions,
@@ -1,7 +1,7 @@
const express = require('express');
const multer = require('multer');

const uploadAvatar = require('~/server/services/Files/images/avatar/uploadAvatar');
const uploadAvatar = require('~/server/services/Files/images/avatar');
const { requireJwtAuth } = require('~/server/middleware/');
const User = require('~/models/User');

@@ -23,7 +23,12 @@ router.post('/', requireJwtAuth, upload.single('input'), async (req, res) => {
if (!user) {
throw new Error('User not found');
}
const url = await uploadAvatar(userId, input, manual);
const url = await uploadAvatar({
input,
userId,
manual,
fileStrategy: req.app.locals.fileStrategy,
});

res.json({ url });
} catch (error) {
@@ -1,36 +1,29 @@
const { z } = require('zod');
const path = require('path');
const fs = require('fs').promises;
const express = require('express');
const { deleteFiles } = require('~/models');
const { FileSources } = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { deleteFiles, getFiles } = require('~/models');
const { logger } = require('~/config');

const router = express.Router();

const isUUID = z.string().uuid();

const isValidPath = (req, base, subfolder, filepath) => {
const normalizedBase = path.resolve(base, subfolder, req.user.id);
const normalizedFilepath = path.resolve(filepath);
return normalizedFilepath.startsWith(normalizedBase);
};

const deleteFile = async (req, file) => {
const { publicPath } = req.app.locals.config;
const parts = file.filepath.split(path.sep);
const subfolder = parts[1];
const filepath = path.join(publicPath, file.filepath);

if (!isValidPath(req, publicPath, subfolder, filepath)) {
throw new Error('Invalid file path');
router.get('/', async (req, res) => {
try {
const files = await getFiles({ user: req.user.id });
res.status(200).send(files);
} catch (error) {
logger.error('[/files] Error getting files:', error);
res.status(400).json({ message: 'Error in request', error: error.message });
}

await fs.unlink(filepath);
};
});

router.delete('/', async (req, res) => {
try {
const { files: _files } = req.body;

/** @type {MongoFile[]} */
const files = _files.filter((file) => {
if (!file.file_id) {
return false;

@@ -47,9 +40,24 @@ router.delete('/', async (req, res) => {
}

const file_ids = files.map((file) => file.file_id);
const deletionMethods = {};
const promises = [];
promises.push(await deleteFiles(file_ids));

for (const file of files) {
const source = file.source ?? FileSources.local;

if (deletionMethods[source]) {
promises.push(deletionMethods[source](req, file));
continue;
}

const { deleteFile } = getStrategyFunctions(source);
if (!deleteFile) {
throw new Error(`Delete function not implemented for ${source}`);
}

deletionMethods[source] = deleteFile;
promises.push(deleteFile(req, file));
}
@@ -2,7 +2,7 @@ const { z } = require('zod');
const fs = require('fs').promises;
const express = require('express');
const upload = require('./multer');
const { localStrategy } = require('~/server/services/Files');
const { processImageUpload } = require('~/server/services/Files/process');
const { logger } = require('~/config');

const router = express.Router();

@@ -34,7 +34,8 @@ router.post('/', upload.single('file'), async (req, res) => {
uuidSchema.parse(metadata.file_id);
metadata.temp_file_id = metadata.file_id;
metadata.file_id = req.file_id;
await localStrategy({ req, res, file, metadata });

await processImageUpload({ req, res, file, metadata });
} catch (error) {
logger.error('[/files/images] Error processing file:', error);
try {
@@ -11,6 +11,7 @@ const {

const files = require('./files');
const images = require('./images');
const avatar = require('./avatar');

router.use(requireJwtAuth);
router.use(checkBan);

@@ -18,6 +19,6 @@ router.use(uaParser);

router.use('/', files);
router.use('/images', images);
router.use('/images/avatar', require('./avatar'));
router.use('/images/avatar', avatar);

module.exports = router;
@@ -8,7 +8,7 @@ const sizeLimit = 20 * 1024 * 1024; // 20 MB

const storage = multer.diskStorage({
destination: function (req, file, cb) {
const outputPath = path.join(req.app.locals.config.imageOutput, 'temp');
const outputPath = path.join(req.app.locals.paths.imageOutput, 'temp');
if (!fs.existsSync(outputPath)) {
fs.mkdirSync(outputPath, { recursive: true });
}
27 api/server/services/AppService.js (new file)

@@ -0,0 +1,27 @@
const { FileSources } = require('librechat-data-provider');
const { initializeFirebase } = require('./Files/Firebase/initialize');
const loadCustomConfig = require('./Config/loadCustomConfig');
const paths = require('~/config/paths');

/**
*
* Loads custom config and initializes app-wide variables.
* @function AppService
* @param {Express.Application} app - The Express application object.
*/
const AppService = async (app) => {
const config = (await loadCustomConfig()) ?? {};
const fileStrategy = config.fileStrategy ?? FileSources.local;
process.env.CDN_PROVIDER = fileStrategy;

if (fileStrategy === FileSources.firebase) {
initializeFirebase();
}

app.locals = {
fileStrategy,
paths,
};
};

module.exports = AppService;
@@ -1,9 +1,11 @@
const buildOptions = (endpoint, parsedBody) => {
const { chatGptLabel, promptPrefix, ...rest } = parsedBody;
const { chatGptLabel, promptPrefix, resendImages, imageDetail, ...rest } = parsedBody;
const endpointOption = {
endpoint,
chatGptLabel,
promptPrefix,
resendImages,
imageDetail,
modelOptions: {
...rest,
},
174 api/server/services/Files/Firebase/crud.js (new file)

@@ -0,0 +1,174 @@
const fetch = require('node-fetch');
const { ref, uploadBytes, getDownloadURL, deleteObject } = require('firebase/storage');
const { getFirebaseStorage } = require('./initialize');

/**
* Deletes a file from Firebase Storage.
* @param {string} directory - The directory name
* @param {string} fileName - The name of the file to delete.
* @returns {Promise<void>} A promise that resolves when the file is deleted.
*/
async function deleteFile(basePath, fileName) {
const storage = getFirebaseStorage();
if (!storage) {
console.error('Firebase is not initialized. Cannot delete file from Firebase Storage.');
throw new Error('Firebase is not initialized');
}

const storageRef = ref(storage, `${basePath}/${fileName}`);

try {
await deleteObject(storageRef);
console.log('File deleted successfully from Firebase Storage');
} catch (error) {
console.error('Error deleting file from Firebase Storage:', error.message);
throw error;
}
}

/**
* Saves an file from a given URL to Firebase Storage. The function first initializes the Firebase Storage
* reference, then uploads the file to a specified basePath in the Firebase Storage. It handles initialization
* errors and upload errors, logging them to the console. If the upload is successful, the file name is returned.
*
* @param {Object} params - The parameters object.
* @param {string} params.userId - The user's unique identifier. This is used to create a user-specific basePath
* in Firebase Storage.
* @param {string} params.URL - The URL of the file to be uploaded. The file at this URL will be fetched
* and uploaded to Firebase Storage.
* @param {string} params.fileName - The name that will be used to save the file in Firebase Storage. This
* should include the file extension.
* @param {string} [params.basePath='images'] - Optional. The base basePath in Firebase Storage where the file will
* be stored. Defaults to 'images' if not specified.
*
* @returns {Promise<string|null>}
* A promise that resolves to the file name if the file is successfully uploaded, or null if there
* is an error in initialization or upload.
*/
async function saveURLToFirebase({ userId, URL, fileName, basePath = 'images' }) {
const storage = getFirebaseStorage();
if (!storage) {
console.error('Firebase is not initialized. Cannot save file to Firebase Storage.');
return null;
}

const storageRef = ref(storage, `${basePath}/${userId.toString()}/${fileName}`);

try {
await uploadBytes(storageRef, await fetch(URL).then((response) => response.buffer()));
return fileName;
} catch (error) {
console.error('Error uploading file to Firebase Storage:', error.message);
return null;
}
}

/**
* Retrieves the download URL for a specified file from Firebase Storage. This function initializes the
* Firebase Storage and generates a reference to the file based on the provided basePath and file name. If
* Firebase Storage is not initialized or if there is an error in fetching the URL, the error is logged
* to the console.
*
* @param {Object} params - The parameters object.
* @param {string} params.fileName - The name of the file for which the URL is to be retrieved. This should
* include the file extension.
* @param {string} [params.basePath='images'] - Optional. The base basePath in Firebase Storage where the file is
* stored. Defaults to 'images' if not specified.
*
* @returns {Promise<string|null>}
* A promise that resolves to the download URL of the file if successful, or null if there is an
* error in initialization or fetching the URL.
*/
async function getFirebaseURL({ fileName, basePath = 'images' }) {
const storage = getFirebaseStorage();
if (!storage) {
console.error('Firebase is not initialized. Cannot get image URL from Firebase Storage.');
return null;
}

const storageRef = ref(storage, `${basePath}/${fileName}`);

try {
return await getDownloadURL(storageRef);
} catch (error) {
console.error('Error fetching file URL from Firebase Storage:', error.message);
return null;
}
}

/**
* Uploads a buffer to Firebase Storage.
*
* @param {Object} params - The parameters object.
* @param {string} params.userId - The user's unique identifier. This is used to create a user-specific basePath
* in Firebase Storage.
* @param {string} params.fileName - The name of the file to be saved in Firebase Storage.
* @param {string} params.buffer - The buffer to be uploaded.
* @param {string} [params.basePath='images'] - Optional. The base basePath in Firebase Storage where the file will
* be stored. Defaults to 'images' if not specified.
*
* @returns {Promise<string>} - A promise that resolves to the download URL of the uploaded file.
*/
async function saveBufferToFirebase({ userId, buffer, fileName, basePath = 'images' }) {
const storage = getFirebaseStorage();
if (!storage) {
throw new Error('Firebase is not initialized');
}

const storageRef = ref(storage, `${basePath}/${userId}/${fileName}`);
await uploadBytes(storageRef, buffer);

// Assuming you have a function to get the download URL
return await getFirebaseURL({ fileName, basePath: `${basePath}/${userId}` });
}

/**
* Extracts and decodes the file path from a Firebase Storage URL.
*
* @param {string} urlString - The Firebase Storage URL.
* @returns {string} The decoded file path.
*/
function extractFirebaseFilePath(urlString) {
try {
const url = new URL(urlString);
const pathRegex = /\/o\/(.+?)(\?|$)/;
const match = url.pathname.match(pathRegex);

if (match && match[1]) {
return decodeURIComponent(match[1]);
}

return '';
} catch (error) {
// If URL parsing fails, return an empty string
return '';
}
}

/**
* Deletes a file from Firebase storage. This function determines the filepath from the
* Firebase storage URL via regex for deletion. Validated by the user's ID.
*
* @param {Express.Request} req - The request object from Express.
* It should contain a `user` object with an `id` property.
* @param {MongoFile} file - The file object to be deleted.
*
* @returns {Promise<void>}
* A promise that resolves when the file has been successfully deleted from Firebase storage.
* Throws an error if there is an issue with deletion.
*/
const deleteFirebaseFile = async (req, file) => {
const fileName = extractFirebaseFilePath(file.filepath);
if (!fileName.includes(req.user.id)) {
throw new Error('Invalid file path');
}
await deleteFile('', fileName);
};

module.exports = {
deleteFile,
getFirebaseURL,
saveURLToFirebase,
deleteFirebaseFile,
saveBufferToFirebase,
};
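For a sense of what `extractFirebaseFilePath` and `deleteFirebaseFile` operate on: the URL below follows the typical Firebase Storage download-URL format and is a hypothetical example, not taken from this diff.

// Hypothetical Firebase Storage download URL (bucket, path, and token are made up):
const urlString =
  'https://firebasestorage.googleapis.com/v0/b/my-bucket.appspot.com/o/images%2F6543%2Fimg-abc.webp?alt=media&token=xyz';

// extractFirebaseFilePath matches /\/o\/(.+?)(\?|$)/ against the URL's pathname and decodes it:
// => 'images/6543/img-abc.webp'
// deleteFirebaseFile then verifies the decoded path contains req.user.id before deleting the object.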
@ -1,45 +1,105 @@
|
|||
const fetch = require('node-fetch');
|
||||
const { ref, uploadBytes, getDownloadURL } = require('firebase/storage');
|
||||
const { getFirebaseStorage } = require('./initialize');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const sharp = require('sharp');
|
||||
const { resizeImage } = require('../images/resize');
|
||||
const { saveBufferToFirebase } = require('./crud');
|
||||
const { updateFile } = require('~/models/File');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
async function saveImageToFirebaseStorage(userId, imageUrl, imageName) {
|
||||
const storage = getFirebaseStorage();
|
||||
if (!storage) {
|
||||
console.error('Firebase is not initialized. Cannot save image to Firebase Storage.');
|
||||
return null;
|
||||
/**
|
||||
* Converts an image file to the WebP format. The function first resizes the image based on the specified
|
||||
* resolution.
|
||||
*
|
||||
*
|
||||
* @param {Object} req - The request object from Express. It should have a `user` property with an `id`
|
||||
* representing the user, and an `app.locals.paths` object with an `imageOutput` path.
|
||||
* @param {Express.Multer.File} file - The file object, which is part of the request. The file object should
|
||||
* have a `path` property that points to the location of the uploaded file.
|
||||
* @param {string} [resolution='high'] - Optional. The desired resolution for the image resizing. Default is 'high'.
|
||||
*
|
||||
* @returns {Promise<{ filepath: string, bytes: number, width: number, height: number}>}
|
||||
* A promise that resolves to an object containing:
|
||||
* - filepath: The path where the converted WebP image is saved.
|
||||
* - bytes: The size of the converted image in bytes.
|
||||
* - width: The width of the converted image.
|
||||
* - height: The height of the converted image.
|
||||
*/
|
||||
async function uploadImageToFirebase(req, file, resolution = 'high') {
|
||||
const inputFilePath = file.path;
|
||||
const { buffer: resizedBuffer, width, height } = await resizeImage(inputFilePath, resolution);
|
||||
const extension = path.extname(inputFilePath);
|
||||
const userId = req.user.id;
|
||||
|
||||
let webPBuffer;
|
||||
let fileName = path.basename(inputFilePath);
|
||||
if (extension.toLowerCase() === '.webp') {
|
||||
webPBuffer = resizedBuffer;
|
||||
} else {
|
||||
webPBuffer = await sharp(resizedBuffer).toFormat('webp').toBuffer();
|
||||
// Replace or append the correct extension
|
||||
const extRegExp = new RegExp(path.extname(fileName) + '$');
|
||||
fileName = fileName.replace(extRegExp, '.webp');
|
||||
if (!path.extname(fileName)) {
|
||||
fileName += '.webp';
|
||||
}
|
||||
}
|
||||
|
||||
const storageRef = ref(storage, `images/${userId.toString()}/${imageName}`);
|
||||
const downloadURL = await saveBufferToFirebase({ userId, buffer: webPBuffer, fileName });
|
||||
|
||||
await fs.promises.unlink(inputFilePath);
|
||||
|
||||
const bytes = Buffer.byteLength(webPBuffer);
|
||||
return { filepath: downloadURL, bytes, width, height };
|
||||
}
|
||||
|
||||
/**
|
||||
* Local: Updates the file and returns the URL in expected order/format
|
||||
* for image payload handling: tuple order of [filepath, URL].
|
||||
* @param {Object} req - The request object.
|
||||
* @param {MongoFile} file - The file object.
|
||||
* @returns {Promise<[MongoFile, string]>} - A promise that resolves to an array of results from updateFile and encodeImage.
|
||||
*/
|
||||
async function prepareImageURL(req, file) {
|
||||
const { filepath } = file;
|
||||
const promises = [];
|
||||
promises.push(updateFile({ file_id: file.file_id }));
|
||||
promises.push(filepath);
|
||||
return await Promise.all(promises);
|
||||
}
|
||||
|
||||
/**
|
||||
* Uploads a user's avatar to Firebase Storage and returns the URL.
|
||||
* If the 'manual' flag is set to 'true', it also updates the user's avatar URL in the database.
|
||||
*
|
||||
* @param {object} params - The parameters object.
|
||||
* @param {Buffer} params.buffer - The Buffer containing the avatar image in WebP format.
|
||||
* @param {object} params.User - The User document (mongoose); TODO: remove direct use of Model, `User`
|
||||
* @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
|
||||
* @returns {Promise<string>} - A promise that resolves with the URL of the uploaded avatar.
|
||||
* @throws {Error} - Throws an error if Firebase is not initialized or if there is an error in uploading.
|
||||
*/
|
||||
async function processFirebaseAvatar({ buffer, User, manual }) {
  try {
    const downloadURL = await saveBufferToFirebase({
      userId: User._id.toString(),
      buffer,
      fileName: 'avatar.png',
    });

    const isManual = manual === 'true';

    const url = `${downloadURL}?manual=${isManual}`;

    if (isManual) {
      User.avatar = url;
      await User.save();
    }

    return url;
  } catch (error) {
    logger.error('Error uploading profile picture:', error);
    throw error;
  }
}
|
||||
|
||||
async function getFirebaseStorageImageUrl(imageName) {
|
||||
const storage = getFirebaseStorage();
|
||||
if (!storage) {
|
||||
console.error('Firebase is not initialized. Cannot get image URL from Firebase Storage.');
|
||||
return null;
|
||||
}
|
||||
|
||||
const storageRef = ref(storage, `images/${imageName}`);
|
||||
|
||||
try {
|
||||
// Get the download URL for the image from Firebase Storage
|
||||
return await getDownloadURL(storageRef);
|
||||
} catch (error) {
|
||||
console.error('Error fetching image URL from Firebase Storage:', error.message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
saveImageToFirebaseStorage,
|
||||
getFirebaseStorageImageUrl,
|
||||
};
|
||||
module.exports = { uploadImageToFirebase, prepareImageURL, processFirebaseAvatar };
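Note: a rough sketch of how a route could exercise the Firebase image handler above; the route path, multer setup, and require path are illustrative assumptions, not part of this commit:

const express = require('express');
const multer = require('multer');
const { uploadImageToFirebase } = require('~/server/services/Files/Firebase');

const router = express.Router();
const upload = multer({ dest: 'uploads/' });

router.post('/images', upload.single('file'), async (req, res) => {
  // req.file comes from multer; req.user.id is set by auth middleware.
  const { filepath, bytes, width, height } = await uploadImageToFirebase(req, req.file);
  res.status(200).json({ filepath, bytes, width, height });
});

module.exports = router;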
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
const crud = require('./crud');
|
||||
const images = require('./images');
|
||||
const initialize = require('./initialize');
|
||||
|
||||
module.exports = {
|
||||
...crud,
|
||||
...images,
|
||||
...initialize,
|
||||
};
|
||||
|
|
|
|||
174
api/server/services/Files/Local/crud.js
Normal file
|
|
@ -0,0 +1,174 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const axios = require('axios');
|
||||
const { logger } = require('~/config');
|
||||
const paths = require('~/config/paths');
|
||||
|
||||
/**
|
||||
* Saves a file to a specified output path with a new filename.
|
||||
*
|
||||
* @param {Express.Multer.File} file - The file object to be saved. Should contain properties like 'originalname' and 'path'.
|
||||
* @param {string} outputPath - The path where the file should be saved.
|
||||
* @param {string} outputFilename - The new filename for the saved file (without extension).
|
||||
* @returns {Promise<string>} The full path of the saved file.
|
||||
* @throws Will throw an error if the file saving process fails.
|
||||
*/
|
||||
async function saveFile(file, outputPath, outputFilename) {
|
||||
try {
|
||||
if (!fs.existsSync(outputPath)) {
|
||||
fs.mkdirSync(outputPath, { recursive: true });
|
||||
}
|
||||
|
||||
const fileExtension = path.extname(file.originalname);
|
||||
const filenameWithExt = outputFilename + fileExtension;
|
||||
const outputFilePath = path.join(outputPath, filenameWithExt);
|
||||
fs.copyFileSync(file.path, outputFilePath);
|
||||
fs.unlinkSync(file.path);
|
||||
|
||||
return outputFilePath;
|
||||
} catch (error) {
|
||||
logger.error('[saveFile] Error while saving the file:', error);
|
||||
throw error;
|
||||
}
|
||||
}
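Note: for orientation, a hypothetical call (all paths and names invented), inside an async function:

const savedPath = await saveFile(
  { originalname: 'photo.png', path: '/tmp/upload_abc123' }, // minimal multer-like file object
  '/app/client/public/images/user-1',                        // outputPath, created if missing
  'photo-1234',                                              // outputFilename, extension is appended
);
// savedPath -> '/app/client/public/images/user-1/photo-1234.png'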
|
||||
|
||||
/**
|
||||
* Saves an uploaded image file to a specified directory based on the user's ID and a filename.
|
||||
*
|
||||
* @param {Express.Request} req - The Express request object, containing the user's information and app configuration.
|
||||
* @param {Express.Multer.File} file - The uploaded file object.
|
||||
* @param {string} filename - The new filename to assign to the saved image (without extension).
|
||||
* @returns {Promise<void>}
|
||||
* @throws Will throw an error if the image saving process fails.
|
||||
*/
|
||||
const saveLocalImage = async (req, file, filename) => {
|
||||
const imagePath = req.app.locals.paths.imageOutput;
|
||||
const outputPath = path.join(imagePath, req.user.id ?? '');
|
||||
await saveFile(file, outputPath, filename);
|
||||
};
|
||||
|
||||
/**
|
||||
* Saves a file from a given URL to a local directory. The function fetches the file using the provided URL,
|
||||
* determines the content type, and saves it to a specified local directory with the correct file extension.
|
||||
* If the specified directory does not exist, it is created. The function returns the name of the saved file
|
||||
* or null in case of an error.
|
||||
*
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {string} params.userId - The user's unique identifier. This is used to create a user-specific path
|
||||
* in the local file system.
|
||||
* @param {string} params.URL - The URL of the file to be downloaded and saved.
|
||||
* @param {string} params.fileName - The desired file name for the saved file. This may be modified to include
|
||||
* the correct file extension based on the content type.
|
||||
* @param {string} [params.basePath='images'] - Optional. The base directory where the file will be saved.
|
||||
* Defaults to 'images' if not specified.
|
||||
*
|
||||
* @returns {Promise<string|null>}
|
||||
* A promise that resolves to the file name if the file is successfully saved, or null if there is an error.
|
||||
*/
|
||||
async function saveFileFromURL({ userId, URL, fileName, basePath = 'images' }) {
|
||||
try {
|
||||
// Fetch the file from the URL
|
||||
const response = await axios({
|
||||
url: URL,
|
||||
responseType: 'stream',
|
||||
});
|
||||
|
||||
// Get the content type from the response headers
|
||||
const contentType = response.headers['content-type'];
|
||||
let extension = contentType.split('/').pop();
|
||||
|
||||
// Construct the outputPath based on the basePath and userId
|
||||
const outputPath = path.join(paths.publicPath, basePath, userId.toString());
|
||||
|
||||
// Check if the output directory exists, if not, create it
|
||||
if (!fs.existsSync(outputPath)) {
|
||||
fs.mkdirSync(outputPath, { recursive: true });
|
||||
}
|
||||
|
||||
// Replace or append the correct extension
|
||||
const extRegExp = new RegExp(path.extname(fileName) + '$');
|
||||
fileName = fileName.replace(extRegExp, `.${extension}`);
|
||||
if (!path.extname(fileName)) {
|
||||
fileName += `.${extension}`;
|
||||
}
|
||||
|
||||
// Create a writable stream for the output path
|
||||
const outputFilePath = path.join(outputPath, fileName);
|
||||
const writer = fs.createWriteStream(outputFilePath);
|
||||
|
||||
// Pipe the response data to the output file
|
||||
response.data.pipe(writer);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
writer.on('finish', () => resolve(fileName));
|
||||
writer.on('error', reject);
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('[saveFileFromURL] Error while saving the file:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a local file path for a given file name and base path. This function simply joins the base
|
||||
* path and the file name to create a file path. It does not check for the existence of the file at the path.
|
||||
*
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {string} params.fileName - The name of the file for which the path is to be constructed. This should
|
||||
* include the file extension.
|
||||
* @param {string} [params.basePath='images'] - Optional. The base directory to be used for constructing the file path.
|
||||
* Defaults to 'images' if not specified.
|
||||
*
|
||||
* @returns {string}
|
||||
* The constructed local file path.
|
||||
*/
|
||||
async function getLocalFileURL({ fileName, basePath = 'images' }) {
|
||||
return path.posix.join('/', basePath, fileName);
|
||||
}
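Note: a sketch of the two helpers used together, as processFileURL does later in this diff (URL and ids are placeholders), inside an async function:

const fileName = await saveFileFromURL({
  userId: 'user-1',
  URL: 'https://images.example.com/generated.png',
  fileName: 'img-abc123.png',
  basePath: 'images',
});
// fileName -> 'img-abc123.png' (extension corrected from the response content-type if needed)

const publicURL = await getLocalFileURL({ fileName: `user-1/${fileName}`, basePath: 'images' });
// publicURL -> '/images/user-1/img-abc123.png'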
|
||||
|
||||
/**
|
||||
* Validates if a given filepath is within a specified subdirectory under a base path. This function constructs
|
||||
* the expected base path using the base, subfolder, and user id from the request, and then checks if the
|
||||
* provided filepath starts with this constructed base path.
|
||||
*
|
||||
* @param {Express.Request} req - The request object from Express. It should contain a `user` property with an `id`.
|
||||
* @param {string} base - The base directory path.
|
||||
* @param {string} subfolder - The subdirectory under the base path.
|
||||
* @param {string} filepath - The complete file path to be validated.
|
||||
*
|
||||
* @returns {boolean}
|
||||
* Returns true if the filepath is within the specified base and subfolder, false otherwise.
|
||||
*/
|
||||
const isValidPath = (req, base, subfolder, filepath) => {
|
||||
const normalizedBase = path.resolve(base, subfolder, req.user.id);
|
||||
const normalizedFilepath = path.resolve(filepath);
|
||||
return normalizedFilepath.startsWith(normalizedBase);
|
||||
};
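Note: what the check above accepts and rejects, with invented paths and req.user.id === 'user-1':

isValidPath(req, '/app/client/public', 'images', '/app/client/public/images/user-1/file.webp');
// -> true: resolves inside /app/client/public/images/user-1

isValidPath(req, '/app/client/public', 'images', '/app/client/public/images/user-1/../user-2/file.webp');
// -> false: the '..' segment resolves the path into another user's directory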
|
||||
|
||||
/**
|
||||
* Deletes a file from the filesystem. This function takes a file object, constructs the full path, and
|
||||
* verifies the path's validity before deleting the file. If the path is invalid, an error is thrown.
|
||||
*
|
||||
* @param {Express.Request} req - The request object from Express. It should have an `app.locals.paths` object with
|
||||
* a `publicPath` property.
|
||||
* @param {MongoFile} file - The file object to be deleted. It should have a `filepath` property that is
|
||||
* a string representing the path of the file relative to the publicPath.
|
||||
*
|
||||
* @returns {Promise<void>}
|
||||
* A promise that resolves when the file has been successfully deleted, or throws an error if the
|
||||
* file path is invalid or if there is an error in deletion.
|
||||
*/
|
||||
const deleteLocalFile = async (req, file) => {
|
||||
const { publicPath } = req.app.locals.paths;
|
||||
const parts = file.filepath.split(path.sep);
|
||||
const subfolder = parts[1];
|
||||
const filepath = path.join(publicPath, file.filepath);
|
||||
|
||||
if (!isValidPath(req, publicPath, subfolder, filepath)) {
|
||||
throw new Error('Invalid file path');
|
||||
}
|
||||
|
||||
await fs.promises.unlink(filepath);
|
||||
};
|
||||
|
||||
module.exports = { saveFile, saveLocalImage, saveFileFromURL, getLocalFileURL, deleteLocalFile };
|
||||
140
api/server/services/Files/Local/images.js
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const sharp = require('sharp');
|
||||
const { resizeImage } = require('../images/resize');
|
||||
const { updateFile } = require('~/models/File');
|
||||
|
||||
/**
|
||||
* Converts an image file to the WebP format. The function first resizes the image based on the specified
|
||||
* resolution.
|
||||
*
|
||||
* If the original image is already in WebP format, it writes the resized image back. Otherwise,
|
||||
* it converts the image to WebP format before saving.
|
||||
*
|
||||
* The original image is deleted after conversion.
|
||||
*
|
||||
* @param {Object} req - The request object from Express. It should have a `user` property with an `id`
|
||||
* representing the user, and an `app.locals.paths` object with an `imageOutput` path.
|
||||
* @param {Express.Multer.File} file - The file object, which is part of the request. The file object should
|
||||
* have a `path` property that points to the location of the uploaded file.
|
||||
* @param {string} [resolution='high'] - Optional. The desired resolution for the image resizing. Default is 'high'.
|
||||
*
|
||||
* @returns {Promise<{ filepath: string, bytes: number, width: number, height: number}>}
|
||||
* A promise that resolves to an object containing:
|
||||
* - filepath: The path where the converted WebP image is saved.
|
||||
* - bytes: The size of the converted image in bytes.
|
||||
* - width: The width of the converted image.
|
||||
* - height: The height of the converted image.
|
||||
*/
|
||||
async function uploadLocalImage(req, file, resolution = 'high') {
|
||||
const inputFilePath = file.path;
|
||||
const { buffer: resizedBuffer, width, height } = await resizeImage(inputFilePath, resolution);
|
||||
const extension = path.extname(inputFilePath);
|
||||
|
||||
const { imageOutput } = req.app.locals.paths;
|
||||
const userPath = path.join(imageOutput, req.user.id);
|
||||
|
||||
if (!fs.existsSync(userPath)) {
|
||||
fs.mkdirSync(userPath, { recursive: true });
|
||||
}
|
||||
|
||||
const newPath = path.join(userPath, path.basename(inputFilePath));
|
||||
|
||||
if (extension.toLowerCase() === '.webp') {
|
||||
const bytes = Buffer.byteLength(resizedBuffer);
|
||||
await fs.promises.writeFile(newPath, resizedBuffer);
|
||||
const filepath = path.posix.join('/', 'images', req.user.id, path.basename(newPath));
|
||||
return { filepath, bytes, width, height };
|
||||
}
|
||||
|
||||
const outputFilePath = newPath.replace(extension, '.webp');
|
||||
const data = await sharp(resizedBuffer).toFormat('webp').toBuffer();
|
||||
await fs.promises.writeFile(outputFilePath, data);
|
||||
const bytes = Buffer.byteLength(data);
|
||||
const filepath = path.posix.join('/', 'images', req.user.id, path.basename(outputFilePath));
|
||||
await fs.promises.unlink(inputFilePath);
|
||||
return { filepath, bytes, width, height };
|
||||
}
|
||||
|
||||
/**
|
||||
* Encodes an image file to base64.
|
||||
* @param {string} imagePath - The path to the image file.
|
||||
* @returns {Promise<string>} A promise that resolves with the base64 encoded image data.
|
||||
*/
|
||||
function encodeImage(imagePath) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.readFile(imagePath, (err, data) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(data.toString('base64'));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
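Note: the base64 string is later wrapped into a data URI by encodeAndFormat; a short sketch (path invented), inside an async function:

const base64 = await encodeImage('/app/client/public/images/user-1/photo.webp');
const imagePart = {
  type: 'image_url',
  image_url: { url: `data:image/webp;base64,${base64}`, detail: 'auto' },
};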
|
||||
|
||||
/**
|
||||
* Local: Updates the file and encodes the image to base64,
|
||||
* for image payload handling: tuple order of [filepath, base64].
|
||||
* @param {Object} req - The request object.
|
||||
* @param {MongoFile} file - The file object.
|
||||
* @returns {Promise<[MongoFile, string]>} - A promise that resolves to an array of results from updateFile and encodeImage.
|
||||
*/
|
||||
async function prepareImagesLocal(req, file) {
|
||||
const { publicPath, imageOutput } = req.app.locals.paths;
|
||||
const userPath = path.join(imageOutput, req.user.id);
|
||||
|
||||
if (!fs.existsSync(userPath)) {
|
||||
fs.mkdirSync(userPath, { recursive: true });
|
||||
}
|
||||
const filepath = path.join(publicPath, file.filepath);
|
||||
|
||||
const promises = [];
|
||||
promises.push(updateFile({ file_id: file.file_id }));
|
||||
promises.push(encodeImage(filepath));
|
||||
return await Promise.all(promises);
|
||||
}
|
||||
|
||||
/**
|
||||
* Uploads a user's avatar to the local file system and returns the URL.
|
||||
* If the 'manual' flag is set to 'true', it also updates the user's avatar URL in the database.
|
||||
*
|
||||
* @param {object} params - The parameters object.
|
||||
* @param {Buffer} params.buffer - The Buffer containing the avatar image in WebP format.
|
||||
* @param {object} params.User - The User document (mongoose); TODO: remove direct use of Model, `User`
|
||||
* @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
|
||||
* @returns {Promise<string>} - A promise that resolves with the URL of the uploaded avatar.
|
||||
* @throws {Error} - Throws an error if there is an error in writing or saving the avatar.
|
||||
*/
|
||||
async function processLocalAvatar({ buffer, User, manual }) {
|
||||
const userDir = path.resolve(
|
||||
__dirname,
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'client',
|
||||
'public',
|
||||
'images',
|
||||
User._id.toString(),
|
||||
);
|
||||
const fileName = `avatar-${new Date().getTime()}.png`;
|
||||
const urlRoute = `/images/${User._id.toString()}/${fileName}`;
|
||||
const avatarPath = path.join(userDir, fileName);
|
||||
|
||||
await fs.promises.mkdir(userDir, { recursive: true });
|
||||
await fs.promises.writeFile(avatarPath, buffer);
|
||||
|
||||
const isManual = manual === 'true';
|
||||
let url = `${urlRoute}?manual=${isManual}`;
|
||||
|
||||
if (isManual) {
|
||||
User.avatar = url;
|
||||
await User.save();
|
||||
}
|
||||
|
||||
return url;
|
||||
}
|
||||
|
||||
module.exports = { uploadLocalImage, encodeImage, prepareImagesLocal, processLocalAvatar };
|
||||
7
api/server/services/Files/Local/index.js
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
const images = require('./images');
|
||||
const crud = require('./crud');
|
||||
|
||||
module.exports = {
|
||||
...crud,
|
||||
...images,
|
||||
};
|
||||
78
api/server/services/Files/images/avatar.js
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
const sharp = require('sharp');
|
||||
const fs = require('fs').promises;
|
||||
const fetch = require('node-fetch');
|
||||
const User = require('~/models/User');
|
||||
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
async function convertToWebP(inputBuffer) {
|
||||
return sharp(inputBuffer).resize({ width: 150 }).toFormat('webp').toBuffer();
|
||||
}
|
||||
|
||||
/**
|
||||
* Uploads an avatar image for a user. This function can handle various types of input (URL, Buffer, or File object),
|
||||
* processes the image to a square format, converts it to WebP format, and then uses a specified file strategy for
|
||||
* further processing. It performs validation on the user ID and the input type. The function can throw errors for
|
||||
* invalid input types, fetching issues, or other processing errors.
|
||||
*
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {string} params.userId - The unique identifier of the user for whom the avatar is being uploaded.
|
||||
* @param {FileSources} params.fileStrategy - The file handling strategy to use, determining how the avatar is processed.
|
||||
* @param {(string|Buffer|File)} params.input - The input representing the avatar image. Can be a URL (string),
|
||||
* a Buffer, or a File object.
|
||||
* @param {string} params.manual - A string flag indicating whether the upload process is manual.
|
||||
*
|
||||
* @returns {Promise<any>}
|
||||
* A promise that resolves to the result of the `processAvatar` function, specific to the chosen file
|
||||
* strategy. Throws an error if any step in the process fails.
|
||||
*
|
||||
* @throws {Error} Throws an error if the user ID is undefined, the input type is invalid, the image fetching fails,
|
||||
* or any other error occurs during the processing.
|
||||
*/
|
||||
async function uploadAvatar({ userId, fileStrategy, input, manual }) {
|
||||
try {
|
||||
if (userId === undefined) {
|
||||
throw new Error('User ID is undefined');
|
||||
}
|
||||
const _id = userId;
|
||||
// TODO: remove direct use of Model, `User`
|
||||
const oldUser = await User.findOne({ _id });
|
||||
|
||||
let imageBuffer;
|
||||
if (typeof input === 'string') {
|
||||
const response = await fetch(input);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch image from URL. Status: ${response.status}`);
|
||||
}
|
||||
imageBuffer = await response.buffer();
|
||||
} else if (input instanceof Buffer) {
|
||||
imageBuffer = input;
|
||||
} else if (typeof input === 'object' && input instanceof File) {
|
||||
const fileContent = await fs.readFile(input.path);
|
||||
imageBuffer = Buffer.from(fileContent);
|
||||
} else {
|
||||
throw new Error('Invalid input type. Expected URL, Buffer, or File.');
|
||||
}
|
||||
|
||||
const { width, height } = await sharp(imageBuffer).metadata();
|
||||
const minSize = Math.min(width, height);
|
||||
const squaredBuffer = await sharp(imageBuffer)
|
||||
.extract({
|
||||
left: Math.floor((width - minSize) / 2),
|
||||
top: Math.floor((height - minSize) / 2),
|
||||
width: minSize,
|
||||
height: minSize,
|
||||
})
|
||||
.toBuffer();
|
||||
|
||||
const webPBuffer = await convertToWebP(squaredBuffer);
|
||||
const { processAvatar } = getStrategyFunctions(fileStrategy);
|
||||
return await processAvatar({ buffer: webPBuffer, User: oldUser, manual });
|
||||
} catch (error) {
|
||||
logger.error('Error uploading the avatar:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = uploadAvatar;
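Note: a hedged sketch of a call from a social-login flow (ids and URL are placeholders), inside an async function:

const { FileSources } = require('librechat-data-provider');
const uploadAvatar = require('~/server/services/Files/images/avatar');

const avatarURL = await uploadAvatar({
  userId: '652f8b2e9d1c3a0012345678',   // mongoose ObjectId as a string (placeholder)
  fileStrategy: FileSources.firebase,   // or FileSources.local
  input: 'https://cdn.example.com/raw-avatar.jpg',
  manual: 'false',                      // string flag, mirrored into the ?manual= query param
});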
|
||||
|
|
@ -1,29 +0,0 @@
|
|||
const { ref, uploadBytes, getDownloadURL } = require('firebase/storage');
|
||||
const { getFirebaseStorage } = require('~/server/services/Files/Firebase/initialize');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
async function firebaseStrategy(userId, webPBuffer, oldUser, manual) {
|
||||
try {
|
||||
const storage = getFirebaseStorage();
|
||||
if (!storage) {
|
||||
throw new Error('Firebase is not initialized.');
|
||||
}
|
||||
const avatarRef = ref(storage, `images/${userId.toString()}/avatar`);
|
||||
|
||||
await uploadBytes(avatarRef, webPBuffer);
|
||||
const urlFirebase = await getDownloadURL(avatarRef);
|
||||
const isManual = manual === 'true';
|
||||
|
||||
const url = `${urlFirebase}?manual=${isManual}`;
|
||||
if (isManual) {
|
||||
oldUser.avatar = url;
|
||||
await oldUser.save();
|
||||
}
|
||||
return url;
|
||||
} catch (error) {
|
||||
logger.error('Error uploading profile picture:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = firebaseStrategy;
|
||||
|
|
@ -1,32 +0,0 @@
|
|||
const fs = require('fs').promises;
|
||||
const path = require('path');
|
||||
|
||||
async function localStrategy(userId, webPBuffer, oldUser, manual) {
|
||||
const userDir = path.resolve(
|
||||
__dirname,
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'client',
|
||||
'public',
|
||||
'images',
|
||||
userId,
|
||||
);
|
||||
let avatarPath = path.join(userDir, 'avatar.png');
|
||||
const urlRoute = `/images/${userId}/avatar.png`;
|
||||
await fs.mkdir(userDir, { recursive: true });
|
||||
await fs.writeFile(avatarPath, webPBuffer);
|
||||
const isManual = manual === 'true';
|
||||
let url = `${urlRoute}?manual=${isManual}&timestamp=${new Date().getTime()}`;
|
||||
if (isManual) {
|
||||
oldUser.avatar = url;
|
||||
await oldUser.save();
|
||||
}
|
||||
|
||||
return url;
|
||||
}
|
||||
|
||||
module.exports = localStrategy;
|
||||
|
|
@ -1,63 +0,0 @@
|
|||
const sharp = require('sharp');
|
||||
const fetch = require('node-fetch');
|
||||
const fs = require('fs').promises;
|
||||
const User = require('~/models/User');
|
||||
const { getFirebaseStorage } = require('~/server/services/Files/Firebase/initialize');
|
||||
const firebaseStrategy = require('./firebaseStrategy');
|
||||
const localStrategy = require('./localStrategy');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
async function convertToWebP(inputBuffer) {
|
||||
return sharp(inputBuffer).resize({ width: 150 }).toFormat('webp').toBuffer();
|
||||
}
|
||||
|
||||
async function uploadAvatar(userId, input, manual) {
|
||||
try {
|
||||
if (userId === undefined) {
|
||||
throw new Error('User ID is undefined');
|
||||
}
|
||||
const _id = userId;
|
||||
// TODO: remove direct use of Model, `User`
|
||||
const oldUser = await User.findOne({ _id });
|
||||
let imageBuffer;
|
||||
if (typeof input === 'string') {
|
||||
const response = await fetch(input);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch image from URL. Status: ${response.status}`);
|
||||
}
|
||||
imageBuffer = await response.buffer();
|
||||
} else if (input instanceof Buffer) {
|
||||
imageBuffer = input;
|
||||
} else if (typeof input === 'object' && input instanceof File) {
|
||||
const fileContent = await fs.readFile(input.path);
|
||||
imageBuffer = Buffer.from(fileContent);
|
||||
} else {
|
||||
throw new Error('Invalid input type. Expected URL, Buffer, or File.');
|
||||
}
|
||||
const { width, height } = await sharp(imageBuffer).metadata();
|
||||
const minSize = Math.min(width, height);
|
||||
const squaredBuffer = await sharp(imageBuffer)
|
||||
.extract({
|
||||
left: Math.floor((width - minSize) / 2),
|
||||
top: Math.floor((height - minSize) / 2),
|
||||
width: minSize,
|
||||
height: minSize,
|
||||
})
|
||||
.toBuffer();
|
||||
const webPBuffer = await convertToWebP(squaredBuffer);
|
||||
const storage = getFirebaseStorage();
|
||||
if (storage) {
|
||||
const url = await firebaseStrategy(userId, webPBuffer, oldUser, manual);
|
||||
return url;
|
||||
}
|
||||
|
||||
const url = await localStrategy(userId, webPBuffer, oldUser, manual);
|
||||
return url;
|
||||
} catch (error) {
|
||||
logger.error('Error uploading the avatar:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = uploadAvatar;
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
const path = require('path');
|
||||
const sharp = require('sharp');
|
||||
const fs = require('fs');
|
||||
const { resizeImage } = require('./resize');
|
||||
|
||||
async function convertToWebP(req, file, resolution = 'high') {
|
||||
const inputFilePath = file.path;
|
||||
const { buffer: resizedBuffer, width, height } = await resizeImage(inputFilePath, resolution);
|
||||
const extension = path.extname(inputFilePath);
|
||||
|
||||
const { imageOutput } = req.app.locals.config;
|
||||
const userPath = path.join(imageOutput, req.user.id);
|
||||
|
||||
if (!fs.existsSync(userPath)) {
|
||||
fs.mkdirSync(userPath, { recursive: true });
|
||||
}
|
||||
|
||||
const newPath = path.join(userPath, path.basename(inputFilePath));
|
||||
|
||||
if (extension.toLowerCase() === '.webp') {
|
||||
const bytes = Buffer.byteLength(resizedBuffer);
|
||||
await fs.promises.writeFile(newPath, resizedBuffer);
|
||||
const filepath = path.posix.join('/', 'images', req.user.id, path.basename(newPath));
|
||||
return { filepath, bytes, width, height };
|
||||
}
|
||||
|
||||
const outputFilePath = newPath.replace(extension, '.webp');
|
||||
const data = await sharp(resizedBuffer).toFormat('webp').toBuffer();
|
||||
await fs.promises.writeFile(outputFilePath, data);
|
||||
const bytes = Buffer.byteLength(data);
|
||||
const filepath = path.posix.join('/', 'images', req.user.id, path.basename(outputFilePath));
|
||||
await fs.promises.unlink(inputFilePath);
|
||||
return { filepath, bytes, width, height };
|
||||
}
|
||||
|
||||
module.exports = { convertToWebP };
|
||||
|
|
@ -1,45 +1,5 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { EModelEndpoint } = require('librechat-data-provider');
|
||||
const { updateFile } = require('~/models');
|
||||
|
||||
/**
|
||||
* Encodes an image file to base64.
|
||||
* @param {string} imagePath - The path to the image file.
|
||||
* @returns {Promise<string>} A promise that resolves with the base64 encoded image data.
|
||||
*/
|
||||
function encodeImage(imagePath) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.readFile(imagePath, (err, data) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
resolve(data.toString('base64'));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the file and encodes the image.
|
||||
* @param {Object} req - The request object.
|
||||
* @param {Object} file - The file object.
|
||||
* @returns {Promise<[MongoFile, string]>} - A promise that resolves to an array of results from updateFile and encodeImage.
|
||||
*/
|
||||
async function updateAndEncode(req, file) {
|
||||
const { publicPath, imageOutput } = req.app.locals.config;
|
||||
const userPath = path.join(imageOutput, req.user.id);
|
||||
|
||||
if (!fs.existsSync(userPath)) {
|
||||
fs.mkdirSync(userPath, { recursive: true });
|
||||
}
|
||||
const filepath = path.join(publicPath, file.filepath);
|
||||
|
||||
const promises = [];
|
||||
promises.push(updateFile({ file_id: file.file_id }));
|
||||
promises.push(encodeImage(filepath));
|
||||
return await Promise.all(promises);
|
||||
}
|
||||
const { EModelEndpoint, FileSources } = require('librechat-data-provider');
|
||||
const { getStrategyFunctions } = require('../strategies');
|
||||
|
||||
/**
|
||||
* Encodes and formats the given files.
|
||||
|
|
@ -50,25 +10,42 @@ async function updateAndEncode(req, file) {
|
|||
*/
|
||||
async function encodeAndFormat(req, files, endpoint) {
|
||||
const promises = [];
|
||||
const encodingMethods = {};
|
||||
|
||||
for (let file of files) {
|
||||
promises.push(updateAndEncode(req, file));
|
||||
const source = file.source ?? FileSources.local;
|
||||
|
||||
if (encodingMethods[source]) {
|
||||
promises.push(encodingMethods[source](req, file));
|
||||
continue;
|
||||
}
|
||||
|
||||
const { prepareImagePayload } = getStrategyFunctions(source);
|
||||
if (!prepareImagePayload) {
|
||||
throw new Error(`Encoding function not implemented for ${source}`);
|
||||
}
|
||||
|
||||
encodingMethods[source] = prepareImagePayload;
|
||||
promises.push(prepareImagePayload(req, file));
|
||||
}
|
||||
|
||||
// TODO: make detail configurable, as of now resizing is done
|
||||
// to prefer "high" but "low" may be used if the image is small enough
|
||||
const detail = req.body.detail ?? 'auto';
|
||||
const encodedImages = await Promise.all(promises);
|
||||
const detail = req.body.imageDetail ?? 'auto';
|
||||
|
||||
/** @type {Array<[MongoFile, string]>} */
|
||||
const formattedImages = await Promise.all(promises);
|
||||
|
||||
const result = {
|
||||
files: [],
|
||||
image_urls: [],
|
||||
};
|
||||
|
||||
for (const [file, base64] of encodedImages) {
|
||||
for (const [file, imageContent] of formattedImages) {
|
||||
const imagePart = {
|
||||
type: 'image_url',
|
||||
image_url: {
|
||||
url: `data:image/webp;base64,${base64}`,
|
||||
url: imageContent.startsWith('http')
|
||||
? imageContent
|
||||
: `data:image/webp;base64,${imageContent}`,
|
||||
detail,
|
||||
},
|
||||
};
|
||||
|
|
@ -81,17 +58,16 @@ async function encodeAndFormat(req, files, endpoint) {
|
|||
|
||||
result.files.push({
|
||||
file_id: file.file_id,
|
||||
filepath: file.filepath,
|
||||
filename: file.filename,
|
||||
type: file.type,
|
||||
height: file.height,
|
||||
width: file.width,
|
||||
// filepath: file.filepath,
|
||||
// filename: file.filename,
|
||||
// type: file.type,
|
||||
// height: file.height,
|
||||
// width: file.width,
|
||||
});
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
encodeImage,
|
||||
encodeAndFormat,
|
||||
};
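Note: roughly the shape resolved by encodeAndFormat after this change (values invented; only file_id is kept on result.files now):

const result = {
  files: [{ file_id: 'f_abc123' }],
  image_urls: [
    {
      type: 'image_url',
      image_url: { url: 'data:image/webp;base64,AAAB...', detail: 'auto' },
    },
  ],
};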
|
||||
|
|
|
|||
|
|
@ -1,15 +1,13 @@
|
|||
const convert = require('./convert');
|
||||
const avatar = require('./avatar');
|
||||
const encode = require('./encode');
|
||||
const parse = require('./parse');
|
||||
const resize = require('./resize');
|
||||
const validate = require('./validate');
|
||||
const uploadAvatar = require('./avatar/uploadAvatar');
|
||||
|
||||
module.exports = {
|
||||
...convert,
|
||||
...encode,
|
||||
...parse,
|
||||
...resize,
|
||||
...validate,
|
||||
uploadAvatar,
|
||||
avatar,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const URL = require('url').URL;
|
||||
const path = require('path');
|
||||
|
||||
const imageExtensionRegex = /\.(jpg|jpeg|png|gif|bmp|tiff|svg)$/i;
|
||||
const imageExtensionRegex = /\.(jpg|jpeg|png|gif|bmp|tiff|svg|webp)$/i;
|
||||
|
||||
/**
|
||||
* Extracts the image basename from a given URL.
|
||||
|
|
@ -22,6 +22,24 @@ function getImageBasename(urlString) {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the basename of a file from a given URL.
|
||||
*
|
||||
* @param {string} urlString - The URL string from which the file basename is to be extracted.
|
||||
* @returns {string} The basename of the file from the URL.
|
||||
* Returns an empty string if the URL parsing fails.
|
||||
*/
|
||||
function getFileBasename(urlString) {
|
||||
try {
|
||||
const url = new URL(urlString);
|
||||
return path.basename(url.pathname);
|
||||
} catch (error) {
|
||||
// If URL parsing fails, return an empty string
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getImageBasename,
|
||||
getFileBasename,
|
||||
};
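Note: two invented inputs to illustrate the new helper:

getFileBasename('https://example.com/images/user-1/img-abc123.webp?alt=media');
// -> 'img-abc123.webp' (the query string is not part of url.pathname)

getFileBasename('not a valid url');
// -> '' (URL parsing throws, so the helper returns an empty string)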
|
||||
|
|
|
|||
|
|
@ -1,9 +0,0 @@
|
|||
const localStrategy = require('./localStrategy');
|
||||
const process = require('./process');
|
||||
const save = require('./save');
|
||||
|
||||
module.exports = {
|
||||
...save,
|
||||
...process,
|
||||
localStrategy,
|
||||
};
|
||||
|
|
@ -1,36 +0,0 @@
|
|||
const { createFile } = require('~/models');
|
||||
const { convertToWebP } = require('./images/convert');
|
||||
|
||||
/**
|
||||
* Applies the local strategy for image uploads.
|
||||
* Saves file metadata to the database with an expiry TTL.
|
||||
* Files must be deleted from the server filesystem manually.
|
||||
*
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {Express.Request} params.req - The Express request object.
|
||||
* @param {Express.Response} params.res - The Express response object.
|
||||
* @param {Express.Multer.File} params.file - The uploaded file.
|
||||
* @param {ImageMetadata} params.metadata - Additional metadata for the file.
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
const localStrategy = async ({ req, res, file, metadata }) => {
|
||||
const { file_id, temp_file_id } = metadata;
|
||||
const { filepath, bytes, width, height } = await convertToWebP(req, file);
|
||||
const result = await createFile(
|
||||
{
|
||||
user: req.user.id,
|
||||
file_id,
|
||||
temp_file_id,
|
||||
bytes,
|
||||
filepath,
|
||||
filename: file.originalname,
|
||||
type: 'image/webp',
|
||||
width,
|
||||
height,
|
||||
},
|
||||
true,
|
||||
);
|
||||
res.status(200).json({ message: 'File uploaded and processed successfully', ...result });
|
||||
};
|
||||
|
||||
module.exports = localStrategy;
|
||||
|
|
@ -1,17 +1,6 @@
|
|||
const { updateFileUsage } = require('~/models');
|
||||
|
||||
// const mapImageUrls = (files, detail) => {
|
||||
// return files
|
||||
// .filter((file) => file.type.includes('image'))
|
||||
// .map((file) => ({
|
||||
// type: 'image_url',
|
||||
// image_url: {
|
||||
// /* Temporarily set to path to encode later */
|
||||
// url: file.filepath,
|
||||
// detail,
|
||||
// },
|
||||
// }));
|
||||
// };
|
||||
const { updateFileUsage, createFile } = require('~/models');
|
||||
const { getStrategyFunctions } = require('./strategies');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
const processFiles = async (files) => {
|
||||
const promises = [];
|
||||
|
|
@ -24,6 +13,76 @@ const processFiles = async (files) => {
|
|||
return await Promise.all(promises);
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
processFiles,
|
||||
/**
|
||||
* Processes a file URL using a specified file handling strategy. This function accepts a strategy name,
|
||||
* fetches the corresponding file processing functions (for saving and retrieving file URLs), and then
|
||||
* executes these functions in sequence. It first saves the file using the provided URL and then retrieves
|
||||
* the URL of the saved file. If any error occurs during this process, it logs the error and throws an
|
||||
* exception with an appropriate message.
|
||||
*
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {FileSources} params.fileStrategy - The file handling strategy to use. Must be a value from the
|
||||
* `FileSources` enum, which defines different file handling
|
||||
* strategies (like saving to Firebase, local storage, etc.).
|
||||
* @param {string} params.userId - The user's unique identifier. Used for creating user-specific paths or
|
||||
* references in the file handling process.
|
||||
* @param {string} params.URL - The URL of the file to be processed.
|
||||
* @param {string} params.fileName - The name that will be used to save the file. This should include the
|
||||
* file extension.
|
||||
* @param {string} params.basePath - The base path or directory where the file will be saved or retrieved from.
|
||||
*
|
||||
* @returns {Promise<string>}
|
||||
* A promise that resolves to the URL of the processed file. It throws an error if the file processing
|
||||
* fails at any stage.
|
||||
*/
|
||||
const processFileURL = async ({ fileStrategy, userId, URL, fileName, basePath }) => {
|
||||
const { saveURL, getFileURL } = getStrategyFunctions(fileStrategy);
|
||||
try {
|
||||
await saveURL({ userId, URL, fileName, basePath });
|
||||
return await getFileURL({ fileName: `${userId}/${fileName}`, basePath });
|
||||
} catch (error) {
|
||||
logger.error(`Error while processing the image with ${fileStrategy}:`, error);
|
||||
throw new Error(`Failed to process the image with ${fileStrategy}. ${error.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Applies the current strategy for image uploads.
|
||||
* Saves file metadata to the database with an expiry TTL.
|
||||
* Files must be deleted from the server filesystem manually.
|
||||
*
|
||||
* @param {Object} params - The parameters object.
|
||||
* @param {Express.Request} params.req - The Express request object.
|
||||
* @param {Express.Response} params.res - The Express response object.
|
||||
* @param {Express.Multer.File} params.file - The uploaded file.
|
||||
* @param {ImageMetadata} params.metadata - Additional metadata for the file.
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
const processImageUpload = async ({ req, res, file, metadata }) => {
|
||||
const source = req.app.locals.fileStrategy;
|
||||
const { handleImageUpload } = getStrategyFunctions(source);
|
||||
const { file_id, temp_file_id } = metadata;
|
||||
const { filepath, bytes, width, height } = await handleImageUpload(req, file);
|
||||
const result = await createFile(
|
||||
{
|
||||
user: req.user.id,
|
||||
file_id,
|
||||
temp_file_id,
|
||||
bytes,
|
||||
filepath,
|
||||
filename: file.originalname,
|
||||
source,
|
||||
type: 'image/webp',
|
||||
width,
|
||||
height,
|
||||
},
|
||||
true,
|
||||
);
|
||||
res.status(200).json({ message: 'File uploaded and processed successfully', ...result });
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
processImageUpload,
|
||||
processFiles,
|
||||
processFileURL,
|
||||
};
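Note: a sketch of how an image-generating tool (e.g. DALL-E, per this PR) could hand its result to processFileURL; the require path and values are assumptions:

const { processFileURL } = require('~/server/services/Files/process');

// Inside an async tool call with access to the request:
const filepath = await processFileURL({
  fileStrategy: req.app.locals.fileStrategy,       // 'local' or 'firebase', set from librechat.yaml
  userId: req.user.id,
  URL: 'https://generated.example.com/output.png', // placeholder
  fileName: 'img-abc123.png',
  basePath: 'images',
});
// filepath -> a public URL for the saved copy, regardless of which strategy handled it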
|
||||
|
|
|
|||
|
|
@ -1,48 +0,0 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { logger } = require('~/config');
|
||||
|
||||
/**
|
||||
* Saves a file to a specified output path with a new filename.
|
||||
*
|
||||
* @param {Express.Multer.File} file - The file object to be saved. Should contain properties like 'originalname' and 'path'.
|
||||
* @param {string} outputPath - The path where the file should be saved.
|
||||
* @param {string} outputFilename - The new filename for the saved file (without extension).
|
||||
* @returns {Promise<string>} The full path of the saved file.
|
||||
* @throws Will throw an error if the file saving process fails.
|
||||
*/
|
||||
async function saveFile(file, outputPath, outputFilename) {
|
||||
try {
|
||||
if (!fs.existsSync(outputPath)) {
|
||||
fs.mkdirSync(outputPath, { recursive: true });
|
||||
}
|
||||
|
||||
const fileExtension = path.extname(file.originalname);
|
||||
const filenameWithExt = outputFilename + fileExtension;
|
||||
const outputFilePath = path.join(outputPath, filenameWithExt);
|
||||
fs.copyFileSync(file.path, outputFilePath);
|
||||
fs.unlinkSync(file.path);
|
||||
|
||||
return outputFilePath;
|
||||
} catch (error) {
|
||||
logger.error('[saveFile] Error while saving the file:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Saves an uploaded image file to a specified directory based on the user's ID and a filename.
|
||||
*
|
||||
* @param {Express.Request} req - The Express request object, containing the user's information and app configuration.
|
||||
* @param {Express.Multer.File} file - The uploaded file object.
|
||||
* @param {string} filename - The new filename to assign to the saved image (without extension).
|
||||
* @returns {Promise<void>}
|
||||
* @throws Will throw an error if the image saving process fails.
|
||||
*/
|
||||
const saveLocalImage = async (req, file, filename) => {
|
||||
const imagePath = req.app.locals.config.imageOutput;
|
||||
const outputPath = path.join(imagePath, req.user.id ?? '');
|
||||
await saveFile(file, outputPath, filename);
|
||||
};
|
||||
|
||||
module.exports = { saveFile, saveLocalImage };
|
||||
54
api/server/services/Files/strategies.js
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
const { FileSources } = require('librechat-data-provider');
|
||||
const {
|
||||
getFirebaseURL,
|
||||
prepareImageURL,
|
||||
saveURLToFirebase,
|
||||
deleteFirebaseFile,
|
||||
uploadImageToFirebase,
|
||||
processFirebaseAvatar,
|
||||
} = require('./Firebase');
|
||||
const {
|
||||
getLocalFileURL,
|
||||
saveFileFromURL,
|
||||
deleteLocalFile,
|
||||
uploadLocalImage,
|
||||
prepareImagesLocal,
|
||||
processLocalAvatar,
|
||||
} = require('./Local');
|
||||
|
||||
// Firebase Strategy Functions
|
||||
const firebaseStrategy = () => ({
|
||||
// saveFile:
|
||||
saveURL: saveURLToFirebase,
|
||||
getFileURL: getFirebaseURL,
|
||||
deleteFile: deleteFirebaseFile,
|
||||
prepareImagePayload: prepareImageURL,
|
||||
processAvatar: processFirebaseAvatar,
|
||||
handleImageUpload: uploadImageToFirebase,
|
||||
});
|
||||
|
||||
// Local Strategy Functions
|
||||
const localStrategy = () => ({
|
||||
// saveFile: ,
|
||||
saveURL: saveFileFromURL,
|
||||
getFileURL: getLocalFileURL,
|
||||
deleteFile: deleteLocalFile,
|
||||
processAvatar: processLocalAvatar,
|
||||
handleImageUpload: uploadLocalImage,
|
||||
prepareImagePayload: prepareImagesLocal,
|
||||
});
|
||||
|
||||
// Strategy Selector
|
||||
const getStrategyFunctions = (fileSource) => {
|
||||
if (fileSource === FileSources.firebase) {
|
||||
return firebaseStrategy();
|
||||
} else if (fileSource === FileSources.local) {
|
||||
return localStrategy();
|
||||
} else {
|
||||
throw new Error('Invalid file source');
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
getStrategyFunctions,
|
||||
};
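Note: a minimal sketch of the selector in use inside a request handler; app.locals.fileStrategy is assumed to be populated by AppService from librechat.yaml:

const { FileSources } = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');

const source = req.app.locals.fileStrategy ?? FileSources.local;
const { handleImageUpload, deleteFile } = getStrategyFunctions(source);

// Callers stay storage-agnostic: both strategies expose the same function names.
const { filepath, bytes, width, height } = await handleImageUpload(req, req.file);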
|
||||
|
|
@ -10,6 +10,10 @@ const {
|
|||
} = require('../strategies');
|
||||
const client = require('../cache/redis');
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {Express.Application} app
|
||||
*/
|
||||
const configureSocialLogins = (app) => {
|
||||
if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) {
|
||||
passport.use(googleLogin());
|
||||
|
|
|
|||
|
|
@ -1,12 +1,14 @@
|
|||
const { Strategy: DiscordStrategy } = require('passport-discord');
|
||||
const { createNewUser, handleExistingUser } = require('./process');
|
||||
const { logger } = require('~/config');
|
||||
const User = require('~/models/User');
|
||||
const { useFirebase, uploadAvatar } = require('~/server/services/Files/images');
|
||||
|
||||
const discordLogin = async (accessToken, refreshToken, profile, cb) => {
|
||||
try {
|
||||
const email = profile.email;
|
||||
const discordId = profile.id;
|
||||
|
||||
// TODO: remove direct access of User model
|
||||
const oldUser = await User.findOne({ email });
|
||||
const ALLOW_SOCIAL_REGISTRATION =
|
||||
process.env.ALLOW_SOCIAL_REGISTRATION?.toLowerCase() === 'true';
|
||||
|
|
@ -21,12 +23,20 @@ const discordLogin = async (accessToken, refreshToken, profile, cb) => {
|
|||
}
|
||||
|
||||
if (oldUser) {
|
||||
await handleExistingUser(oldUser, avatarUrl, useFirebase);
|
||||
await handleExistingUser(oldUser, avatarUrl);
|
||||
return cb(null, oldUser);
|
||||
}
|
||||
|
||||
if (ALLOW_SOCIAL_REGISTRATION) {
|
||||
const newUser = await createNewUser(profile, discordId, email, avatarUrl, useFirebase);
|
||||
const newUser = await createNewUser({
|
||||
email,
|
||||
avatarUrl,
|
||||
provider: 'discord',
|
||||
providerKey: 'discordId',
|
||||
providerId: discordId,
|
||||
username: profile.username,
|
||||
name: profile.global_name,
|
||||
});
|
||||
return cb(null, newUser);
|
||||
}
|
||||
} catch (err) {
|
||||
|
|
@ -35,38 +45,6 @@ const discordLogin = async (accessToken, refreshToken, profile, cb) => {
|
|||
}
|
||||
};
|
||||
|
||||
const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => {
|
||||
if (!useFirebase && !oldUser.avatar.includes('?manual=true')) {
|
||||
oldUser.avatar = avatarUrl;
|
||||
await oldUser.save();
|
||||
} else if (useFirebase && !oldUser.avatar.includes('?manual=true')) {
|
||||
const userId = oldUser._id;
|
||||
const newavatarUrl = await uploadAvatar(userId, avatarUrl);
|
||||
oldUser.avatar = newavatarUrl;
|
||||
await oldUser.save();
|
||||
}
|
||||
};
|
||||
|
||||
const createNewUser = async (profile, discordId, email, avatarUrl, useFirebase) => {
|
||||
const newUser = await new User({
|
||||
provider: 'discord',
|
||||
discordId,
|
||||
username: profile.username,
|
||||
email,
|
||||
name: profile.global_name,
|
||||
avatar: avatarUrl,
|
||||
}).save();
|
||||
|
||||
if (useFirebase) {
|
||||
const userId = newUser._id;
|
||||
const newavatarUrl = await uploadAvatar(userId, avatarUrl);
|
||||
newUser.avatar = newavatarUrl;
|
||||
await newUser.save();
|
||||
}
|
||||
|
||||
return newUser;
|
||||
};
|
||||
|
||||
module.exports = () =>
|
||||
new DiscordStrategy(
|
||||
{
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const FacebookStrategy = require('passport-facebook').Strategy;
|
||||
const { createNewUser, handleExistingUser } = require('./process');
|
||||
const { logger } = require('~/config');
|
||||
const User = require('~/models/User');
|
||||
const { useFirebase, uploadAvatar } = require('~/server/services/Files/images');
|
||||
|
||||
const facebookLogin = async (accessToken, refreshToken, profile, cb) => {
|
||||
try {
|
||||
|
|
@ -13,12 +13,20 @@ const facebookLogin = async (accessToken, refreshToken, profile, cb) => {
|
|||
const avatarUrl = profile.photos[0]?.value;
|
||||
|
||||
if (oldUser) {
|
||||
await handleExistingUser(oldUser, avatarUrl, useFirebase);
|
||||
await handleExistingUser(oldUser, avatarUrl);
|
||||
return cb(null, oldUser);
|
||||
}
|
||||
|
||||
if (ALLOW_SOCIAL_REGISTRATION) {
|
||||
const newUser = await createNewUser(profile, facebookId, email, avatarUrl, useFirebase);
|
||||
const newUser = await createNewUser({
|
||||
email,
|
||||
avatarUrl,
|
||||
provider: 'facebook',
|
||||
providerKey: 'facebookId',
|
||||
providerId: facebookId,
|
||||
username: profile.displayName,
|
||||
name: profile.name?.givenName + ' ' + profile.name?.familyName,
|
||||
});
|
||||
return cb(null, newUser);
|
||||
}
|
||||
} catch (err) {
|
||||
|
|
@ -27,38 +35,6 @@ const facebookLogin = async (accessToken, refreshToken, profile, cb) => {
|
|||
}
|
||||
};
|
||||
|
||||
const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => {
|
||||
if (!useFirebase && !oldUser.avatar.includes('?manual=true')) {
|
||||
oldUser.avatar = avatarUrl;
|
||||
await oldUser.save();
|
||||
} else if (useFirebase && !oldUser.avatar.includes('?manual=true')) {
|
||||
const userId = oldUser._id;
|
||||
const newavatarUrl = await uploadAvatar(userId, avatarUrl);
|
||||
oldUser.avatar = newavatarUrl;
|
||||
await oldUser.save();
|
||||
}
|
||||
};
|
||||
|
||||
const createNewUser = async (profile, facebookId, email, avatarUrl, useFirebase) => {
|
||||
const newUser = await new User({
|
||||
provider: 'facebook',
|
||||
facebookId,
|
||||
username: profile.displayName,
|
||||
email,
|
||||
name: profile.name?.givenName + ' ' + profile.name?.familyName,
|
||||
avatar: avatarUrl,
|
||||
}).save();
|
||||
|
||||
if (useFirebase) {
|
||||
const userId = newUser._id;
|
||||
const newavatarUrl = await uploadAvatar(userId, avatarUrl);
|
||||
newUser.avatar = newavatarUrl;
|
||||
await newUser.save();
|
||||
}
|
||||
|
||||
return newUser;
|
||||
};
|
||||
|
||||
module.exports = () =>
|
||||
new FacebookStrategy(
|
||||
{
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const { Strategy: GitHubStrategy } = require('passport-github2');
|
||||
const { createNewUser, handleExistingUser } = require('./process');
|
||||
const { logger } = require('~/config');
|
||||
const User = require('~/models/User');
|
||||
const { useFirebase, uploadAvatar } = require('~/server/services/Files/images');
|
||||
|
||||
const githubLogin = async (accessToken, refreshToken, profile, cb) => {
|
||||
try {
|
||||
|
|
@ -13,12 +13,21 @@ const githubLogin = async (accessToken, refreshToken, profile, cb) => {
|
|||
const avatarUrl = profile.photos[0].value;
|
||||
|
||||
if (oldUser) {
|
||||
await handleExistingUser(oldUser, avatarUrl, useFirebase);
|
||||
await handleExistingUser(oldUser, avatarUrl);
|
||||
return cb(null, oldUser);
|
||||
}
|
||||
|
||||
if (ALLOW_SOCIAL_REGISTRATION) {
|
||||
const newUser = await createNewUser(profile, githubId, email, avatarUrl, useFirebase);
|
||||
const newUser = await createNewUser({
|
||||
email,
|
||||
avatarUrl,
|
||||
provider: 'github',
|
||||
providerKey: 'githubId',
|
||||
providerId: githubId,
|
||||
username: profile.username,
|
||||
name: profile.displayName,
|
||||
emailVerified: profile.emails[0].verified,
|
||||
});
|
||||
return cb(null, newUser);
|
||||
}
|
||||
} catch (err) {
|
||||
|
|
@ -27,39 +36,6 @@ const githubLogin = async (accessToken, refreshToken, profile, cb) => {
|
|||
}
|
||||
};
|
||||
|
||||
const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => {
|
||||
if (!useFirebase && !oldUser.avatar.includes('?manual=true')) {
|
||||
oldUser.avatar = avatarUrl;
|
||||
await oldUser.save();
|
||||
} else if (useFirebase && !oldUser.avatar.includes('?manual=true')) {
|
||||
const userId = oldUser._id;
|
||||
const avatarURL = await uploadAvatar(userId, avatarUrl);
|
||||
oldUser.avatar = avatarURL;
|
||||
await oldUser.save();
|
||||
}
|
||||
};
|
||||
|
||||
const createNewUser = async (profile, githubId, email, avatarUrl, useFirebase) => {
|
||||
const newUser = await new User({
|
||||
provider: 'github',
|
||||
githubId,
|
||||
username: profile.username,
|
||||
email,
|
||||
emailVerified: profile.emails[0].verified,
|
||||
name: profile.displayName,
|
||||
avatar: avatarUrl,
|
||||
}).save();
|
||||
|
||||
if (useFirebase) {
|
||||
const userId = newUser._id;
|
||||
const avatarURL = await uploadAvatar(userId, avatarUrl);
|
||||
newUser.avatar = avatarURL;
|
||||
await newUser.save();
|
||||
}
|
||||
|
||||
return newUser;
|
||||
};
|
||||
|
||||
module.exports = () =>
|
||||
new GitHubStrategy(
|
||||
{
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const { Strategy: GoogleStrategy } = require('passport-google-oauth20');
|
||||
const { createNewUser, handleExistingUser } = require('./process');
|
||||
const { logger } = require('~/config');
|
||||
const User = require('~/models/User');
|
||||
const { useFirebase, uploadAvatar } = require('~/server/services/Files/images');
|
||||
|
||||
const googleLogin = async (accessToken, refreshToken, profile, cb) => {
|
||||
try {
|
||||
|
|
@ -13,12 +13,21 @@ const googleLogin = async (accessToken, refreshToken, profile, cb) => {
|
|||
const avatarUrl = profile.photos[0].value;
|
||||
|
||||
if (oldUser) {
|
||||
await handleExistingUser(oldUser, avatarUrl, useFirebase);
|
||||
await handleExistingUser(oldUser, avatarUrl);
|
||||
return cb(null, oldUser);
|
||||
}
|
||||
|
||||
if (ALLOW_SOCIAL_REGISTRATION) {
|
||||
const newUser = await createNewUser(profile, googleId, email, avatarUrl, useFirebase);
|
||||
const newUser = await createNewUser({
|
||||
email,
|
||||
avatarUrl,
|
||||
provider: 'google',
|
||||
providerKey: 'googleId',
|
||||
providerId: googleId,
|
||||
username: profile.name.givenName,
|
||||
name: `${profile.name.givenName} ${profile.name.familyName}`,
|
||||
emailVerified: profile.emails[0].verified,
|
||||
});
|
||||
return cb(null, newUser);
|
||||
}
|
||||
} catch (err) {
|
||||
|
|
@ -27,39 +36,6 @@ const googleLogin = async (accessToken, refreshToken, profile, cb) => {
|
|||
}
|
||||
};
|
||||
|
||||
const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => {
|
||||
if ((!useFirebase && !oldUser.avatar.includes('?manual=true')) || oldUser.avatar === null) {
|
||||
oldUser.avatar = avatarUrl;
|
||||
await oldUser.save();
|
||||
} else if (useFirebase && !oldUser.avatar.includes('?manual=true')) {
|
||||
const userId = oldUser._id;
|
||||
const avatarURL = await uploadAvatar(userId, avatarUrl);
|
||||
oldUser.avatar = avatarURL;
|
||||
await oldUser.save();
|
||||
}
|
||||
};
|
||||
|
||||
const createNewUser = async (profile, googleId, email, avatarUrl, useFirebase) => {
|
||||
const newUser = await new User({
|
||||
provider: 'google',
|
||||
googleId,
|
||||
username: profile.name.givenName,
|
||||
email,
|
||||
emailVerified: profile.emails[0].verified,
|
||||
name: `${profile.name.givenName} ${profile.name.familyName}`,
|
||||
avatar: avatarUrl,
|
||||
}).save();
|
||||
|
||||
if (useFirebase) {
|
||||
const userId = newUser._id;
|
||||
const avatarURL = await uploadAvatar(userId, avatarUrl);
|
||||
newUser.avatar = avatarURL;
|
||||
await newUser.save();
|
||||
}
|
||||
|
||||
return newUser;
|
||||
};
|
||||
|
||||
module.exports = () =>
|
||||
new GoogleStrategy(
|
||||
{
|
||||
|
|
|
|||
92
api/strategies/process.js
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
const { FileSources } = require('librechat-data-provider');
|
||||
const uploadAvatar = require('~/server/services/Files/images/avatar');
|
||||
const User = require('~/models/User');
|
||||
|
||||
/**
|
||||
* Updates the avatar URL of an existing user. If the user's avatar URL does not include the query parameter
|
||||
* '?manual=true', it updates the user's avatar with the provided URL. For local file storage, it directly updates
|
||||
* the avatar URL, while for other storage types, it processes the avatar URL using the specified file strategy.
|
||||
*
|
||||
* @param {User} oldUser - The existing user object that needs to be updated. Expected to have an 'avatar' property.
|
||||
* @param {string} avatarUrl - The new avatar URL to be set for the user.
|
||||
*
|
||||
* @returns {Promise<void>}
|
||||
* The function updates the user's avatar and saves the user object. It does not return any value.
|
||||
*
|
||||
* @throws {Error} Throws an error if there's an issue saving the updated user object.
|
||||
*/
|
||||
const handleExistingUser = async (oldUser, avatarUrl) => {
|
||||
const fileStrategy = process.env.CDN_PROVIDER;
|
||||
const isLocal = fileStrategy === FileSources.local;
|
||||
|
||||
if (isLocal && !oldUser.avatar.includes('?manual=true')) {
|
||||
oldUser.avatar = avatarUrl;
|
||||
await oldUser.save();
|
||||
} else if (!isLocal && !oldUser.avatar.includes('?manual=true')) {
|
||||
const userId = oldUser._id;
|
||||
const newavatarUrl = await uploadAvatar({ userId, input: avatarUrl, fileStrategy });
|
||||
oldUser.avatar = newavatarUrl;
|
||||
await oldUser.save();
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a new user with the provided user details. If the file strategy is not local, the avatar URL is
|
||||
* processed using the specified file strategy. The new user is saved to the database with the processed or
|
||||
* original avatar URL.
|
||||
*
|
||||
* @param {Object} params - The parameters object for user creation.
|
||||
* @param {string} params.email - The email of the new user.
|
||||
* @param {string} params.avatarUrl - The avatar URL of the new user.
|
||||
* @param {string} params.provider - The provider of the user's account.
|
||||
* @param {string} params.providerKey - The key to identify the provider in the user model.
|
||||
* @param {string} params.providerId - The provider-specific ID of the user.
|
||||
* @param {string} params.username - The username of the new user.
|
||||
* @param {string} params.name - The name of the new user.
|
||||
* @param {boolean} [params.emailVerified=false] - Optional. Indicates whether the user's email is verified. Defaults to false.
|
||||
*
|
||||
* @returns {Promise<User>}
|
||||
* A promise that resolves to the newly created user object.
|
||||
*
|
||||
* @throws {Error} Throws an error if there's an issue creating or saving the new user object.
|
||||
*/
|
||||
const createNewUser = async ({
|
||||
email,
|
||||
avatarUrl,
|
||||
provider,
|
||||
providerKey,
|
||||
providerId,
|
||||
username,
|
||||
name,
|
||||
emailVerified,
|
||||
}) => {
|
||||
const update = {
|
||||
email,
|
||||
avatar: avatarUrl,
|
||||
provider,
|
||||
[providerKey]: providerId,
|
||||
username,
|
||||
name,
|
||||
emailVerified,
|
||||
};
|
||||
|
||||
// TODO: remove direct access of User model
|
||||
const newUser = await new User(update).save();
|
||||
|
||||
const fileStrategy = process.env.CDN_PROVIDER;
|
||||
const isLocal = fileStrategy === FileSources.local;
|
||||
|
||||
if (!isLocal) {
|
||||
const userId = newUser._id;
|
||||
const newavatarUrl = await uploadAvatar({ userId, input: avatarUrl, fileStrategy });
|
||||
newUser.avatar = newavatarUrl;
|
||||
await newUser.save();
|
||||
}
|
||||
|
||||
return newUser;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
handleExistingUser,
|
||||
createNewUser,
|
||||
};
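Note: a hedged example of a call from a passport verify callback (all profile values are placeholders); with CDN_PROVIDER set to anything other than 'local', the avatar is re-uploaded through uploadAvatar and the stored URL replaced:

const newUser = await createNewUser({
  email: 'jane@example.com',
  avatarUrl: 'https://avatars.example.com/jane.png',
  provider: 'google',
  providerKey: 'googleId',
  providerId: '1234567890',
  username: 'jane',
  name: 'Jane Doe',
  emailVerified: true,
});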
|
||||
|
|
@ -1,3 +1,43 @@
|
|||
jest.mock('winston', () => {
|
||||
const mockFormatFunction = jest.fn((fn) => fn);
|
||||
|
||||
mockFormatFunction.colorize = jest.fn();
|
||||
mockFormatFunction.combine = jest.fn();
|
||||
mockFormatFunction.label = jest.fn();
|
||||
mockFormatFunction.timestamp = jest.fn();
|
||||
mockFormatFunction.printf = jest.fn();
|
||||
mockFormatFunction.errors = jest.fn();
|
||||
mockFormatFunction.splat = jest.fn();
|
||||
return {
|
||||
format: mockFormatFunction,
|
||||
createLogger: jest.fn().mockReturnValue({
|
||||
info: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
debug: jest.fn(),
|
||||
error: jest.fn(),
|
||||
}),
|
||||
transports: {
|
||||
Console: jest.fn(),
|
||||
DailyRotateFile: jest.fn(),
|
||||
},
|
||||
addColors: jest.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
jest.mock('winston-daily-rotate-file', () => {
|
||||
return jest.fn().mockImplementation(() => {
|
||||
return {
|
||||
level: 'error',
|
||||
filename: '../logs/error-%DATE%.log',
|
||||
datePattern: 'YYYY-MM-DD',
|
||||
zippedArchive: true,
|
||||
maxSize: '20m',
|
||||
maxFiles: '14d',
|
||||
format: 'format',
|
||||
};
|
||||
});
|
||||
});
|
||||
|
||||
jest.mock('~/config', () => {
|
||||
return {
|
||||
logger: {
|
||||
|
|
@ -8,3 +48,11 @@ jest.mock('~/config', () => {
|
|||
},
|
||||
};
|
||||
});
|
||||
|
||||
jest.mock('~/config/parsers', () => {
|
||||
return {
|
||||
redactMessage: jest.fn(),
|
||||
redactFormat: jest.fn(),
|
||||
debugTraverse: jest.fn(),
|
||||
};
|
||||
});
|
||||
|
|
|
|||
|
|
@ -26,6 +26,18 @@
|
|||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports TMessage
|
||||
* @typedef {import('librechat-data-provider').TMessage} TMessage
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports FileSources
|
||||
* @typedef {import('librechat-data-provider').FileSources} FileSources
|
||||
* @memberof typedefs
|
||||
*/
|
||||
|
||||
/**
|
||||
* @exports ImageMetadata
|
||||
* @typedef {Object} ImageMetadata
|
||||
|
|
|
|||