🚀 Feat: Streamline File Strategies & GPT-4-Vision Settings (#1535)
* chore: fix `endpoint` typescript issues and a typo in a console info message
* feat(api): files GET endpoint; save only file_id references to messages
* refactor(client): `useGetFiles` query hook, update file types, optimistic update of filesQuery on file upload
* refactor(buildTree): update to use a params object and accept fileMap
* feat: map files to messages; refactor(ChatView): messages only available after files are fetched
* fix: fetch files only when authenticated
* feat(api): AppService: rename app.locals.configs to app.locals.paths; load custom config and use fileStrategy from the yaml config in app.locals
* refactor: separate Firebase and Local strategies, called based on config
* refactor: modularize file strategies and employ them for DALL-E
* refactor(librechat.yaml): add fileStrategy field
* feat: add source to the MongoFile schema, as well as to the BatchFile and ExtendedFile types
* feat: employ file strategies for uploading/deleting files
* refactor(deleteFirebaseFile): add user id validation for firebase file deletion
* chore(deleteFirebaseFile): update jsdocs
* feat: employ strategies for vision requests
* fix(client): handle messages with deleted files
* fix(client): ensure `filesToDelete` always saves/sends `file.source`
* feat(openAI): configurable `resendImages` and `imageDetail`
* refactor(getTokenCountForMessage): recursively process only arrays of objects, and only their values (not keys), skipping `image_url` types
* feat(OpenAIClient): calculateImageTokenCost
* chore: remove comment
* refactor(uploadAvatar): employ fileStrategy for avatars, whether from social logins or user upload
* docs: update docs on how to configure fileStrategy
* fix(ci): mock winston and winston-related modules; update DALLE3.spec.js with the changes made
* refactor(redis): change terminal message to reflect the current development state
* fix(DALL-E-2): pass fileStrategy to dall-e
Parent: 28a6807176
Commit: d20970f5c5
81 changed files with 1729 additions and 855 deletions
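The core of the change is a per-source strategy layer: every file operation is dispatched through a function map keyed by FileSources. A minimal sketch of the idea, assuming a simplified strategy-object shape (only FileSources, getStrategyFunctions, and deleteFile appear in this commit; the stub bodies and object shapes are illustrative):

const { FileSources } = require('librechat-data-provider');

// Stub strategy objects purely for illustration; the real ones wrap Firebase
// Storage and the local client/public/images folder respectively.
const firebaseStrategy = { deleteFile: async (req, file) => { /* remove object from Firebase Storage */ } };
const localStrategy = { deleteFile: async (req, file) => { /* unlink file under client/public/images */ } };

const strategies = {
  [FileSources.firebase]: firebaseStrategy,
  [FileSources.local]: localStrategy,
};

function getStrategyFunctions(source) {
  const strategy = strategies[source];
  if (!strategy) {
    throw new Error(`Invalid file source: ${source}`);
  }
  return strategy;
}

// Callers pick the implementation from the file's recorded source, e.g.:
// const { deleteFile } = getStrategyFunctions(file.source ?? FileSources.local);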
BaseClient.js

@@ -46,6 +46,10 @@ class BaseClient {
     logger.debug('`[BaseClient] recordTokenUsage` not implemented.', response);
   }
 
+  async addPreviousAttachments(messages) {
+    return messages;
+  }
+
   async recordTokenUsage({ promptTokens, completionTokens }) {
     logger.debug('`[BaseClient] recordTokenUsage` not implemented.', {
       promptTokens,
@@ -484,20 +488,22 @@ class BaseClient {
       mapMethod = this.getMessageMapMethod();
     }
 
-    const orderedMessages = this.constructor.getMessagesForConversation({
+    let _messages = this.constructor.getMessagesForConversation({
       messages,
       parentMessageId,
       mapMethod,
     });
 
+    _messages = await this.addPreviousAttachments(_messages);
+
     if (!this.shouldSummarize) {
-      return orderedMessages;
+      return _messages;
     }
 
     // Find the latest message with a 'summary' property
-    for (let i = orderedMessages.length - 1; i >= 0; i--) {
-      if (orderedMessages[i]?.summary) {
-        this.previous_summary = orderedMessages[i];
+    for (let i = _messages.length - 1; i >= 0; i--) {
+      if (_messages[i]?.summary) {
+        this.previous_summary = _messages[i];
         break;
       }
     }
@@ -512,7 +518,7 @@ class BaseClient {
       });
     }
 
-    return orderedMessages;
+    return _messages;
   }
 
   async saveMessageToDatabase(message, endpointOptions, user = null) {
@@ -618,6 +624,11 @@ class BaseClient {
    * An additional 3 tokens need to be added for assistant label priming after all messages have been counted.
    * In our implementation, this is accounted for in the getMessagesWithinTokenLimit method.
    *
+   * The content parts example was adapted from the following example:
+   * https://github.com/openai/openai-cookbook/pull/881/files
+   *
+   * Note: image token calculation is to be done elsewhere where we have access to the image metadata
+   *
    * @param {Object} message
    */
   getTokenCountForMessage(message) {
@@ -631,11 +642,18 @@ class BaseClient {
     }
 
     const processValue = (value) => {
-      if (typeof value === 'object' && value !== null) {
-        for (let [nestedKey, nestedValue] of Object.entries(value)) {
-          if (nestedKey === 'image_url' || nestedValue === 'image_url') {
+      if (Array.isArray(value)) {
+        for (let item of value) {
+          if (!item || !item.type || item.type === 'image_url') {
             continue;
           }
+
+          const nestedValue = item[item.type];
+
+          if (!nestedValue) {
+            continue;
+          }
+
           processValue(nestedValue);
         }
       } else {
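For context, this is the kind of content-parts message the refactored getTokenCountForMessage now handles; it mirrors the vision_request fixture added in OpenAIClient.spec.js below. The text part is tokenized, while the image_url part is skipped here because its cost is added separately from the stored image metadata:

// Illustrative only: a vision-style message with content parts.
const message = {
  role: 'user',
  content: [
    { type: 'text', text: 'describe what is in this image?' },
    { type: 'image_url', image_url: { url: 'https://example.com/img.png', detail: 'high' } },
  ],
};

// const textTokens = client.getTokenCountForMessage(message); // counts role + text parts only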
OpenAIClient.js

@@ -1,6 +1,6 @@
 const OpenAI = require('openai');
 const { HttpsProxyAgent } = require('https-proxy-agent');
-const { getResponseSender } = require('librechat-data-provider');
+const { getResponseSender, ImageDetailCost, ImageDetail } = require('librechat-data-provider');
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
 const { encodeAndFormat, validateVisionModel } = require('~/server/services/Files/images');
 const { getModelMaxTokens, genAzureChatCompletion, extractBaseURL } = require('~/utils');
@@ -8,8 +8,9 @@ const { truncateText, formatMessage, CUT_OFF_PROMPT } = require('./prompts');
 const { handleOpenAIErrors } = require('./tools/util');
 const spendTokens = require('~/models/spendTokens');
 const { createLLM, RunManager } = require('./llm');
-const { isEnabled } = require('~/server/utils');
 const ChatGPTClient = require('./ChatGPTClient');
+const { isEnabled } = require('~/server/utils');
+const { getFiles } = require('~/models/File');
 const { summaryBuffer } = require('./memory');
 const { runTitleChain } = require('./chains');
 const { tokenSplit } = require('./document');
@@ -76,16 +77,7 @@ class OpenAIClient extends BaseClient {
       };
     }
 
-    this.isVisionModel = validateVisionModel(this.modelOptions.model);
-
-    if (this.options.attachments && !this.isVisionModel) {
-      this.modelOptions.model = 'gpt-4-vision-preview';
-      this.isVisionModel = true;
-    }
-
-    if (this.isVisionModel) {
-      delete this.modelOptions.stop;
-    }
+    this.checkVisionRequest(this.options.attachments);
 
     const { OPENROUTER_API_KEY, OPENAI_FORCE_PROMPT } = process.env ?? {};
     if (OPENROUTER_API_KEY && !this.azure) {
@@ -204,6 +196,27 @@ class OpenAIClient extends BaseClient {
     return this;
   }
 
+  /**
+   *
+   * Checks if the model is a vision model based on request attachments and sets the appropriate options:
+   * - Sets `this.modelOptions.model` to `gpt-4-vision-preview` if the request is a vision request.
+   * - Sets `this.isVisionModel` to `true` if vision request.
+   * - Deletes `this.modelOptions.stop` if vision request.
+   * @param {Array<Promise<MongoFile[]> | MongoFile[]> | Record<string, MongoFile[]>} attachments
+   */
+  checkVisionRequest(attachments) {
+    this.isVisionModel = validateVisionModel(this.modelOptions.model);
+
+    if (attachments && !this.isVisionModel) {
+      this.modelOptions.model = 'gpt-4-vision-preview';
+      this.isVisionModel = true;
+    }
+
+    if (this.isVisionModel) {
+      delete this.modelOptions.stop;
+    }
+  }
+
   setupTokens() {
     if (this.isChatCompletion) {
       this.startToken = '||>';
@@ -288,7 +301,11 @@ class OpenAIClient extends BaseClient {
     tokenizerCallsCount++;
   }
 
-  // Returns the token count of a given text. It also checks and resets the tokenizers if necessary.
+  /**
+   * Returns the token count of a given text. It also checks and resets the tokenizers if necessary.
+   * @param {string} text - The text to get the token count for.
+   * @returns {number} The token count of the given text.
+   */
   getTokenCount(text) {
     this.resetTokenizersIfNecessary();
     try {
@@ -301,10 +318,33 @@ class OpenAIClient extends BaseClient {
     }
   }
 
+  /**
+   * Calculate the token cost for an image based on its dimensions and detail level.
+   *
+   * @param {Object} image - The image object.
+   * @param {number} image.width - The width of the image.
+   * @param {number} image.height - The height of the image.
+   * @param {'low'|'high'|string|undefined} [image.detail] - The detail level ('low', 'high', or other).
+   * @returns {number} The calculated token cost.
+   */
+  calculateImageTokenCost({ width, height, detail }) {
+    if (detail === 'low') {
+      return ImageDetailCost.LOW;
+    }
+
+    // Calculate the number of 512px squares
+    const numSquares = Math.ceil(width / 512) * Math.ceil(height / 512);
+
+    // Default to high detail cost calculation
+    return numSquares * ImageDetailCost.HIGH + ImageDetailCost.ADDITIONAL;
+  }
+
   getSaveOptions() {
     return {
       chatGptLabel: this.options.chatGptLabel,
       promptPrefix: this.options.promptPrefix,
+      resendImages: this.options.resendImages,
+      imageDetail: this.options.imageDetail,
       ...this.modelOptions,
     };
   }
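A quick worked example of the high-detail branch above. The concrete ImageDetailCost values live in librechat-data-provider and are not shown in this diff; the numbers below assume they match OpenAI's published per-tile vision pricing (170 tokens per 512px tile plus an 85-token base), so treat them as illustrative:

// Illustrative only: assumed constants, mirroring OpenAI's published pricing.
const ImageDetailCost = { LOW: 85, HIGH: 170, ADDITIONAL: 85 };

function imageTokenCost({ width, height, detail }) {
  if (detail === 'low') {
    return ImageDetailCost.LOW;
  }
  // Count 512px tiles, then apply the per-tile cost plus the base cost.
  const numSquares = Math.ceil(width / 512) * Math.ceil(height / 512);
  return numSquares * ImageDetailCost.HIGH + ImageDetailCost.ADDITIONAL;
}

// A 1024x1024 image at 'high' detail covers 4 tiles: 4 * 170 + 85 = 765 tokens.
console.log(imageTokenCost({ width: 1024, height: 1024, detail: 'high' })); // 765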
@@ -317,6 +357,69 @@ class OpenAIClient extends BaseClient {
     };
   }
 
+  /**
+   *
+   * @param {TMessage[]} _messages
+   * @returns {TMessage[]}
+   */
+  async addPreviousAttachments(_messages) {
+    if (!this.options.resendImages) {
+      return _messages;
+    }
+
+    /**
+     *
+     * @param {TMessage} message
+     */
+    const processMessage = async (message) => {
+      if (!this.message_file_map) {
+        /** @type {Record<string, MongoFile[]> */
+        this.message_file_map = {};
+      }
+
+      const fileIds = message.files.map((file) => file.file_id);
+      const files = await getFiles({
+        file_id: { $in: fileIds },
+      });
+
+      await this.addImageURLs(message, files);
+
+      this.message_file_map[message.messageId] = files;
+      return message;
+    };
+
+    const promises = [];
+
+    for (const message of _messages) {
+      if (!message.files) {
+        promises.push(message);
+        continue;
+      }
+
+      promises.push(processMessage(message));
+    }
+
+    const messages = await Promise.all(promises);
+
+    this.checkVisionRequest(this.message_file_map);
+    return messages;
+  }
+
+  /**
+   *
+   * Adds image URLs to the message object and returns the files
+   *
+   * @param {TMessage[]} messages
+   * @param {MongoFile[]} files
+   * @returns {Promise<MongoFile[]>}
+   */
+  async addImageURLs(message, attachments) {
+    const { files, image_urls } = await encodeAndFormat(this.options.req, attachments);
+
+    message.image_urls = image_urls;
+    return files;
+  }
+
   async buildMessages(
     messages,
     parentMessageId,
@@ -355,13 +458,23 @@ class OpenAIClient extends BaseClient {
     }
 
     if (this.options.attachments) {
-      const attachments = await this.options.attachments;
-      const { files, image_urls } = await encodeAndFormat(
-        this.options.req,
-        attachments.filter((file) => file.type.includes('image')),
+      const attachments = (await this.options.attachments).filter((file) =>
+        file.type.includes('image'),
+      );
+
+      if (this.message_file_map) {
+        this.message_file_map[orderedMessages[orderedMessages.length - 1].messageId] = attachments;
+      } else {
+        this.message_file_map = {
+          [orderedMessages[orderedMessages.length - 1].messageId]: attachments,
+        };
+      }
+
+      const files = await this.addImageURLs(
+        orderedMessages[orderedMessages.length - 1],
+        attachments,
       );
 
-      orderedMessages[orderedMessages.length - 1].image_urls = image_urls;
       this.options.attachments = files;
     }
@@ -372,10 +485,25 @@ class OpenAIClient extends BaseClient {
         assistantName: this.options?.chatGptLabel,
       });
 
-      if (this.contextStrategy && !orderedMessages[i].tokenCount) {
+      const needsTokenCount = this.contextStrategy && !orderedMessages[i].tokenCount;
+
+      /* If tokens were never counted, or, is a Vision request and the message has files, count again */
+      if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) {
         orderedMessages[i].tokenCount = this.getTokenCountForMessage(formattedMessage);
       }
+
+      /* If message has files, calculate image token cost */
+      if (this.message_file_map && this.message_file_map[message.messageId]) {
+        const attachments = this.message_file_map[message.messageId];
+        for (const file of attachments) {
+          orderedMessages[i].tokenCount += this.calculateImageTokenCost({
+            width: file.width,
+            height: file.height,
+            detail: this.options.imageDetail ?? ImageDetail.auto,
+          });
+        }
+      }
+
       return formattedMessage;
     });
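Putting the two pieces together, a rough sketch of how a vision message's budget is assembled: getTokenCountForMessage covers the role and text parts (the spec below counts 11 tokens for its sample message, 14 minus the 3 assistant-priming tokens), and the per-file image cost is added on top. The image constants are the same assumed values sketched earlier, so the totals are illustrative only:

// Illustrative accounting for one vision message with a single 1024x1024 attachment.
const textTokens = 11;                         // from getTokenCountForMessage (text parts + role)
const imageTokens = 4 * 170 + 85;              // calculateImageTokenCost at 'high' detail -> 765
const messageTokenCount = textTokens + imageTokens; // 776 tokens charged against the context window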
@@ -780,7 +908,6 @@ ${convo}
     if (this.isChatCompletion) {
       modelOptions.messages = payload;
     } else {
-      // TODO: unreachable code. Need to implement completions call for non-chat models
       modelOptions.prompt = payload;
     }
 
@@ -916,6 +1043,8 @@ ${convo}
         clientOptions.addMetadata({ finish_reason });
       }
 
+      logger.debug('[OpenAIClient] chatCompletion response', chatCompletion);
+
       return message.content;
     } catch (err) {
       if (
PluginsClient.js

@@ -112,7 +112,7 @@ class PluginsClient extends OpenAIClient {
       signal: this.abortController.signal,
       openAIApiKey: this.openAIApiKey,
       conversationId: this.conversationId,
-      debug: this.options?.debug,
+      fileStrategy: this.options.req.app.locals.fileStrategy,
       message,
     },
   });
OpenAIClient.spec.js

@@ -546,6 +546,39 @@ describe('OpenAIClient', () => {
         expect(totalTokens).toBe(testCase.expected);
       });
     });
 
+    const vision_request = [
+      {
+        role: 'user',
+        content: [
+          {
+            type: 'text',
+            text: 'describe what is in this image?',
+          },
+          {
+            type: 'image_url',
+            image_url: {
+              url: 'https://venturebeat.com/wp-content/uploads/2019/03/openai-1.png',
+              detail: 'high',
+            },
+          },
+        ],
+      },
+    ];
+
+    const expectedTokens = 14;
+    const visionModel = 'gpt-4-vision-preview';
+
+    it(`should return ${expectedTokens} tokens for model ${visionModel} (Vision Request)`, () => {
+      client.modelOptions.model = visionModel;
+      client.selectTokenizer();
+      // 3 tokens for assistant label
+      let totalTokens = 3;
+      for (let message of vision_request) {
+        totalTokens += client.getTokenCountForMessage(message);
+      }
+      expect(totalTokens).toBe(expectedTokens);
+    });
   });
 
   describe('sendMessage/getCompletion/chatCompletion', () => {
DALL-E tool (class OpenAICreateImage)

@@ -1,20 +1,13 @@
 // From https://platform.openai.com/docs/api-reference/images/create
 // To use this tool, you must pass in a configured OpenAIApi object.
-const fs = require('fs');
-const path = require('path');
 const OpenAI = require('openai');
 // const { genAzureEndpoint } = require('~/utils/genAzureEndpoints');
 const { v4: uuidv4 } = require('uuid');
 const { Tool } = require('langchain/tools');
 const { HttpsProxyAgent } = require('https-proxy-agent');
-const {
-  saveImageToFirebaseStorage,
-  getFirebaseStorageImageUrl,
-  getFirebaseStorage,
-} = require('~/server/services/Files/Firebase');
 const { getImageBasename } = require('~/server/services/Files/images');
+const { processFileURL } = require('~/server/services/Files/process');
 const extractBaseURL = require('~/utils/extractBaseURL');
-const saveImageFromUrl = require('./saveImageFromUrl');
 const { logger } = require('~/config');
 
 const { DALLE_REVERSE_PROXY, PROXY } = process.env;
@@ -23,6 +16,7 @@ class OpenAICreateImage extends Tool {
     super();
 
     this.userId = fields.userId;
+    this.fileStrategy = fields.fileStrategy;
     let apiKey = fields.DALLE_API_KEY || this.getApiKey();
 
     const config = { apiKey };
@@ -82,12 +76,8 @@ Guidelines:
       .trim();
   }
 
-  getMarkdownImageUrl(imageName) {
-    const imageUrl = path
-      .join(this.relativeImageUrl, imageName)
-      .replace(/\\/g, '/')
-      .replace('public/', '');
-    return `![generated image](/${imageUrl})`;
-  }
+  wrapInMarkdown(imageUrl) {
+    return `![generated image](${imageUrl})`;
+  }
 
   async _call(input) {
@@ -118,45 +108,21 @@ Guidelines:
       });
     }
 
-    this.outputPath = path.resolve(
-      __dirname,
-      '..',
-      '..',
-      '..',
-      '..',
-      'client',
-      'public',
-      'images',
-      this.userId,
-    );
-
-    const appRoot = path.resolve(__dirname, '..', '..', '..', '..', 'client');
-    this.relativeImageUrl = path.relative(appRoot, this.outputPath);
-
-    // Check if directory exists, if not create it
-    if (!fs.existsSync(this.outputPath)) {
-      fs.mkdirSync(this.outputPath, { recursive: true });
-    }
-
-    const storage = getFirebaseStorage();
-    if (storage) {
-      try {
-        await saveImageToFirebaseStorage(this.userId, theImageUrl, imageName);
-        this.result = await getFirebaseStorageImageUrl(`${this.userId}/${imageName}`);
-        logger.debug('[DALL-E] result: ' + this.result);
-      } catch (error) {
-        logger.error('Error while saving the image to Firebase Storage:', error);
-        this.result = `Failed to save the image to Firebase Storage. ${error.message}`;
-      }
-    } else {
-      try {
-        await saveImageFromUrl(theImageUrl, this.outputPath, imageName);
-        this.result = this.getMarkdownImageUrl(imageName);
-      } catch (error) {
-        logger.error('Error while saving the image locally:', error);
-        this.result = `Failed to save the image locally. ${error.message}`;
-      }
-    }
+    try {
+      const result = await processFileURL({
+        fileStrategy: this.fileStrategy,
+        userId: this.userId,
+        URL: theImageUrl,
+        fileName: imageName,
+        basePath: 'images',
+      });
+
+      this.result = this.wrapInMarkdown(result);
+    } catch (error) {
+      logger.error('Error while saving the image:', error);
+      this.result = `Failed to save the image locally. ${error.message}`;
+    }
 
     return this.result;
   }
 }
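For orientation, the tool-side flow now boils down to one call into the process service and a markdown wrap. Only the names and parameter shape shown in this diff are used below; the literal values are made up for illustration:

// Illustrative usage of the new flow inside a DALL-E tool's _call.
const url = await processFileURL({
  fileStrategy: 'firebase',          // or 'local'; comes from librechat.yaml via app.locals
  userId: 'user-123',
  URL: 'https://example.com/generated.png',
  fileName: 'img-abc123.png',
  basePath: 'images',
});
this.result = this.wrapInMarkdown(url); // -> ![generated image](<saved file URL>)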
saveImageFromUrl.js (deleted)

@@ -1,46 +0,0 @@
-const fs = require('fs');
-const path = require('path');
-const axios = require('axios');
-const { logger } = require('~/config');
-
-async function saveImageFromUrl(url, outputPath, outputFilename) {
-  try {
-    // Fetch the image from the URL
-    const response = await axios({
-      url,
-      responseType: 'stream',
-    });
-
-    // Get the content type from the response headers
-    const contentType = response.headers['content-type'];
-    let extension = contentType.split('/').pop();
-
-    // Check if the output directory exists, if not, create it
-    if (!fs.existsSync(outputPath)) {
-      fs.mkdirSync(outputPath, { recursive: true });
-    }
-
-    // Replace or append the correct extension
-    const extRegExp = new RegExp(path.extname(outputFilename) + '$');
-    outputFilename = outputFilename.replace(extRegExp, `.${extension}`);
-    if (!path.extname(outputFilename)) {
-      outputFilename += `.${extension}`;
-    }
-
-    // Create a writable stream for the output path
-    const outputFilePath = path.join(outputPath, outputFilename);
-    const writer = fs.createWriteStream(outputFilePath);
-
-    // Pipe the response data to the output file
-    response.data.pipe(writer);
-
-    return new Promise((resolve, reject) => {
-      writer.on('finish', resolve);
-      writer.on('error', reject);
-    });
-  } catch (error) {
-    logger.error('[saveImageFromUrl] Error while saving the image:', error);
-  }
-}
-
-module.exports = saveImageFromUrl;
DALLE3 tool (class DALLE3)

@@ -1,20 +1,13 @@
 // From https://platform.openai.com/docs/guides/images/usage?context=node
 // To use this tool, you must pass in a configured OpenAIApi object.
-const fs = require('fs');
-const path = require('path');
 const { z } = require('zod');
 const OpenAI = require('openai');
 const { v4: uuidv4 } = require('uuid');
 const { Tool } = require('langchain/tools');
 const { HttpsProxyAgent } = require('https-proxy-agent');
-const {
-  saveImageToFirebaseStorage,
-  getFirebaseStorageImageUrl,
-  getFirebaseStorage,
-} = require('~/server/services/Files/Firebase');
 const { getImageBasename } = require('~/server/services/Files/images');
+const { processFileURL } = require('~/server/services/Files/process');
 const extractBaseURL = require('~/utils/extractBaseURL');
-const saveImageFromUrl = require('../saveImageFromUrl');
 const { logger } = require('~/config');
 
 const { DALLE3_SYSTEM_PROMPT, DALLE_REVERSE_PROXY, PROXY } = process.env;
@@ -23,6 +16,7 @@ class DALLE3 extends Tool {
     super();
 
     this.userId = fields.userId;
+    this.fileStrategy = fields.fileStrategy;
     let apiKey = fields.DALLE_API_KEY || this.getApiKey();
     const config = { apiKey };
     if (DALLE_REVERSE_PROXY) {
@@ -91,12 +85,8 @@ class DALLE3 extends Tool {
       .trim();
   }
 
-  getMarkdownImageUrl(imageName) {
-    const imageUrl = path
-      .join(this.relativeImageUrl, imageName)
-      .replace(/\\/g, '/')
-      .replace('public/', '');
-    return `![generated image](/${imageUrl})`;
-  }
+  wrapInMarkdown(imageUrl) {
+    return `![generated image](${imageUrl})`;
+  }
 
   async _call(data) {
@@ -143,43 +133,19 @@ Error Message: ${error.message}`;
       });
     }
 
-    this.outputPath = path.resolve(
-      __dirname,
-      '..',
-      '..',
-      '..',
-      '..',
-      '..',
-      'client',
-      'public',
-      'images',
-      this.userId,
-    );
-    const appRoot = path.resolve(__dirname, '..', '..', '..', '..', '..', 'client');
-    this.relativeImageUrl = path.relative(appRoot, this.outputPath);
-
-    // Check if directory exists, if not create it
-    if (!fs.existsSync(this.outputPath)) {
-      fs.mkdirSync(this.outputPath, { recursive: true });
-    }
-    const storage = getFirebaseStorage();
-    if (storage) {
-      try {
-        await saveImageToFirebaseStorage(this.userId, theImageUrl, imageName);
-        this.result = await getFirebaseStorageImageUrl(`${this.userId}/${imageName}`);
-        logger.debug('[DALL-E-3] result: ' + this.result);
-      } catch (error) {
-        logger.error('Error while saving the image to Firebase Storage:', error);
-        this.result = `Failed to save the image to Firebase Storage. ${error.message}`;
-      }
-    } else {
-      try {
-        await saveImageFromUrl(theImageUrl, this.outputPath, imageName);
-        this.result = this.getMarkdownImageUrl(imageName);
-      } catch (error) {
-        logger.error('Error while saving the image locally:', error);
-        this.result = `Failed to save the image locally. ${error.message}`;
-      }
-    }
+    try {
+      const result = await processFileURL({
+        fileStrategy: this.fileStrategy,
+        userId: this.userId,
+        URL: theImageUrl,
+        fileName: imageName,
+        basePath: 'images',
+      });
+
+      this.result = this.wrapInMarkdown(result);
+    } catch (error) {
+      logger.error('Error while saving the image:', error);
+      this.result = `Failed to save the image locally. ${error.message}`;
+    }
 
     return this.result;
DALLE3.spec.js

@@ -1,20 +1,13 @@
-const fs = require('fs');
-const path = require('path');
 const OpenAI = require('openai');
 const DALLE3 = require('../DALLE3');
-const {
-  getFirebaseStorage,
-  saveImageToFirebaseStorage,
-} = require('~/server/services/Files/Firebase');
-const saveImageFromUrl = require('../../saveImageFromUrl');
+const { processFileURL } = require('~/server/services/Files/process');
 const { logger } = require('~/config');
 
 jest.mock('openai');
 
-jest.mock('~/server/services/Files/Firebase', () => ({
-  getFirebaseStorage: jest.fn(),
-  saveImageToFirebaseStorage: jest.fn(),
-  getFirebaseStorageImageUrl: jest.fn(),
+jest.mock('~/server/services/Files/process', () => ({
+  processFileURL: jest.fn(),
 }));
 
 jest.mock('~/server/services/Files/images', () => ({
@@ -50,10 +43,6 @@ jest.mock('fs', () => {
   };
 });
 
-jest.mock('../../saveImageFromUrl', () => {
-  return jest.fn();
-});
-
 jest.mock('path', () => {
   return {
     resolve: jest.fn(),
@@ -99,10 +88,8 @@ describe('DALLE3', () => {
 
   it('should generate markdown image URL correctly', () => {
     const imageName = 'test.png';
-    path.join.mockReturnValue('images/test.png');
-    path.relative.mockReturnValue('images/test.png');
-    const markdownImage = dalle.getMarkdownImageUrl(imageName);
-    expect(markdownImage).toBe('![generated image](/images/test.png)');
+    const markdownImage = dalle.wrapInMarkdown(imageName);
+    expect(markdownImage).toBe('![generated image](test.png)');
   });
 
   it('should call OpenAI API with correct parameters', async () => {
@@ -122,11 +109,7 @@ describe('DALLE3', () => {
     };
 
     generate.mockResolvedValue(mockResponse);
-    saveImageFromUrl.mockResolvedValue(true);
-    fs.existsSync.mockReturnValue(true);
-    path.resolve.mockReturnValue('/fakepath/images');
-    path.join.mockReturnValue('/fakepath/images/img-test.png');
-    path.relative.mockReturnValue('images/img-test.png');
+    processFileURL.mockResolvedValue('http://example.com/img-test.png');
 
     const result = await dalle._call(mockData);
 
@@ -138,6 +121,7 @@ describe('DALLE3', () => {
       prompt: mockData.prompt,
       n: 1,
     });
+
     expect(result).toContain('![generated image]');
   });
 
@@ -184,23 +168,6 @@ describe('DALLE3', () => {
     });
   });
 
-  it('should create the directory if it does not exist', async () => {
-    const mockData = {
-      prompt: 'A test prompt',
-    };
-    const mockResponse = {
-      data: [
-        {
-          url: 'http://example.com/img-test.png',
-        },
-      ],
-    };
-    generate.mockResolvedValue(mockResponse);
-    fs.existsSync.mockReturnValue(false); // Simulate directory does not exist
-    await dalle._call(mockData);
-    expect(fs.mkdirSync).toHaveBeenCalledWith(expect.any(String), { recursive: true });
-  });
-
   it('should log an error and return the image URL if there is an error saving the image', async () => {
     const mockData = {
       prompt: 'A test prompt',
@@ -214,31 +181,12 @@ describe('DALLE3', () => {
     };
     const error = new Error('Error while saving the image');
     generate.mockResolvedValue(mockResponse);
-    saveImageFromUrl.mockRejectedValue(error);
+    processFileURL.mockRejectedValue(error);
     const result = await dalle._call(mockData);
-    expect(logger.error).toHaveBeenCalledWith('Error while saving the image locally:', error);
+    expect(logger.error).toHaveBeenCalledWith('Error while saving the image:', error);
     expect(result).toBe('Failed to save the image locally. Error while saving the image');
   });
 
-  it('should save image to Firebase Storage if Firebase is initialized', async () => {
-    const mockData = {
-      prompt: 'A test prompt',
-    };
-    const mockImageUrl = 'http://example.com/img-test.png';
-    const mockResponse = { data: [{ url: mockImageUrl }] };
-    generate.mockResolvedValue(mockResponse);
-    getFirebaseStorage.mockReturnValue({}); // Simulate Firebase being initialized
-
-    await dalle._call(mockData);
-
-    expect(getFirebaseStorage).toHaveBeenCalled();
-    expect(saveImageToFirebaseStorage).toHaveBeenCalledWith(
-      undefined,
-      mockImageUrl,
-      expect.any(String),
-    );
-  });
-
   it('should handle error when saving image to Firebase Storage fails', async () => {
     const mockData = {
       prompt: 'A test prompt',
@@ -247,17 +195,11 @@ describe('DALLE3', () => {
     const mockImageUrl = 'http://example.com/img-test.png';
     const mockResponse = { data: [{ url: mockImageUrl }] };
     const error = new Error('Error while saving to Firebase');
     generate.mockResolvedValue(mockResponse);
-    getFirebaseStorage.mockReturnValue({}); // Simulate Firebase being initialized
-    saveImageToFirebaseStorage.mockRejectedValue(error);
+    processFileURL.mockRejectedValue(error);
 
     const result = await dalle._call(mockData);
 
-    expect(logger.error).toHaveBeenCalledWith(
-      'Error while saving the image to Firebase Storage:',
-      error,
-    );
-    expect(result).toBe(
-      'Failed to save the image to Firebase Storage. Error while saving to Firebase',
-    );
+    expect(logger.error).toHaveBeenCalledWith('Error while saving the image:', error);
+    expect(result).toContain('Failed to save the image');
   });
 });
tool loader (loadTools) options

@@ -170,6 +170,8 @@ const loadTools = async ({
 
   const toolOptions = {
     serpapi: { location: 'Austin,Texas,United States', hl: 'en', gl: 'us' },
+    dalle: { fileStrategy: options.fileStrategy },
+    'dall-e': { fileStrategy: options.fileStrategy },
   };
 
   const toolAuthFields = {};
api/cache/keyvRedis.js (vendored, 5 lines changed)

@@ -10,10 +10,11 @@ if (REDIS_URI && isEnabled(USE_REDIS)) {
   keyvRedis = new KeyvRedis(REDIS_URI, { useRedisSets: false });
   keyvRedis.on('error', (err) => logger.error('KeyvRedis connection error:', err));
   keyvRedis.setMaxListeners(20);
-} else {
   logger.info(
-    '`REDIS_URI` not provided, or `USE_REDIS` not set. Redis module will not be initialized.',
+    '[Optional] Redis initialized. Note: Redis support is experimental. If you have issues, disable it. Cache needs to be flushed for values to refresh.',
   );
+} else {
+  logger.info('[Optional] Redis not initialized. Note: Redis support is experimental.');
 }
 
 module.exports = keyvRedis;
~/config/paths

@@ -1,6 +1,7 @@
 const path = require('path');
 
 module.exports = {
+  dist: path.resolve(__dirname, '..', '..', 'client', 'dist'),
   publicPath: path.resolve(__dirname, '..', '..', 'client', 'public'),
   imageOutput: path.resolve(__dirname, '..', '..', 'client', 'public', 'images'),
 };
conversationPreset schema

@@ -84,6 +84,12 @@ const conversationPreset = {
     type: String,
     // default: null,
   },
+  resendImages: {
+    type: Boolean,
+  },
+  imageDetail: {
+    type: String,
+  },
 };
 
 const agentOptions = {
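For illustration, the two new preset fields in use; only the field names and types come from the schema above, the values are made up:

// Hypothetical saved preset using the new fields.
const preset = {
  endpoint: 'openAI',
  model: 'gpt-4-vision-preview',
  resendImages: true,  // re-attach files from earlier messages on each request
  imageDetail: 'auto', // 'low' | 'high' | 'auto'; feeds calculateImageTokenCost
};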
File schema (MongoFile)

@@ -1,3 +1,4 @@
+const { FileSources } = require('librechat-data-provider');
 const mongoose = require('mongoose');
 
 /**
@@ -12,6 +13,7 @@ const mongoose = require('mongoose');
  * @property {'file'} object - Type of object, always 'file'
  * @property {string} type - Type of file
  * @property {number} usage - Number of uses of the file
+ * @property {string} [source] - The source of the file
  * @property {number} [width] - Optional width of the file
  * @property {number} [height] - Optional height of the file
  * @property {Date} [expiresAt] - Optional height of the file
@@ -42,11 +44,6 @@ const fileSchema = mongoose.Schema(
       type: Number,
       required: true,
     },
-    usage: {
-      type: Number,
-      required: true,
-      default: 0,
-    },
     filename: {
       type: String,
       required: true,
@@ -64,6 +61,15 @@ const fileSchema = mongoose.Schema(
       type: String,
       required: true,
     },
+    usage: {
+      type: Number,
+      required: true,
+      default: 0,
+    },
+    source: {
+      type: String,
+      default: FileSources.local,
+    },
     width: Number,
     height: Number,
     expiresAt: {
server entry (startServer)

@@ -5,33 +5,28 @@ const cors = require('cors');
 const express = require('express');
 const passport = require('passport');
 const mongoSanitize = require('express-mongo-sanitize');
-const { initializeFirebase } = require('~/server/services/Files/Firebase/initialize');
-const loadCustomConfig = require('~/server/services/Config/loadCustomConfig');
-const errorController = require('~/server/controllers/ErrorController');
-const configureSocialLogins = require('~/server/socialLogins');
-const noIndex = require('~/server/middleware/noIndex');
+const errorController = require('./controllers/ErrorController');
+const { jwtLogin, passportLogin } = require('~/strategies');
+const configureSocialLogins = require('./socialLogins');
 const { connectDb, indexSync } = require('~/lib/db');
+const AppService = require('./services/AppService');
+const noIndex = require('./middleware/noIndex');
 const { logger } = require('~/config');
 
-const routes = require('~/server/routes');
-const paths = require('~/config/paths');
+const routes = require('./routes');
 
 const { PORT, HOST, ALLOW_SOCIAL_LOGIN } = process.env ?? {};
 
 const port = Number(PORT) || 3080;
 const host = HOST || 'localhost';
-const projectPath = path.join(__dirname, '..', '..', 'client');
-const { jwtLogin, passportLogin } = require('~/strategies');
 
 const startServer = async () => {
   await connectDb();
   logger.info('Connected to MongoDB');
-  await loadCustomConfig();
-  initializeFirebase();
   await indexSync();
 
   const app = express();
-  app.locals.config = paths;
+  await AppService(app);
 
   // Middleware
   app.use(noIndex);
@@ -39,14 +34,14 @@ const startServer = async () => {
   app.use(express.json({ limit: '3mb' }));
   app.use(mongoSanitize());
   app.use(express.urlencoded({ extended: true, limit: '3mb' }));
-  app.use(express.static(path.join(projectPath, 'dist')));
-  app.use(express.static(path.join(projectPath, 'public')));
+  app.use(express.static(app.locals.paths.dist));
+  app.use(express.static(app.locals.paths.publicPath));
   app.set('trust proxy', 1); // trust first proxy
   app.use(cors());
 
   if (!ALLOW_SOCIAL_LOGIN) {
     console.warn(
-      'Social logins are disabled. Set Envrionment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.',
+      'Social logins are disabled. Set Environment Variable "ALLOW_SOCIAL_LOGIN" to true to enable them.',
     );
   }
 
@@ -81,7 +76,7 @@ const startServer = async () => {
   app.use('/api/files', routes.files);
 
   app.use((req, res) => {
-    res.status(404).sendFile(path.join(projectPath, 'dist', 'index.html'));
+    res.status(404).sendFile(path.join(app.locals.paths.dist, 'index.html'));
   });
 
   app.listen(port, host, () => {
endpoint build options (buildFunction map)

@@ -1,10 +1,10 @@
-const { processFiles } = require('~/server/services/Files');
+const { parseConvo, EModelEndpoint } = require('librechat-data-provider');
+const { processFiles } = require('~/server/services/Files/process');
+const gptPlugins = require('~/server/services/Endpoints/gptPlugins');
+const anthropic = require('~/server/services/Endpoints/anthropic');
 const openAI = require('~/server/services/Endpoints/openAI');
 const custom = require('~/server/services/Endpoints/custom');
 const google = require('~/server/services/Endpoints/google');
-const anthropic = require('~/server/services/Endpoints/anthropic');
-const gptPlugins = require('~/server/services/Endpoints/gptPlugins');
-const { parseConvo, EModelEndpoint } = require('librechat-data-provider');
 
 const buildFunction = {
   [EModelEndpoint.openAI]: openAI.buildOptions,
avatar upload route

@@ -1,7 +1,7 @@
 const express = require('express');
 const multer = require('multer');
 
-const uploadAvatar = require('~/server/services/Files/images/avatar/uploadAvatar');
+const uploadAvatar = require('~/server/services/Files/images/avatar');
 const { requireJwtAuth } = require('~/server/middleware/');
 const User = require('~/models/User');
 
@@ -23,7 +23,12 @@ router.post('/', requireJwtAuth, upload.single('input'), async (req, res) => {
     if (!user) {
       throw new Error('User not found');
     }
-    const url = await uploadAvatar(userId, input, manual);
+    const url = await uploadAvatar({
+      input,
+      userId,
+      manual,
+      fileStrategy: req.app.locals.fileStrategy,
+    });
 
     res.json({ url });
   } catch (error) {
files route (GET / DELETE)

@@ -1,36 +1,29 @@
 const { z } = require('zod');
-const path = require('path');
-const fs = require('fs').promises;
 const express = require('express');
-const { deleteFiles } = require('~/models');
+const { FileSources } = require('librechat-data-provider');
+const { getStrategyFunctions } = require('~/server/services/Files/strategies');
+const { deleteFiles, getFiles } = require('~/models');
 const { logger } = require('~/config');
 
 const router = express.Router();
 
 const isUUID = z.string().uuid();
 
-const isValidPath = (req, base, subfolder, filepath) => {
-  const normalizedBase = path.resolve(base, subfolder, req.user.id);
-  const normalizedFilepath = path.resolve(filepath);
-  return normalizedFilepath.startsWith(normalizedBase);
-};
-
-const deleteFile = async (req, file) => {
-  const { publicPath } = req.app.locals.config;
-  const parts = file.filepath.split(path.sep);
-  const subfolder = parts[1];
-  const filepath = path.join(publicPath, file.filepath);
-
-  if (!isValidPath(req, publicPath, subfolder, filepath)) {
-    throw new Error('Invalid file path');
-  }
-
-  await fs.unlink(filepath);
-};
+router.get('/', async (req, res) => {
+  try {
+    const files = await getFiles({ user: req.user.id });
+    res.status(200).send(files);
+  } catch (error) {
+    logger.error('[/files] Error getting files:', error);
+    res.status(400).json({ message: 'Error in request', error: error.message });
+  }
+});
 
 router.delete('/', async (req, res) => {
   try {
     const { files: _files } = req.body;
 
+    /** @type {MongoFile[]} */
     const files = _files.filter((file) => {
       if (!file.file_id) {
         return false;
@@ -47,9 +40,24 @@ router.delete('/', async (req, res) => {
     }
 
     const file_ids = files.map((file) => file.file_id);
+    const deletionMethods = {};
     const promises = [];
     promises.push(await deleteFiles(file_ids));
 
     for (const file of files) {
+      const source = file.source ?? FileSources.local;
+
+      if (deletionMethods[source]) {
+        promises.push(deletionMethods[source](req, file));
+        continue;
+      }
+
+      const { deleteFile } = getStrategyFunctions(source);
+      if (!deleteFile) {
+        throw new Error(`Delete function not implemented for ${source}`);
+      }
+
+      deletionMethods[source] = deleteFile;
       promises.push(deleteFile(req, file));
     }
images upload route

@@ -2,7 +2,7 @@ const { z } = require('zod');
 const fs = require('fs').promises;
 const express = require('express');
 const upload = require('./multer');
-const { localStrategy } = require('~/server/services/Files');
+const { processImageUpload } = require('~/server/services/Files/process');
 const { logger } = require('~/config');
 
 const router = express.Router();
@@ -34,7 +34,8 @@ router.post('/', upload.single('file'), async (req, res) => {
     uuidSchema.parse(metadata.file_id);
     metadata.temp_file_id = metadata.file_id;
     metadata.file_id = req.file_id;
-    await localStrategy({ req, res, file, metadata });
+
+    await processImageUpload({ req, res, file, metadata });
   } catch (error) {
     logger.error('[/files/images] Error processing file:', error);
     try {
files routes index

@@ -11,6 +11,7 @@ const {
 
 const files = require('./files');
 const images = require('./images');
+const avatar = require('./avatar');
 
 router.use(requireJwtAuth);
 router.use(checkBan);
@@ -18,6 +19,6 @@ router.use(uaParser);
 
 router.use('/', files);
 router.use('/images', images);
-router.use('/images/avatar', require('./avatar'));
+router.use('/images/avatar', avatar);
 
 module.exports = router;
multer storage config

@@ -8,7 +8,7 @@ const sizeLimit = 20 * 1024 * 1024; // 20 MB
 
 const storage = multer.diskStorage({
   destination: function (req, file, cb) {
-    const outputPath = path.join(req.app.locals.config.imageOutput, 'temp');
+    const outputPath = path.join(req.app.locals.paths.imageOutput, 'temp');
     if (!fs.existsSync(outputPath)) {
       fs.mkdirSync(outputPath, { recursive: true });
     }
api/server/services/AppService.js (new file, 27 lines)

@@ -0,0 +1,27 @@
+const { FileSources } = require('librechat-data-provider');
+const { initializeFirebase } = require('./Files/Firebase/initialize');
+const loadCustomConfig = require('./Config/loadCustomConfig');
+const paths = require('~/config/paths');
+
+/**
+ *
+ * Loads custom config and initializes app-wide variables.
+ * @function AppService
+ * @param {Express.Application} app - The Express application object.
+ */
+const AppService = async (app) => {
+  const config = (await loadCustomConfig()) ?? {};
+  const fileStrategy = config.fileStrategy ?? FileSources.local;
+  process.env.CDN_PROVIDER = fileStrategy;
+
+  if (fileStrategy === FileSources.firebase) {
+    initializeFirebase();
+  }
+
+  app.locals = {
+    fileStrategy,
+    paths,
+  };
+};
+
+module.exports = AppService;
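In practice the startup path shown in the server entry changes above becomes: load the YAML config, resolve the file strategy, and hang it on app.locals so routes and tools can read it. A condensed sketch using only calls that appear in this commit:

// Condensed from the server entry and avatar route changes in this commit.
const express = require('express');
const AppService = require('./services/AppService');

const startServer = async () => {
  const app = express();
  await AppService(app); // sets app.locals.fileStrategy and app.locals.paths

  // Any route handler can then dispatch on the configured strategy, e.g.:
  // const url = await uploadAvatar({ input, userId, manual, fileStrategy: req.app.locals.fileStrategy });
};

startServer();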
@@ -1,9 +1,11 @@
 const buildOptions = (endpoint, parsedBody) => {
-  const { chatGptLabel, promptPrefix, ...rest } = parsedBody;
+  const { chatGptLabel, promptPrefix, resendImages, imageDetail, ...rest } = parsedBody;
   const endpointOption = {
     endpoint,
     chatGptLabel,
     promptPrefix,
+    resendImages,
+    imageDetail,
     modelOptions: {
       ...rest,
     },
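A hedged example of the object this now produces, assuming an openAI request body; the field values are placeholders only.

// Illustrative only
const endpointOption = buildOptions('openAI', {
  chatGptLabel: 'Assistant',
  promptPrefix: null,
  resendImages: true,  // new: resend prior image attachments
  imageDetail: 'auto', // new: GPT-4-Vision detail setting
  model: 'gpt-4-vision-preview',
  temperature: 0.8,
});
// => { endpoint, chatGptLabel, promptPrefix, resendImages, imageDetail,
//      modelOptions: { model: 'gpt-4-vision-preview', temperature: 0.8 } }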
174 api/server/services/Files/Firebase/crud.js Normal file
@@ -0,0 +1,174 @@
const fetch = require('node-fetch');
const { ref, uploadBytes, getDownloadURL, deleteObject } = require('firebase/storage');
const { getFirebaseStorage } = require('./initialize');

/**
 * Deletes a file from Firebase Storage.
 * @param {string} basePath - The base path (directory) of the file.
 * @param {string} fileName - The name of the file to delete.
 * @returns {Promise<void>} A promise that resolves when the file is deleted.
 */
async function deleteFile(basePath, fileName) {
  const storage = getFirebaseStorage();
  if (!storage) {
    console.error('Firebase is not initialized. Cannot delete file from Firebase Storage.');
    throw new Error('Firebase is not initialized');
  }

  const storageRef = ref(storage, `${basePath}/${fileName}`);

  try {
    await deleteObject(storageRef);
    console.log('File deleted successfully from Firebase Storage');
  } catch (error) {
    console.error('Error deleting file from Firebase Storage:', error.message);
    throw error;
  }
}

/**
 * Saves a file from a given URL to Firebase Storage. The function first initializes the Firebase Storage
 * reference, then uploads the file to a specified basePath in Firebase Storage. It handles initialization
 * errors and upload errors, logging them to the console. If the upload is successful, the file name is returned.
 *
 * @param {Object} params - The parameters object.
 * @param {string} params.userId - The user's unique identifier. This is used to create a user-specific basePath
 *   in Firebase Storage.
 * @param {string} params.URL - The URL of the file to be uploaded. The file at this URL will be fetched
 *   and uploaded to Firebase Storage.
 * @param {string} params.fileName - The name that will be used to save the file in Firebase Storage. This
 *   should include the file extension.
 * @param {string} [params.basePath='images'] - Optional. The base basePath in Firebase Storage where the file will
 *   be stored. Defaults to 'images' if not specified.
 *
 * @returns {Promise<string|null>}
 *   A promise that resolves to the file name if the file is successfully uploaded, or null if there
 *   is an error in initialization or upload.
 */
async function saveURLToFirebase({ userId, URL, fileName, basePath = 'images' }) {
  const storage = getFirebaseStorage();
  if (!storage) {
    console.error('Firebase is not initialized. Cannot save file to Firebase Storage.');
    return null;
  }

  const storageRef = ref(storage, `${basePath}/${userId.toString()}/${fileName}`);

  try {
    await uploadBytes(storageRef, await fetch(URL).then((response) => response.buffer()));
    return fileName;
  } catch (error) {
    console.error('Error uploading file to Firebase Storage:', error.message);
    return null;
  }
}

/**
 * Retrieves the download URL for a specified file from Firebase Storage. This function initializes the
 * Firebase Storage and generates a reference to the file based on the provided basePath and file name. If
 * Firebase Storage is not initialized or if there is an error in fetching the URL, the error is logged
 * to the console.
 *
 * @param {Object} params - The parameters object.
 * @param {string} params.fileName - The name of the file for which the URL is to be retrieved. This should
 *   include the file extension.
 * @param {string} [params.basePath='images'] - Optional. The base basePath in Firebase Storage where the file is
 *   stored. Defaults to 'images' if not specified.
 *
 * @returns {Promise<string|null>}
 *   A promise that resolves to the download URL of the file if successful, or null if there is an
 *   error in initialization or fetching the URL.
 */
async function getFirebaseURL({ fileName, basePath = 'images' }) {
  const storage = getFirebaseStorage();
  if (!storage) {
    console.error('Firebase is not initialized. Cannot get image URL from Firebase Storage.');
    return null;
  }

  const storageRef = ref(storage, `${basePath}/${fileName}`);

  try {
    return await getDownloadURL(storageRef);
  } catch (error) {
    console.error('Error fetching file URL from Firebase Storage:', error.message);
    return null;
  }
}

/**
 * Uploads a buffer to Firebase Storage.
 *
 * @param {Object} params - The parameters object.
 * @param {string} params.userId - The user's unique identifier. This is used to create a user-specific basePath
 *   in Firebase Storage.
 * @param {string} params.fileName - The name of the file to be saved in Firebase Storage.
 * @param {string} params.buffer - The buffer to be uploaded.
 * @param {string} [params.basePath='images'] - Optional. The base basePath in Firebase Storage where the file will
 *   be stored. Defaults to 'images' if not specified.
 *
 * @returns {Promise<string>} - A promise that resolves to the download URL of the uploaded file.
 */
async function saveBufferToFirebase({ userId, buffer, fileName, basePath = 'images' }) {
  const storage = getFirebaseStorage();
  if (!storage) {
    throw new Error('Firebase is not initialized');
  }

  const storageRef = ref(storage, `${basePath}/${userId}/${fileName}`);
  await uploadBytes(storageRef, buffer);

  // Assuming you have a function to get the download URL
  return await getFirebaseURL({ fileName, basePath: `${basePath}/${userId}` });
}

/**
 * Extracts and decodes the file path from a Firebase Storage URL.
 *
 * @param {string} urlString - The Firebase Storage URL.
 * @returns {string} The decoded file path.
 */
function extractFirebaseFilePath(urlString) {
  try {
    const url = new URL(urlString);
    const pathRegex = /\/o\/(.+?)(\?|$)/;
    const match = url.pathname.match(pathRegex);

    if (match && match[1]) {
      return decodeURIComponent(match[1]);
    }

    return '';
  } catch (error) {
    // If URL parsing fails, return an empty string
    return '';
  }
}

/**
 * Deletes a file from Firebase storage. This function determines the filepath from the
 * Firebase storage URL via regex for deletion. Validated by the user's ID.
 *
 * @param {Express.Request} req - The request object from Express.
 *   It should contain a `user` object with an `id` property.
 * @param {MongoFile} file - The file object to be deleted.
 *
 * @returns {Promise<void>}
 *   A promise that resolves when the file has been successfully deleted from Firebase storage.
 *   Throws an error if there is an issue with deletion.
 */
const deleteFirebaseFile = async (req, file) => {
  const fileName = extractFirebaseFilePath(file.filepath);
  if (!fileName.includes(req.user.id)) {
    throw new Error('Invalid file path');
  }
  await deleteFile('', fileName);
};

module.exports = {
  deleteFile,
  getFirebaseURL,
  saveURLToFirebase,
  deleteFirebaseFile,
  saveBufferToFirebase,
};
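A minimal usage sketch of the two most common helpers above, assuming Firebase was initialized at startup; the `mirrorImage` name and require path are illustrative only.

// Sketch: persist a remote image for a user, then resolve its download URL.
const { saveURLToFirebase, getFirebaseURL } = require('./Firebase/crud');

async function mirrorImage(userId, imageUrl) {
  const fileName = await saveURLToFirebase({ userId, URL: imageUrl, fileName: 'img.png' });
  if (!fileName) {
    return null; // Firebase not initialized or the upload failed
  }
  // Files are stored under images/<userId>/, so the lookup key includes the user id.
  return await getFirebaseURL({ fileName: `${userId}/${fileName}` });
}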
@@ -1,45 +1,105 @@
-const fetch = require('node-fetch');
-const { ref, uploadBytes, getDownloadURL } = require('firebase/storage');
-const { getFirebaseStorage } = require('./initialize');
-
-async function saveImageToFirebaseStorage(userId, imageUrl, imageName) {
-  const storage = getFirebaseStorage();
-  if (!storage) {
-    console.error('Firebase is not initialized. Cannot save image to Firebase Storage.');
-    return null;
-  }
-
-  const storageRef = ref(storage, `images/${userId.toString()}/${imageName}`);
-
-  try {
-    // Upload image to Firebase Storage using the image URL
-    await uploadBytes(storageRef, await fetch(imageUrl).then((response) => response.buffer()));
-    return imageName;
-  } catch (error) {
-    console.error('Error uploading image to Firebase Storage:', error.message);
-    return null;
-  }
-}
-
-async function getFirebaseStorageImageUrl(imageName) {
-  const storage = getFirebaseStorage();
-  if (!storage) {
-    console.error('Firebase is not initialized. Cannot get image URL from Firebase Storage.');
-    return null;
-  }
-
-  const storageRef = ref(storage, `images/${imageName}`);
-
-  try {
-    // Get the download URL for the image from Firebase Storage
-    return await getDownloadURL(storageRef);
-  } catch (error) {
-    console.error('Error fetching image URL from Firebase Storage:', error.message);
-    return null;
-  }
-}
-
-module.exports = {
-  saveImageToFirebaseStorage,
-  getFirebaseStorageImageUrl,
-};
+const fs = require('fs');
+const path = require('path');
+const sharp = require('sharp');
+const { resizeImage } = require('../images/resize');
+const { saveBufferToFirebase } = require('./crud');
+const { updateFile } = require('~/models/File');
+const { logger } = require('~/config');
+
+/**
+ * Converts an image file to the WebP format. The function first resizes the image based on the specified
+ * resolution.
+ *
+ * @param {Object} req - The request object from Express. It should have a `user` property with an `id`
+ *   representing the user, and an `app.locals.paths` object with an `imageOutput` path.
+ * @param {Express.Multer.File} file - The file object, which is part of the request. The file object should
+ *   have a `path` property that points to the location of the uploaded file.
+ * @param {string} [resolution='high'] - Optional. The desired resolution for the image resizing. Default is 'high'.
+ *
+ * @returns {Promise<{ filepath: string, bytes: number, width: number, height: number}>}
+ *   A promise that resolves to an object containing:
+ *     - filepath: The path where the converted WebP image is saved.
+ *     - bytes: The size of the converted image in bytes.
+ *     - width: The width of the converted image.
+ *     - height: The height of the converted image.
+ */
+async function uploadImageToFirebase(req, file, resolution = 'high') {
+  const inputFilePath = file.path;
+  const { buffer: resizedBuffer, width, height } = await resizeImage(inputFilePath, resolution);
+  const extension = path.extname(inputFilePath);
+  const userId = req.user.id;
+
+  let webPBuffer;
+  let fileName = path.basename(inputFilePath);
+  if (extension.toLowerCase() === '.webp') {
+    webPBuffer = resizedBuffer;
+  } else {
+    webPBuffer = await sharp(resizedBuffer).toFormat('webp').toBuffer();
+    // Replace or append the correct extension
+    const extRegExp = new RegExp(path.extname(fileName) + '$');
+    fileName = fileName.replace(extRegExp, '.webp');
+    if (!path.extname(fileName)) {
+      fileName += '.webp';
+    }
+  }
+
+  const downloadURL = await saveBufferToFirebase({ userId, buffer: webPBuffer, fileName });
+
+  await fs.promises.unlink(inputFilePath);
+
+  const bytes = Buffer.byteLength(webPBuffer);
+  return { filepath: downloadURL, bytes, width, height };
+}
+
+/**
+ * Local: Updates the file and returns the URL in expected order/format
+ * for image payload handling: tuple order of [filepath, URL].
+ * @param {Object} req - The request object.
+ * @param {MongoFile} file - The file object.
+ * @returns {Promise<[MongoFile, string]>} - A promise that resolves to an array of results from updateFile and encodeImage.
+ */
+async function prepareImageURL(req, file) {
+  const { filepath } = file;
+  const promises = [];
+  promises.push(updateFile({ file_id: file.file_id }));
+  promises.push(filepath);
+  return await Promise.all(promises);
+}
+
+/**
+ * Uploads a user's avatar to Firebase Storage and returns the URL.
+ * If the 'manual' flag is set to 'true', it also updates the user's avatar URL in the database.
+ *
+ * @param {object} params - The parameters object.
+ * @param {Buffer} params.buffer - The Buffer containing the avatar image in WebP format.
+ * @param {object} params.User - The User document (mongoose); TODO: remove direct use of Model, `User`
+ * @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
+ * @returns {Promise<string>} - A promise that resolves with the URL of the uploaded avatar.
+ * @throws {Error} - Throws an error if Firebase is not initialized or if there is an error in uploading.
+ */
+async function processFirebaseAvatar({ buffer, User, manual }) {
+  try {
+    const downloadURL = await saveBufferToFirebase({
+      userId: User._id.toString(),
+      buffer,
+      fileName: 'avatar.png',
+    });
+
+    const isManual = manual === 'true';
+
+    const url = `${downloadURL}?manual=${isManual}`;
+
+    if (isManual) {
+      User.avatar = url;
+      await User.save();
+    }
+
+    return url;
+  } catch (error) {
+    logger.error('Error uploading profile picture:', error);
+    throw error;
+  }
+}
+
+module.exports = { uploadImageToFirebase, prepareImageURL, processFirebaseAvatar };
@@ -1,7 +1,9 @@
+const crud = require('./crud');
 const images = require('./images');
 const initialize = require('./initialize');

 module.exports = {
+  ...crud,
   ...images,
   ...initialize,
 };
174 api/server/services/Files/Local/crud.js Normal file
@@ -0,0 +1,174 @@
const fs = require('fs');
const path = require('path');
const axios = require('axios');
const { logger } = require('~/config');
const paths = require('~/config/paths');

/**
 * Saves a file to a specified output path with a new filename.
 *
 * @param {Express.Multer.File} file - The file object to be saved. Should contain properties like 'originalname' and 'path'.
 * @param {string} outputPath - The path where the file should be saved.
 * @param {string} outputFilename - The new filename for the saved file (without extension).
 * @returns {Promise<string>} The full path of the saved file.
 * @throws Will throw an error if the file saving process fails.
 */
async function saveFile(file, outputPath, outputFilename) {
  try {
    if (!fs.existsSync(outputPath)) {
      fs.mkdirSync(outputPath, { recursive: true });
    }

    const fileExtension = path.extname(file.originalname);
    const filenameWithExt = outputFilename + fileExtension;
    const outputFilePath = path.join(outputPath, filenameWithExt);
    fs.copyFileSync(file.path, outputFilePath);
    fs.unlinkSync(file.path);

    return outputFilePath;
  } catch (error) {
    logger.error('[saveFile] Error while saving the file:', error);
    throw error;
  }
}

/**
 * Saves an uploaded image file to a specified directory based on the user's ID and a filename.
 *
 * @param {Express.Request} req - The Express request object, containing the user's information and app configuration.
 * @param {Express.Multer.File} file - The uploaded file object.
 * @param {string} filename - The new filename to assign to the saved image (without extension).
 * @returns {Promise<void>}
 * @throws Will throw an error if the image saving process fails.
 */
const saveLocalImage = async (req, file, filename) => {
  const imagePath = req.app.locals.paths.imageOutput;
  const outputPath = path.join(imagePath, req.user.id ?? '');
  await saveFile(file, outputPath, filename);
};

/**
 * Saves a file from a given URL to a local directory. The function fetches the file using the provided URL,
 * determines the content type, and saves it to a specified local directory with the correct file extension.
 * If the specified directory does not exist, it is created. The function returns the name of the saved file
 * or null in case of an error.
 *
 * @param {Object} params - The parameters object.
 * @param {string} params.userId - The user's unique identifier. This is used to create a user-specific path
 *   in the local file system.
 * @param {string} params.URL - The URL of the file to be downloaded and saved.
 * @param {string} params.fileName - The desired file name for the saved file. This may be modified to include
 *   the correct file extension based on the content type.
 * @param {string} [params.basePath='images'] - Optional. The base directory where the file will be saved.
 *   Defaults to 'images' if not specified.
 *
 * @returns {Promise<string|null>}
 *   A promise that resolves to the file name if the file is successfully saved, or null if there is an error.
 */
async function saveFileFromURL({ userId, URL, fileName, basePath = 'images' }) {
  try {
    // Fetch the file from the URL
    const response = await axios({
      url: URL,
      responseType: 'stream',
    });

    // Get the content type from the response headers
    const contentType = response.headers['content-type'];
    let extension = contentType.split('/').pop();

    // Construct the outputPath based on the basePath and userId
    const outputPath = path.join(paths.publicPath, basePath, userId.toString());

    // Check if the output directory exists, if not, create it
    if (!fs.existsSync(outputPath)) {
      fs.mkdirSync(outputPath, { recursive: true });
    }

    // Replace or append the correct extension
    const extRegExp = new RegExp(path.extname(fileName) + '$');
    fileName = fileName.replace(extRegExp, `.${extension}`);
    if (!path.extname(fileName)) {
      fileName += `.${extension}`;
    }

    // Create a writable stream for the output path
    const outputFilePath = path.join(outputPath, fileName);
    const writer = fs.createWriteStream(outputFilePath);

    // Pipe the response data to the output file
    response.data.pipe(writer);

    return new Promise((resolve, reject) => {
      writer.on('finish', () => resolve(fileName));
      writer.on('error', reject);
    });
  } catch (error) {
    logger.error('[saveFileFromURL] Error while saving the file:', error);
    return null;
  }
}

/**
 * Constructs a local file path for a given file name and base path. This function simply joins the base
 * path and the file name to create a file path. It does not check for the existence of the file at the path.
 *
 * @param {Object} params - The parameters object.
 * @param {string} params.fileName - The name of the file for which the path is to be constructed. This should
 *   include the file extension.
 * @param {string} [params.basePath='images'] - Optional. The base directory to be used for constructing the file path.
 *   Defaults to 'images' if not specified.
 *
 * @returns {string}
 *   The constructed local file path.
 */
async function getLocalFileURL({ fileName, basePath = 'images' }) {
  return path.posix.join('/', basePath, fileName);
}

/**
 * Validates if a given filepath is within a specified subdirectory under a base path. This function constructs
 * the expected base path using the base, subfolder, and user id from the request, and then checks if the
 * provided filepath starts with this constructed base path.
 *
 * @param {Express.Request} req - The request object from Express. It should contain a `user` property with an `id`.
 * @param {string} base - The base directory path.
 * @param {string} subfolder - The subdirectory under the base path.
 * @param {string} filepath - The complete file path to be validated.
 *
 * @returns {boolean}
 *   Returns true if the filepath is within the specified base and subfolder, false otherwise.
 */
const isValidPath = (req, base, subfolder, filepath) => {
  const normalizedBase = path.resolve(base, subfolder, req.user.id);
  const normalizedFilepath = path.resolve(filepath);
  return normalizedFilepath.startsWith(normalizedBase);
};

/**
 * Deletes a file from the filesystem. This function takes a file object, constructs the full path, and
 * verifies the path's validity before deleting the file. If the path is invalid, an error is thrown.
 *
 * @param {Express.Request} req - The request object from Express. It should have an `app.locals.paths` object with
 *   a `publicPath` property.
 * @param {MongoFile} file - The file object to be deleted. It should have a `filepath` property that is
 *   a string representing the path of the file relative to the publicPath.
 *
 * @returns {Promise<void>}
 *   A promise that resolves when the file has been successfully deleted, or throws an error if the
 *   file path is invalid or if there is an error in deletion.
 */
const deleteLocalFile = async (req, file) => {
  const { publicPath } = req.app.locals.paths;
  const parts = file.filepath.split(path.sep);
  const subfolder = parts[1];
  const filepath = path.join(publicPath, file.filepath);

  if (!isValidPath(req, publicPath, subfolder, filepath)) {
    throw new Error('Invalid file path');
  }

  await fs.promises.unlink(filepath);
};

module.exports = { saveFile, saveLocalImage, saveFileFromURL, getLocalFileURL, deleteLocalFile };
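A matching usage sketch for the local strategy, assuming the public-path layout above; the helper name and require path are illustrative only.

// Sketch: download an image into local public storage and build the path it will be served from.
const { saveFileFromURL, getLocalFileURL } = require('./Local/crud');

async function mirrorImageLocally(userId, imageUrl) {
  const fileName = await saveFileFromURL({ userId, URL: imageUrl, fileName: 'img.png' });
  if (!fileName) {
    return null; // download or write failed
  }
  // Saved under <publicPath>/images/<userId>/; the return value is the relative URL, e.g. /images/<userId>/img.png
  return await getLocalFileURL({ fileName: `${userId}/${fileName}` });
}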
140 api/server/services/Files/Local/images.js Normal file
@@ -0,0 +1,140 @@
const fs = require('fs');
const path = require('path');
const sharp = require('sharp');
const { resizeImage } = require('../images/resize');
const { updateFile } = require('~/models/File');

/**
 * Converts an image file to the WebP format. The function first resizes the image based on the specified
 * resolution.
 *
 * If the original image is already in WebP format, it writes the resized image back. Otherwise,
 * it converts the image to WebP format before saving.
 *
 * The original image is deleted after conversion.
 *
 * @param {Object} req - The request object from Express. It should have a `user` property with an `id`
 *   representing the user, and an `app.locals.paths` object with an `imageOutput` path.
 * @param {Express.Multer.File} file - The file object, which is part of the request. The file object should
 *   have a `path` property that points to the location of the uploaded file.
 * @param {string} [resolution='high'] - Optional. The desired resolution for the image resizing. Default is 'high'.
 *
 * @returns {Promise<{ filepath: string, bytes: number, width: number, height: number}>}
 *   A promise that resolves to an object containing:
 *     - filepath: The path where the converted WebP image is saved.
 *     - bytes: The size of the converted image in bytes.
 *     - width: The width of the converted image.
 *     - height: The height of the converted image.
 */
async function uploadLocalImage(req, file, resolution = 'high') {
  const inputFilePath = file.path;
  const { buffer: resizedBuffer, width, height } = await resizeImage(inputFilePath, resolution);
  const extension = path.extname(inputFilePath);

  const { imageOutput } = req.app.locals.paths;
  const userPath = path.join(imageOutput, req.user.id);

  if (!fs.existsSync(userPath)) {
    fs.mkdirSync(userPath, { recursive: true });
  }

  const newPath = path.join(userPath, path.basename(inputFilePath));

  if (extension.toLowerCase() === '.webp') {
    const bytes = Buffer.byteLength(resizedBuffer);
    await fs.promises.writeFile(newPath, resizedBuffer);
    const filepath = path.posix.join('/', 'images', req.user.id, path.basename(newPath));
    return { filepath, bytes, width, height };
  }

  const outputFilePath = newPath.replace(extension, '.webp');
  const data = await sharp(resizedBuffer).toFormat('webp').toBuffer();
  await fs.promises.writeFile(outputFilePath, data);
  const bytes = Buffer.byteLength(data);
  const filepath = path.posix.join('/', 'images', req.user.id, path.basename(outputFilePath));
  await fs.promises.unlink(inputFilePath);
  return { filepath, bytes, width, height };
}

/**
 * Encodes an image file to base64.
 * @param {string} imagePath - The path to the image file.
 * @returns {Promise<string>} A promise that resolves with the base64 encoded image data.
 */
function encodeImage(imagePath) {
  return new Promise((resolve, reject) => {
    fs.readFile(imagePath, (err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve(data.toString('base64'));
      }
    });
  });
}

/**
 * Local: Updates the file and encodes the image to base64,
 * for image payload handling: tuple order of [filepath, base64].
 * @param {Object} req - The request object.
 * @param {MongoFile} file - The file object.
 * @returns {Promise<[MongoFile, string]>} - A promise that resolves to an array of results from updateFile and encodeImage.
 */
async function prepareImagesLocal(req, file) {
  const { publicPath, imageOutput } = req.app.locals.paths;
  const userPath = path.join(imageOutput, req.user.id);

  if (!fs.existsSync(userPath)) {
    fs.mkdirSync(userPath, { recursive: true });
  }
  const filepath = path.join(publicPath, file.filepath);

  const promises = [];
  promises.push(updateFile({ file_id: file.file_id }));
  promises.push(encodeImage(filepath));
  return await Promise.all(promises);
}

/**
 * Uploads a user's avatar to local storage and returns the URL.
 * If the 'manual' flag is set to 'true', it also updates the user's avatar URL in the database.
 *
 * @param {object} params - The parameters object.
 * @param {Buffer} params.buffer - The Buffer containing the avatar image in WebP format.
 * @param {object} params.User - The User document (mongoose); TODO: remove direct use of Model, `User`
 * @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
 * @returns {Promise<string>} - A promise that resolves with the URL of the uploaded avatar.
 * @throws {Error} - Throws an error if there is an error in uploading.
 */
async function processLocalAvatar({ buffer, User, manual }) {
  const userDir = path.resolve(
    __dirname,
    '..',
    '..',
    '..',
    '..',
    '..',
    'client',
    'public',
    'images',
    User._id.toString(),
  );
  const fileName = `avatar-${new Date().getTime()}.png`;
  const urlRoute = `/images/${User._id.toString()}/${fileName}`;
  const avatarPath = path.join(userDir, fileName);

  await fs.promises.mkdir(userDir, { recursive: true });
  await fs.promises.writeFile(avatarPath, buffer);

  const isManual = manual === 'true';
  let url = `${urlRoute}?manual=${isManual}`;

  if (isManual) {
    User.avatar = url;
    await User.save();
  }

  return url;
}

module.exports = { uploadLocalImage, encodeImage, prepareImagesLocal, processLocalAvatar };
7 api/server/services/Files/Local/index.js Normal file
@@ -0,0 +1,7 @@
const images = require('./images');
const crud = require('./crud');

module.exports = {
  ...crud,
  ...images,
};
78 api/server/services/Files/images/avatar.js Normal file
@@ -0,0 +1,78 @@
const sharp = require('sharp');
const fs = require('fs').promises;
const fetch = require('node-fetch');
const User = require('~/models/User');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { logger } = require('~/config');

async function convertToWebP(inputBuffer) {
  return sharp(inputBuffer).resize({ width: 150 }).toFormat('webp').toBuffer();
}

/**
 * Uploads an avatar image for a user. This function can handle various types of input (URL, Buffer, or File object),
 * processes the image to a square format, converts it to WebP format, and then uses a specified file strategy for
 * further processing. It performs validation on the user ID and the input type. The function can throw errors for
 * invalid input types, fetching issues, or other processing errors.
 *
 * @param {Object} params - The parameters object.
 * @param {string} params.userId - The unique identifier of the user for whom the avatar is being uploaded.
 * @param {FileSources} params.fileStrategy - The file handling strategy to use, determining how the avatar is processed.
 * @param {(string|Buffer|File)} params.input - The input representing the avatar image. Can be a URL (string),
 *   a Buffer, or a File object.
 * @param {string} params.manual - A string flag indicating whether the upload process is manual.
 *
 * @returns {Promise<any>}
 *   A promise that resolves to the result of the `processAvatar` function, specific to the chosen file
 *   strategy. Throws an error if any step in the process fails.
 *
 * @throws {Error} Throws an error if the user ID is undefined, the input type is invalid, the image fetching fails,
 *   or any other error occurs during the processing.
 */
async function uploadAvatar({ userId, fileStrategy, input, manual }) {
  try {
    if (userId === undefined) {
      throw new Error('User ID is undefined');
    }
    const _id = userId;
    // TODO: remove direct use of Model, `User`
    const oldUser = await User.findOne({ _id });

    let imageBuffer;
    if (typeof input === 'string') {
      const response = await fetch(input);

      if (!response.ok) {
        throw new Error(`Failed to fetch image from URL. Status: ${response.status}`);
      }
      imageBuffer = await response.buffer();
    } else if (input instanceof Buffer) {
      imageBuffer = input;
    } else if (typeof input === 'object' && input instanceof File) {
      const fileContent = await fs.readFile(input.path);
      imageBuffer = Buffer.from(fileContent);
    } else {
      throw new Error('Invalid input type. Expected URL, Buffer, or File.');
    }

    const { width, height } = await sharp(imageBuffer).metadata();
    const minSize = Math.min(width, height);
    const squaredBuffer = await sharp(imageBuffer)
      .extract({
        left: Math.floor((width - minSize) / 2),
        top: Math.floor((height - minSize) / 2),
        width: minSize,
        height: minSize,
      })
      .toBuffer();

    const webPBuffer = await convertToWebP(squaredBuffer);
    const { processAvatar } = getStrategyFunctions(fileStrategy);
    return await processAvatar({ buffer: webPBuffer, User: oldUser, manual });
  } catch (error) {
    logger.error('Error uploading the avatar:', error);
    throw error;
  }
}

module.exports = uploadAvatar;
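A hedged sketch of a call site, e.g. from the avatar route or a social-login profile import; the surrounding handler and variable names are assumptions, not code from this commit.

// Sketch: strategy-aware avatar upload from inside an async route handler
const uploadAvatar = require('~/server/services/Files/images/avatar');

async function handleAvatarUpload(req, profilePictureUrl) {
  return await uploadAvatar({
    userId: req.user.id,
    fileStrategy: req.app.locals.fileStrategy, // 'local' or 'firebase', set by AppService
    input: profilePictureUrl,                  // URL string, Buffer, or File
    manual: 'true',                            // 'true' also persists the URL on the User document
  });
}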
@@ -1,29 +0,0 @@
const { ref, uploadBytes, getDownloadURL } = require('firebase/storage');
const { getFirebaseStorage } = require('~/server/services/Files/Firebase/initialize');
const { logger } = require('~/config');

async function firebaseStrategy(userId, webPBuffer, oldUser, manual) {
  try {
    const storage = getFirebaseStorage();
    if (!storage) {
      throw new Error('Firebase is not initialized.');
    }
    const avatarRef = ref(storage, `images/${userId.toString()}/avatar`);

    await uploadBytes(avatarRef, webPBuffer);
    const urlFirebase = await getDownloadURL(avatarRef);
    const isManual = manual === 'true';

    const url = `${urlFirebase}?manual=${isManual}`;
    if (isManual) {
      oldUser.avatar = url;
      await oldUser.save();
    }
    return url;
  } catch (error) {
    logger.error('Error uploading profile picture:', error);
    throw error;
  }
}

module.exports = firebaseStrategy;
@@ -1,32 +0,0 @@
const fs = require('fs').promises;
const path = require('path');

async function localStrategy(userId, webPBuffer, oldUser, manual) {
  const userDir = path.resolve(
    __dirname,
    '..',
    '..',
    '..',
    '..',
    '..',
    '..',
    'client',
    'public',
    'images',
    userId,
  );
  let avatarPath = path.join(userDir, 'avatar.png');
  const urlRoute = `/images/${userId}/avatar.png`;
  await fs.mkdir(userDir, { recursive: true });
  await fs.writeFile(avatarPath, webPBuffer);
  const isManual = manual === 'true';
  let url = `${urlRoute}?manual=${isManual}&timestamp=${new Date().getTime()}`;
  if (isManual) {
    oldUser.avatar = url;
    await oldUser.save();
  }

  return url;
}

module.exports = localStrategy;
@@ -1,63 +0,0 @@
const sharp = require('sharp');
const fetch = require('node-fetch');
const fs = require('fs').promises;
const User = require('~/models/User');
const { getFirebaseStorage } = require('~/server/services/Files/Firebase/initialize');
const firebaseStrategy = require('./firebaseStrategy');
const localStrategy = require('./localStrategy');
const { logger } = require('~/config');

async function convertToWebP(inputBuffer) {
  return sharp(inputBuffer).resize({ width: 150 }).toFormat('webp').toBuffer();
}

async function uploadAvatar(userId, input, manual) {
  try {
    if (userId === undefined) {
      throw new Error('User ID is undefined');
    }
    const _id = userId;
    // TODO: remove direct use of Model, `User`
    const oldUser = await User.findOne({ _id });
    let imageBuffer;
    if (typeof input === 'string') {
      const response = await fetch(input);

      if (!response.ok) {
        throw new Error(`Failed to fetch image from URL. Status: ${response.status}`);
      }
      imageBuffer = await response.buffer();
    } else if (input instanceof Buffer) {
      imageBuffer = input;
    } else if (typeof input === 'object' && input instanceof File) {
      const fileContent = await fs.readFile(input.path);
      imageBuffer = Buffer.from(fileContent);
    } else {
      throw new Error('Invalid input type. Expected URL, Buffer, or File.');
    }
    const { width, height } = await sharp(imageBuffer).metadata();
    const minSize = Math.min(width, height);
    const squaredBuffer = await sharp(imageBuffer)
      .extract({
        left: Math.floor((width - minSize) / 2),
        top: Math.floor((height - minSize) / 2),
        width: minSize,
        height: minSize,
      })
      .toBuffer();
    const webPBuffer = await convertToWebP(squaredBuffer);
    const storage = getFirebaseStorage();
    if (storage) {
      const url = await firebaseStrategy(userId, webPBuffer, oldUser, manual);
      return url;
    }

    const url = await localStrategy(userId, webPBuffer, oldUser, manual);
    return url;
  } catch (error) {
    logger.error('Error uploading the avatar:', error);
    throw error;
  }
}

module.exports = uploadAvatar;
@@ -1,36 +0,0 @@
const path = require('path');
const sharp = require('sharp');
const fs = require('fs');
const { resizeImage } = require('./resize');

async function convertToWebP(req, file, resolution = 'high') {
  const inputFilePath = file.path;
  const { buffer: resizedBuffer, width, height } = await resizeImage(inputFilePath, resolution);
  const extension = path.extname(inputFilePath);

  const { imageOutput } = req.app.locals.config;
  const userPath = path.join(imageOutput, req.user.id);

  if (!fs.existsSync(userPath)) {
    fs.mkdirSync(userPath, { recursive: true });
  }

  const newPath = path.join(userPath, path.basename(inputFilePath));

  if (extension.toLowerCase() === '.webp') {
    const bytes = Buffer.byteLength(resizedBuffer);
    await fs.promises.writeFile(newPath, resizedBuffer);
    const filepath = path.posix.join('/', 'images', req.user.id, path.basename(newPath));
    return { filepath, bytes, width, height };
  }

  const outputFilePath = newPath.replace(extension, '.webp');
  const data = await sharp(resizedBuffer).toFormat('webp').toBuffer();
  await fs.promises.writeFile(outputFilePath, data);
  const bytes = Buffer.byteLength(data);
  const filepath = path.posix.join('/', 'images', req.user.id, path.basename(outputFilePath));
  await fs.promises.unlink(inputFilePath);
  return { filepath, bytes, width, height };
}

module.exports = { convertToWebP };
@@ -1,45 +1,5 @@
-const fs = require('fs');
-const path = require('path');
-const { EModelEndpoint } = require('librechat-data-provider');
-const { updateFile } = require('~/models');
-
-/**
- * Encodes an image file to base64.
- * @param {string} imagePath - The path to the image file.
- * @returns {Promise<string>} A promise that resolves with the base64 encoded image data.
- */
-function encodeImage(imagePath) {
-  return new Promise((resolve, reject) => {
-    fs.readFile(imagePath, (err, data) => {
-      if (err) {
-        reject(err);
-      } else {
-        resolve(data.toString('base64'));
-      }
-    });
-  });
-}
-
-/**
- * Updates the file and encodes the image.
- * @param {Object} req - The request object.
- * @param {Object} file - The file object.
- * @returns {Promise<[MongoFile, string]>} - A promise that resolves to an array of results from updateFile and encodeImage.
- */
-async function updateAndEncode(req, file) {
-  const { publicPath, imageOutput } = req.app.locals.config;
-  const userPath = path.join(imageOutput, req.user.id);
-
-  if (!fs.existsSync(userPath)) {
-    fs.mkdirSync(userPath, { recursive: true });
-  }
-  const filepath = path.join(publicPath, file.filepath);
-
-  const promises = [];
-  promises.push(updateFile({ file_id: file.file_id }));
-  promises.push(encodeImage(filepath));
-  return await Promise.all(promises);
-}
-
+const { EModelEndpoint, FileSources } = require('librechat-data-provider');
+const { getStrategyFunctions } = require('../strategies');
+
 /**
  * Encodes and formats the given files.
@@ -50,25 +10,42 @@ async function updateAndEncode(req, file) {
  */
 async function encodeAndFormat(req, files, endpoint) {
   const promises = [];
+  const encodingMethods = {};
+
   for (let file of files) {
-    promises.push(updateAndEncode(req, file));
+    const source = file.source ?? FileSources.local;
+
+    if (encodingMethods[source]) {
+      promises.push(encodingMethods[source](req, file));
+      continue;
+    }
+
+    const { prepareImagePayload } = getStrategyFunctions(source);
+    if (!prepareImagePayload) {
+      throw new Error(`Encoding function not implemented for ${source}`);
+    }
+
+    encodingMethods[source] = prepareImagePayload;
+    promises.push(prepareImagePayload(req, file));
   }

-  // TODO: make detail configurable, as of now resizing is done
-  // to prefer "high" but "low" may be used if the image is small enough
-  const detail = req.body.detail ?? 'auto';
-  const encodedImages = await Promise.all(promises);
+  const detail = req.body.imageDetail ?? 'auto';
+
+  /** @type {Array<[MongoFile, string]>} */
+  const formattedImages = await Promise.all(promises);

   const result = {
     files: [],
     image_urls: [],
   };

-  for (const [file, base64] of encodedImages) {
+  for (const [file, imageContent] of formattedImages) {
     const imagePart = {
       type: 'image_url',
       image_url: {
-        url: `data:image/webp;base64,${base64}`,
+        url: imageContent.startsWith('http')
+          ? imageContent
+          : `data:image/webp;base64,${imageContent}`,
         detail,
       },
     };
@@ -81,17 +58,16 @@ async function encodeAndFormat(req, files, endpoint) {

     result.files.push({
       file_id: file.file_id,
-      filepath: file.filepath,
-      filename: file.filename,
-      type: file.type,
-      height: file.height,
-      width: file.width,
+      // filepath: file.filepath,
+      // filename: file.filename,
+      // type: file.type,
+      // height: file.height,
+      // width: file.width,
     });
   }
   return result;
 }

 module.exports = {
-  encodeImage,
   encodeAndFormat,
 };
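For orientation, a hedged sketch of the payload shape this path now yields: local images arrive as base64 data URIs, while Firebase images pass through as https download URLs. The exact handling of `image_urls` depends on unchanged context outside this hunk, so treat the shape below as an approximation.

// Inside an async handler; values are illustrative only.
const { files, image_urls } = await encodeAndFormat(req, requestFiles, endpoint);
// image_urls[0] ≈ {
//   type: 'image_url',
//   image_url: {
//     url: 'data:image/webp;base64,...',            // local source
//     //   or 'https://firebasestorage.googleapis.com/...', // firebase source
//     detail: req.body.imageDetail ?? 'auto',
//   },
// };
// files[0] ≈ { file_id: '...' } // only the id is retained on the message now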
@@ -1,15 +1,13 @@
-const convert = require('./convert');
+const avatar = require('./avatar');
 const encode = require('./encode');
 const parse = require('./parse');
 const resize = require('./resize');
 const validate = require('./validate');
-const uploadAvatar = require('./avatar/uploadAvatar');

 module.exports = {
-  ...convert,
   ...encode,
   ...parse,
   ...resize,
   ...validate,
-  uploadAvatar,
+  avatar,
 };
@@ -1,7 +1,7 @@
 const URL = require('url').URL;
 const path = require('path');

-const imageExtensionRegex = /\.(jpg|jpeg|png|gif|bmp|tiff|svg)$/i;
+const imageExtensionRegex = /\.(jpg|jpeg|png|gif|bmp|tiff|svg|webp)$/i;

 /**
  * Extracts the image basename from a given URL.
@@ -22,6 +22,24 @@ function getImageBasename(urlString) {
   }
 }

+/**
+ * Extracts the basename of a file from a given URL.
+ *
+ * @param {string} urlString - The URL string from which the file basename is to be extracted.
+ * @returns {string} The basename of the file from the URL.
+ *   Returns an empty string if the URL parsing fails.
+ */
+function getFileBasename(urlString) {
+  try {
+    const url = new URL(urlString);
+    return path.basename(url.pathname);
+  } catch (error) {
+    // If URL parsing fails, return an empty string
+    return '';
+  }
+}
+
 module.exports = {
   getImageBasename,
+  getFileBasename,
 };
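A small usage sketch, assuming `getImageBasename` returns the basename only when it matches the image-extension regex (its body is unchanged by this hunk); the URLs are placeholders.

const { getImageBasename, getFileBasename } = require('./parse');

getImageBasename('https://example.com/u/1/photo.webp'); // 'photo.webp' — .webp now matches
getFileBasename('https://example.com/u/1/report.pdf');  // 'report.pdf' — any extension
getFileBasename('not a url');                           // '' — URL parsing failed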
@@ -1,9 +0,0 @@
const localStrategy = require('./localStrategy');
const process = require('./process');
const save = require('./save');

module.exports = {
  ...save,
  ...process,
  localStrategy,
};
@@ -1,36 +0,0 @@
const { createFile } = require('~/models');
const { convertToWebP } = require('./images/convert');

/**
 * Applies the local strategy for image uploads.
 * Saves file metadata to the database with an expiry TTL.
 * Files must be deleted from the server filesystem manually.
 *
 * @param {Object} params - The parameters object.
 * @param {Express.Request} params.req - The Express request object.
 * @param {Express.Response} params.res - The Express response object.
 * @param {Express.Multer.File} params.file - The uploaded file.
 * @param {ImageMetadata} params.metadata - Additional metadata for the file.
 * @returns {Promise<void>}
 */
const localStrategy = async ({ req, res, file, metadata }) => {
  const { file_id, temp_file_id } = metadata;
  const { filepath, bytes, width, height } = await convertToWebP(req, file);
  const result = await createFile(
    {
      user: req.user.id,
      file_id,
      temp_file_id,
      bytes,
      filepath,
      filename: file.originalname,
      type: 'image/webp',
      width,
      height,
    },
    true,
  );
  res.status(200).json({ message: 'File uploaded and processed successfully', ...result });
};

module.exports = localStrategy;
@@ -1,17 +1,6 @@
-const { updateFileUsage } = require('~/models');
+const { updateFileUsage, createFile } = require('~/models');
+const { getStrategyFunctions } = require('./strategies');
+const { logger } = require('~/config');
-
-// const mapImageUrls = (files, detail) => {
-//   return files
-//     .filter((file) => file.type.includes('image'))
-//     .map((file) => ({
-//       type: 'image_url',
-//       image_url: {
-//         /* Temporarily set to path to encode later */
-//         url: file.filepath,
-//         detail,
-//       },
-//     }));
-// };

 const processFiles = async (files) => {
   const promises = [];
@@ -24,6 +13,76 @@ const processFiles = async (files) => {
   return await Promise.all(promises);
 };

-module.exports = {
-  processFiles,
+/**
+ * Processes a file URL using a specified file handling strategy. This function accepts a strategy name,
+ * fetches the corresponding file processing functions (for saving and retrieving file URLs), and then
+ * executes these functions in sequence. It first saves the file using the provided URL and then retrieves
+ * the URL of the saved file. If any error occurs during this process, it logs the error and throws an
+ * exception with an appropriate message.
+ *
+ * @param {Object} params - The parameters object.
+ * @param {FileSources} params.fileStrategy - The file handling strategy to use. Must be a value from the
+ *   `FileSources` enum, which defines different file handling
+ *   strategies (like saving to Firebase, local storage, etc.).
+ * @param {string} params.userId - The user's unique identifier. Used for creating user-specific paths or
+ *   references in the file handling process.
+ * @param {string} params.URL - The URL of the file to be processed.
+ * @param {string} params.fileName - The name that will be used to save the file. This should include the
+ *   file extension.
+ * @param {string} params.basePath - The base path or directory where the file will be saved or retrieved from.
+ *
+ * @returns {Promise<string>}
+ *   A promise that resolves to the URL of the processed file. It throws an error if the file processing
+ *   fails at any stage.
+ */
+const processFileURL = async ({ fileStrategy, userId, URL, fileName, basePath }) => {
+  const { saveURL, getFileURL } = getStrategyFunctions(fileStrategy);
+  try {
+    await saveURL({ userId, URL, fileName, basePath });
+    return await getFileURL({ fileName: `${userId}/${fileName}`, basePath });
+  } catch (error) {
+    logger.error(`Error while processing the image with ${fileStrategy}:`, error);
+    throw new Error(`Failed to process the image with ${fileStrategy}. ${error.message}`);
+  }
+};
+
+/**
+ * Applies the current strategy for image uploads.
+ * Saves file metadata to the database with an expiry TTL.
+ * Files must be deleted from the server filesystem manually.
+ *
+ * @param {Object} params - The parameters object.
+ * @param {Express.Request} params.req - The Express request object.
+ * @param {Express.Response} params.res - The Express response object.
+ * @param {Express.Multer.File} params.file - The uploaded file.
+ * @param {ImageMetadata} params.metadata - Additional metadata for the file.
+ * @returns {Promise<void>}
+ */
+const processImageUpload = async ({ req, res, file, metadata }) => {
+  const source = req.app.locals.fileStrategy;
+  const { handleImageUpload } = getStrategyFunctions(source);
+  const { file_id, temp_file_id } = metadata;
+  const { filepath, bytes, width, height } = await handleImageUpload(req, file);
+  const result = await createFile(
+    {
+      user: req.user.id,
+      file_id,
+      temp_file_id,
+      bytes,
+      filepath,
+      filename: file.originalname,
+      source,
+      type: 'image/webp',
+      width,
+      height,
+    },
+    true,
+  );
+  res.status(200).json({ message: 'File uploaded and processed successfully', ...result });
+};
+
+module.exports = {
+  processImageUpload,
+  processFiles,
+  processFileURL,
 };
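A hedged sketch of how a generator such as DALL-E might hand its temporary image URL to `processFileURL`; the handler shape and values are placeholders, and the actual DALL-E wiring lives in other files of this PR.

// Sketch: persist a generated image with the configured strategy and get back a servable URL.
const { processFileURL } = require('~/server/services/Files/process');

async function saveGeneratedImage(req, generationUrl) {
  return await processFileURL({
    fileStrategy: process.env.CDN_PROVIDER, // set by AppService from the YAML `fileStrategy`
    userId: req.user.id,
    URL: generationUrl,
    fileName: 'img-1234.png', // placeholder name
    basePath: 'images',
  });
}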
@@ -1,48 +0,0 @@
const fs = require('fs');
const path = require('path');
const { logger } = require('~/config');

/**
 * Saves a file to a specified output path with a new filename.
 *
 * @param {Express.Multer.File} file - The file object to be saved. Should contain properties like 'originalname' and 'path'.
 * @param {string} outputPath - The path where the file should be saved.
 * @param {string} outputFilename - The new filename for the saved file (without extension).
 * @returns {Promise<string>} The full path of the saved file.
 * @throws Will throw an error if the file saving process fails.
 */
async function saveFile(file, outputPath, outputFilename) {
  try {
    if (!fs.existsSync(outputPath)) {
      fs.mkdirSync(outputPath, { recursive: true });
    }

    const fileExtension = path.extname(file.originalname);
    const filenameWithExt = outputFilename + fileExtension;
    const outputFilePath = path.join(outputPath, filenameWithExt);
    fs.copyFileSync(file.path, outputFilePath);
    fs.unlinkSync(file.path);

    return outputFilePath;
  } catch (error) {
    logger.error('[saveFile] Error while saving the file:', error);
    throw error;
  }
}

/**
 * Saves an uploaded image file to a specified directory based on the user's ID and a filename.
 *
 * @param {Express.Request} req - The Express request object, containing the user's information and app configuration.
 * @param {Express.Multer.File} file - The uploaded file object.
 * @param {string} filename - The new filename to assign to the saved image (without extension).
 * @returns {Promise<void>}
 * @throws Will throw an error if the image saving process fails.
 */
const saveLocalImage = async (req, file, filename) => {
  const imagePath = req.app.locals.config.imageOutput;
  const outputPath = path.join(imagePath, req.user.id ?? '');
  await saveFile(file, outputPath, filename);
};

module.exports = { saveFile, saveLocalImage };
api/server/services/Files/strategies.js (new file)
@ -0,0 +1,54 @@
const { FileSources } = require('librechat-data-provider');
const {
  getFirebaseURL,
  prepareImageURL,
  saveURLToFirebase,
  deleteFirebaseFile,
  uploadImageToFirebase,
  processFirebaseAvatar,
} = require('./Firebase');
const {
  getLocalFileURL,
  saveFileFromURL,
  deleteLocalFile,
  uploadLocalImage,
  prepareImagesLocal,
  processLocalAvatar,
} = require('./Local');

// Firebase Strategy Functions
const firebaseStrategy = () => ({
  // saveFile:
  saveURL: saveURLToFirebase,
  getFileURL: getFirebaseURL,
  deleteFile: deleteFirebaseFile,
  prepareImagePayload: prepareImageURL,
  processAvatar: processFirebaseAvatar,
  handleImageUpload: uploadImageToFirebase,
});

// Local Strategy Functions
const localStrategy = () => ({
  // saveFile: ,
  saveURL: saveFileFromURL,
  getFileURL: getLocalFileURL,
  deleteFile: deleteLocalFile,
  processAvatar: processLocalAvatar,
  handleImageUpload: uploadLocalImage,
  prepareImagePayload: prepareImagesLocal,
});

// Strategy Selector
const getStrategyFunctions = (fileSource) => {
  if (fileSource === FileSources.firebase) {
    return firebaseStrategy();
  } else if (fileSource === FileSources.local) {
    return localStrategy();
  } else {
    throw new Error('Invalid file source');
  }
};

module.exports = {
  getStrategyFunctions,
};
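A short illustrative sketch of how this selector is meant to be consumed; the `CDN_PROVIDER` fallback shown here is an assumption drawn from the rest of this commit, not part of strategies.js itself:

```js
// Illustrative only; not part of the diff.
const { FileSources } = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');

const source = process.env.CDN_PROVIDER ?? FileSources.local;
// Every strategy exposes the same function names, so callers never
// branch on Firebase vs. local storage themselves.
const { saveURL, getFileURL, deleteFile, handleImageUpload } = getStrategyFunctions(source);
```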
@ -10,6 +10,10 @@ const {
} = require('../strategies');
const client = require('../cache/redis');

/**
 *
 * @param {Express.Application} app
 */
const configureSocialLogins = (app) => {
  if (process.env.GOOGLE_CLIENT_ID && process.env.GOOGLE_CLIENT_SECRET) {
    passport.use(googleLogin());
@ -1,12 +1,14 @@
const { Strategy: DiscordStrategy } = require('passport-discord');
const { createNewUser, handleExistingUser } = require('./process');
const { logger } = require('~/config');
const User = require('~/models/User');
const { useFirebase, uploadAvatar } = require('~/server/services/Files/images');

const discordLogin = async (accessToken, refreshToken, profile, cb) => {
  try {
    const email = profile.email;
    const discordId = profile.id;

    // TODO: remove direct access of User model
    const oldUser = await User.findOne({ email });
    const ALLOW_SOCIAL_REGISTRATION =
      process.env.ALLOW_SOCIAL_REGISTRATION?.toLowerCase() === 'true';

@ -21,12 +23,20 @@ const discordLogin = async (accessToken, refreshToken, profile, cb) => {
    }

    if (oldUser) {
      await handleExistingUser(oldUser, avatarUrl, useFirebase);
      await handleExistingUser(oldUser, avatarUrl);
      return cb(null, oldUser);
    }

    if (ALLOW_SOCIAL_REGISTRATION) {
      const newUser = await createNewUser(profile, discordId, email, avatarUrl, useFirebase);
      const newUser = await createNewUser({
        email,
        avatarUrl,
        provider: 'discord',
        providerKey: 'discordId',
        providerId: discordId,
        username: profile.username,
        name: profile.global_name,
      });
      return cb(null, newUser);
    }
  } catch (err) {

@ -35,38 +45,6 @@ const discordLogin = async (accessToken, refreshToken, profile, cb) => {
  }
};

const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => {
  if (!useFirebase && !oldUser.avatar.includes('?manual=true')) {
    oldUser.avatar = avatarUrl;
    await oldUser.save();
  } else if (useFirebase && !oldUser.avatar.includes('?manual=true')) {
    const userId = oldUser._id;
    const newavatarUrl = await uploadAvatar(userId, avatarUrl);
    oldUser.avatar = newavatarUrl;
    await oldUser.save();
  }
};

const createNewUser = async (profile, discordId, email, avatarUrl, useFirebase) => {
  const newUser = await new User({
    provider: 'discord',
    discordId,
    username: profile.username,
    email,
    name: profile.global_name,
    avatar: avatarUrl,
  }).save();

  if (useFirebase) {
    const userId = newUser._id;
    const newavatarUrl = await uploadAvatar(userId, avatarUrl);
    newUser.avatar = newavatarUrl;
    await newUser.save();
  }

  return newUser;
};

module.exports = () =>
  new DiscordStrategy(
    {
@ -1,7 +1,7 @@
const FacebookStrategy = require('passport-facebook').Strategy;
const { createNewUser, handleExistingUser } = require('./process');
const { logger } = require('~/config');
const User = require('~/models/User');
const { useFirebase, uploadAvatar } = require('~/server/services/Files/images');

const facebookLogin = async (accessToken, refreshToken, profile, cb) => {
  try {

@ -13,12 +13,20 @@ const facebookLogin = async (accessToken, refreshToken, profile, cb) => {
    const avatarUrl = profile.photos[0]?.value;

    if (oldUser) {
      await handleExistingUser(oldUser, avatarUrl, useFirebase);
      await handleExistingUser(oldUser, avatarUrl);
      return cb(null, oldUser);
    }

    if (ALLOW_SOCIAL_REGISTRATION) {
      const newUser = await createNewUser(profile, facebookId, email, avatarUrl, useFirebase);
      const newUser = await createNewUser({
        email,
        avatarUrl,
        provider: 'facebook',
        providerKey: 'facebookId',
        providerId: facebookId,
        username: profile.displayName,
        name: profile.name?.givenName + ' ' + profile.name?.familyName,
      });
      return cb(null, newUser);
    }
  } catch (err) {

@ -27,38 +35,6 @@ const facebookLogin = async (accessToken, refreshToken, profile, cb) => {
  }
};

const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => {
  if (!useFirebase && !oldUser.avatar.includes('?manual=true')) {
    oldUser.avatar = avatarUrl;
    await oldUser.save();
  } else if (useFirebase && !oldUser.avatar.includes('?manual=true')) {
    const userId = oldUser._id;
    const newavatarUrl = await uploadAvatar(userId, avatarUrl);
    oldUser.avatar = newavatarUrl;
    await oldUser.save();
  }
};

const createNewUser = async (profile, facebookId, email, avatarUrl, useFirebase) => {
  const newUser = await new User({
    provider: 'facebook',
    facebookId,
    username: profile.displayName,
    email,
    name: profile.name?.givenName + ' ' + profile.name?.familyName,
    avatar: avatarUrl,
  }).save();

  if (useFirebase) {
    const userId = newUser._id;
    const newavatarUrl = await uploadAvatar(userId, avatarUrl);
    newUser.avatar = newavatarUrl;
    await newUser.save();
  }

  return newUser;
};

module.exports = () =>
  new FacebookStrategy(
    {
@ -1,7 +1,7 @@
const { Strategy: GitHubStrategy } = require('passport-github2');
const { createNewUser, handleExistingUser } = require('./process');
const { logger } = require('~/config');
const User = require('~/models/User');
const { useFirebase, uploadAvatar } = require('~/server/services/Files/images');

const githubLogin = async (accessToken, refreshToken, profile, cb) => {
  try {

@ -13,12 +13,21 @@ const githubLogin = async (accessToken, refreshToken, profile, cb) => {
    const avatarUrl = profile.photos[0].value;

    if (oldUser) {
      await handleExistingUser(oldUser, avatarUrl, useFirebase);
      await handleExistingUser(oldUser, avatarUrl);
      return cb(null, oldUser);
    }

    if (ALLOW_SOCIAL_REGISTRATION) {
      const newUser = await createNewUser(profile, githubId, email, avatarUrl, useFirebase);
      const newUser = await createNewUser({
        email,
        avatarUrl,
        provider: 'github',
        providerKey: 'githubId',
        providerId: githubId,
        username: profile.username,
        name: profile.displayName,
        emailVerified: profile.emails[0].verified,
      });
      return cb(null, newUser);
    }
  } catch (err) {

@ -27,39 +36,6 @@ const githubLogin = async (accessToken, refreshToken, profile, cb) => {
  }
};

const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => {
  if (!useFirebase && !oldUser.avatar.includes('?manual=true')) {
    oldUser.avatar = avatarUrl;
    await oldUser.save();
  } else if (useFirebase && !oldUser.avatar.includes('?manual=true')) {
    const userId = oldUser._id;
    const avatarURL = await uploadAvatar(userId, avatarUrl);
    oldUser.avatar = avatarURL;
    await oldUser.save();
  }
};

const createNewUser = async (profile, githubId, email, avatarUrl, useFirebase) => {
  const newUser = await new User({
    provider: 'github',
    githubId,
    username: profile.username,
    email,
    emailVerified: profile.emails[0].verified,
    name: profile.displayName,
    avatar: avatarUrl,
  }).save();

  if (useFirebase) {
    const userId = newUser._id;
    const avatarURL = await uploadAvatar(userId, avatarUrl);
    newUser.avatar = avatarURL;
    await newUser.save();
  }

  return newUser;
};

module.exports = () =>
  new GitHubStrategy(
    {
@ -1,7 +1,7 @@
const { Strategy: GoogleStrategy } = require('passport-google-oauth20');
const { createNewUser, handleExistingUser } = require('./process');
const { logger } = require('~/config');
const User = require('~/models/User');
const { useFirebase, uploadAvatar } = require('~/server/services/Files/images');

const googleLogin = async (accessToken, refreshToken, profile, cb) => {
  try {

@ -13,12 +13,21 @@ const googleLogin = async (accessToken, refreshToken, profile, cb) => {
    const avatarUrl = profile.photos[0].value;

    if (oldUser) {
      await handleExistingUser(oldUser, avatarUrl, useFirebase);
      await handleExistingUser(oldUser, avatarUrl);
      return cb(null, oldUser);
    }

    if (ALLOW_SOCIAL_REGISTRATION) {
      const newUser = await createNewUser(profile, googleId, email, avatarUrl, useFirebase);
      const newUser = await createNewUser({
        email,
        avatarUrl,
        provider: 'google',
        providerKey: 'googleId',
        providerId: googleId,
        username: profile.name.givenName,
        name: `${profile.name.givenName} ${profile.name.familyName}`,
        emailVerified: profile.emails[0].verified,
      });
      return cb(null, newUser);
    }
  } catch (err) {

@ -27,39 +36,6 @@ const googleLogin = async (accessToken, refreshToken, profile, cb) => {
  }
};

const handleExistingUser = async (oldUser, avatarUrl, useFirebase) => {
  if ((!useFirebase && !oldUser.avatar.includes('?manual=true')) || oldUser.avatar === null) {
    oldUser.avatar = avatarUrl;
    await oldUser.save();
  } else if (useFirebase && !oldUser.avatar.includes('?manual=true')) {
    const userId = oldUser._id;
    const avatarURL = await uploadAvatar(userId, avatarUrl);
    oldUser.avatar = avatarURL;
    await oldUser.save();
  }
};

const createNewUser = async (profile, googleId, email, avatarUrl, useFirebase) => {
  const newUser = await new User({
    provider: 'google',
    googleId,
    username: profile.name.givenName,
    email,
    emailVerified: profile.emails[0].verified,
    name: `${profile.name.givenName} ${profile.name.familyName}`,
    avatar: avatarUrl,
  }).save();

  if (useFirebase) {
    const userId = newUser._id;
    const avatarURL = await uploadAvatar(userId, avatarUrl);
    newUser.avatar = avatarURL;
    await newUser.save();
  }

  return newUser;
};

module.exports = () =>
  new GoogleStrategy(
    {
api/strategies/process.js (new file)
@ -0,0 +1,92 @@
const { FileSources } = require('librechat-data-provider');
const uploadAvatar = require('~/server/services/Files/images/avatar');
const User = require('~/models/User');

/**
 * Updates the avatar URL of an existing user. If the user's avatar URL does not include the query parameter
 * '?manual=true', it updates the user's avatar with the provided URL. For local file storage, it directly updates
 * the avatar URL, while for other storage types, it processes the avatar URL using the specified file strategy.
 *
 * @param {User} oldUser - The existing user object that needs to be updated. Expected to have an 'avatar' property.
 * @param {string} avatarUrl - The new avatar URL to be set for the user.
 *
 * @returns {Promise<void>}
 *   The function updates the user's avatar and saves the user object. It does not return any value.
 *
 * @throws {Error} Throws an error if there's an issue saving the updated user object.
 */
const handleExistingUser = async (oldUser, avatarUrl) => {
  const fileStrategy = process.env.CDN_PROVIDER;
  const isLocal = fileStrategy === FileSources.local;

  if (isLocal && !oldUser.avatar.includes('?manual=true')) {
    oldUser.avatar = avatarUrl;
    await oldUser.save();
  } else if (!isLocal && !oldUser.avatar.includes('?manual=true')) {
    const userId = oldUser._id;
    const newavatarUrl = await uploadAvatar({ userId, input: avatarUrl, fileStrategy });
    oldUser.avatar = newavatarUrl;
    await oldUser.save();
  }
};

/**
 * Creates a new user with the provided user details. If the file strategy is not local, the avatar URL is
 * processed using the specified file strategy. The new user is saved to the database with the processed or
 * original avatar URL.
 *
 * @param {Object} params - The parameters object for user creation.
 * @param {string} params.email - The email of the new user.
 * @param {string} params.avatarUrl - The avatar URL of the new user.
 * @param {string} params.provider - The provider of the user's account.
 * @param {string} params.providerKey - The key to identify the provider in the user model.
 * @param {string} params.providerId - The provider-specific ID of the user.
 * @param {string} params.username - The username of the new user.
 * @param {string} params.name - The name of the new user.
 * @param {boolean} [params.emailVerified=false] - Optional. Indicates whether the user's email is verified. Defaults to false.
 *
 * @returns {Promise<User>}
 *   A promise that resolves to the newly created user object.
 *
 * @throws {Error} Throws an error if there's an issue creating or saving the new user object.
 */
const createNewUser = async ({
  email,
  avatarUrl,
  provider,
  providerKey,
  providerId,
  username,
  name,
  emailVerified,
}) => {
  const update = {
    email,
    avatar: avatarUrl,
    provider,
    [providerKey]: providerId,
    username,
    name,
    emailVerified,
  };

  // TODO: remove direct access of User model
  const newUser = await new User(update).save();

  const fileStrategy = process.env.CDN_PROVIDER;
  const isLocal = fileStrategy === FileSources.local;

  if (!isLocal) {
    const userId = newUser._id;
    const newavatarUrl = await uploadAvatar({ userId, input: avatarUrl, fileStrategy });
    newUser.avatar = newavatarUrl;
    await newUser.save();
  }

  return newUser;
};

module.exports = {
  handleExistingUser,
  createNewUser,
};
@ -1,3 +1,43 @@
jest.mock('winston', () => {
  const mockFormatFunction = jest.fn((fn) => fn);

  mockFormatFunction.colorize = jest.fn();
  mockFormatFunction.combine = jest.fn();
  mockFormatFunction.label = jest.fn();
  mockFormatFunction.timestamp = jest.fn();
  mockFormatFunction.printf = jest.fn();
  mockFormatFunction.errors = jest.fn();
  mockFormatFunction.splat = jest.fn();
  return {
    format: mockFormatFunction,
    createLogger: jest.fn().mockReturnValue({
      info: jest.fn(),
      warn: jest.fn(),
      debug: jest.fn(),
      error: jest.fn(),
    }),
    transports: {
      Console: jest.fn(),
      DailyRotateFile: jest.fn(),
    },
    addColors: jest.fn(),
  };
});

jest.mock('winston-daily-rotate-file', () => {
  return jest.fn().mockImplementation(() => {
    return {
      level: 'error',
      filename: '../logs/error-%DATE%.log',
      datePattern: 'YYYY-MM-DD',
      zippedArchive: true,
      maxSize: '20m',
      maxFiles: '14d',
      format: 'format',
    };
  });
});

jest.mock('~/config', () => {
  return {
    logger: {

@ -8,3 +48,11 @@ jest.mock('~/config', () => {
    },
  };
});

jest.mock('~/config/parsers', () => {
  return {
    redactMessage: jest.fn(),
    redactFormat: jest.fn(),
    debugTraverse: jest.fn(),
  };
});
@ -26,6 +26,18 @@
 * @memberof typedefs
 */

/**
 * @exports TMessage
 * @typedef {import('librechat-data-provider').TMessage} TMessage
 * @memberof typedefs
 */

/**
 * @exports FileSources
 * @typedef {import('librechat-data-provider').FileSources} FileSources
 * @memberof typedefs
 */

/**
 * @exports ImageMetadata
 * @typedef {Object} ImageMetadata
@ -1,3 +1,4 @@
import { FileSources } from 'librechat-data-provider';
import type {
  TConversation,
  TMessage,

@ -230,6 +231,7 @@ export interface ExtendedFile {
  size: number;
  preview: string;
  progress: number;
  source?: FileSources;
}

export type ContextType = { navVisible: boolean; setNavVisible: (visible: boolean) => void };
@ -6,11 +6,12 @@ import { useChatHelpers, useSSE } from '~/hooks';
// import GenerationButtons from './Input/GenerationButtons';
import MessagesView from './Messages/MessagesView';
// import OptionsBar from './Input/OptionsBar';
import { useGetFiles } from '~/data-provider';
import { buildTree, mapFiles } from '~/utils';
import { Spinner } from '~/components/svg';
import { ChatContext } from '~/Providers';
import Presentation from './Presentation';
import ChatForm from './Input/ChatForm';
import { buildTree } from '~/utils';
import Landing from './Landing';
import Header from './Header';
import Footer from './Footer';

@ -21,11 +22,16 @@ function ChatView({ index = 0 }: { index?: number }) {
  const submissionAtIndex = useRecoilValue(store.submissionByIndex(0));
  useSSE(submissionAtIndex);

  const { data: fileMap } = useGetFiles({
    select: mapFiles,
  });

  const { data: messagesTree = null, isLoading } = useGetMessagesByConvoId(conversationId ?? '', {
    select: (data) => {
      const dataTree = buildTree(data, false);
      const dataTree = buildTree({ messages: data, fileMap });
      return dataTree?.length === 0 ? null : dataTree ?? null;
    },
    enabled: !!fileMap,
  });

  const chatHelpers = useChatHelpers(index, conversationId);
@ -1,5 +1,6 @@
import debounce from 'lodash/debounce';
import { useState, useEffect, useCallback } from 'react';
import { FileSources } from 'librechat-data-provider';
import type { BatchFile } from 'librechat-data-provider';
import { useDeleteFilesMutation } from '~/data-provider';
import { useSetFilesToDelete } from '~/hooks';

@ -70,13 +71,20 @@ export default function Images({
  }

  const deleteFile = (_file: ExtendedFile) => {
    const { file_id, progress, temp_file_id = '', filepath = '' } = _file;
    const {
      file_id,
      progress,
      temp_file_id = '',
      filepath = '',
      source = FileSources.local,
    } = _file;
    if (progress < 1) {
      return;
    }
    const file = {
    const file: BatchFile = {
      file_id,
      filepath,
      source,
    };

    setFiles((currentFiles) => {
@ -30,7 +30,7 @@ const ErrorMessage = ({ text }: TText) => {
// Display Message Component
const DisplayMessage = ({ text, isCreatedByUser, message, showCursor }: TDisplayProps) => {
  const imageFiles = message?.files
    ? message.files.filter((file) => file.type.startsWith('image/'))
    ? message.files.filter((file) => file.type && file.type.startsWith('image/'))
    : null;
  return (
    <Container>
@ -1,4 +1,5 @@
import { useEffect } from 'react';
import { FileSources } from 'librechat-data-provider';
import type { ExtendedFile } from '~/common';
import { useDragHelpers, useSetFilesToDelete } from '~/hooks';
import DragDropOverlay from './Input/Files/DragDropOverlay';

@ -25,6 +26,7 @@ export default function Presentation({ children }: { children: React.ReactNode }
      .map((file) => ({
        file_id: file.file_id,
        filepath: file.filepath as string,
        source: file.source as FileSources,
      }));

    if (files.length === 0) {
@ -18,7 +18,7 @@ export default function PopoverButtons({
  buttonClass,
  iconClass = '',
}: {
  endpoint: EModelEndpoint;
  endpoint: EModelEndpoint | string;
  buttonClass?: string;
  iconClass?: string;
}) {

@ -32,7 +32,9 @@ export default function PopoverButtons({
  const buttons: { [key: string]: TPopoverButton[] } = {
    google: [
      {
        label: (showExamples ? localize('com_endpoint_hide') : localize('com_endpoint_show')) + localize('com_endpoint_examples'),
        label:
          (showExamples ? localize('com_endpoint_hide') : localize('com_endpoint_show')) +
          localize('com_endpoint_examples'),
        buttonClass: isCodeChat ? 'disabled' : '',
        handler: triggerExamples,
        icon: <MessagesSquared className={cn('mr-1 w-[14px]', iconClass)} />,

@ -40,7 +42,10 @@ export default function PopoverButtons({
    ],
    gptPlugins: [
      {
        label: localize('com_endpoint_show_what_settings', showAgentSettings ? localize('com_endpoint_completion') : localize('com_endpoint_agent')),
        label: localize(
          'com_endpoint_show_what_settings',
          showAgentSettings ? localize('com_endpoint_completion') : localize('com_endpoint_agent'),
        ),
        buttonClass: '',
        handler: () => setShowAgentSettings((prev) => !prev),
        icon: <GPTIcon className={cn('mr-1 w-[14px]', iconClass)} size={24} />,
@ -1,11 +1,13 @@
import TextareaAutosize from 'react-textarea-autosize';
import { ImageDetail, imageDetailNumeric, imageDetailValue } from 'librechat-data-provider';
import {
  SelectDropDown,
  Input,
  Label,
  Switch,
  Slider,
  InputNumber,
  HoverCard,
  InputNumber,
  SelectDropDown,
  HoverCardTrigger,
} from '~/components/ui';
import { cn, defaultTextProps, optionText, removeFocusOutlines } from '~/utils/';

@ -20,6 +22,8 @@ export default function Settings({ conversation, setOption, models, readonly }:
    return null;
  }
  const {
    endpoint,
    endpointType,
    model,
    chatGptLabel,
    promptPrefix,

@ -27,6 +31,8 @@
    top_p: topP,
    frequency_penalty: freqP,
    presence_penalty: presP,
    resendImages,
    imageDetail,
  } = conversation;
  const setModel = setOption('model');
  const setChatGptLabel = setOption('chatGptLabel');

@ -35,6 +41,10 @@
  const setTopP = setOption('top_p');
  const setFreqP = setOption('frequency_penalty');
  const setPresP = setOption('presence_penalty');
  const setResendImages = setOption('resendImages');
  const setImageDetail = setOption('imageDetail');

  const optionEndpoint = endpointType ?? endpoint;

  return (
    <div className="grid grid-cols-5 gap-6">

@ -126,7 +136,7 @@
              className="flex h-4 w-full"
            />
          </HoverCardTrigger>
          <OptionHover endpoint={conversation?.endpoint ?? ''} type="temp" side={ESide.Left} />
          <OptionHover endpoint={optionEndpoint ?? ''} type="temp" side={ESide.Left} />
        </HoverCard>
        <HoverCard openDelay={300}>
          <HoverCardTrigger className="grid w-full items-center gap-2">

@ -164,7 +174,7 @@
              className="flex h-4 w-full"
            />
          </HoverCardTrigger>
          <OptionHover endpoint={conversation?.endpoint ?? ''} type="topp" side={ESide.Left} />
          <OptionHover endpoint={optionEndpoint ?? ''} type="topp" side={ESide.Left} />
        </HoverCard>

        <HoverCard openDelay={300}>

@ -203,7 +213,7 @@
              className="flex h-4 w-full"
            />
          </HoverCardTrigger>
          <OptionHover endpoint={conversation?.endpoint ?? ''} type="freq" side={ESide.Left} />
          <OptionHover endpoint={optionEndpoint ?? ''} type="freq" side={ESide.Left} />
        </HoverCard>

        <HoverCard openDelay={300}>

@ -242,8 +252,66 @@
              className="flex h-4 w-full"
            />
          </HoverCardTrigger>
          <OptionHover endpoint={conversation?.endpoint ?? ''} type="pres" side={ESide.Left} />
          <OptionHover endpoint={optionEndpoint ?? ''} type="pres" side={ESide.Left} />
        </HoverCard>
        <div className="w-full">
          <div className="mb-2 flex w-full justify-between gap-2">
            <label
              htmlFor="resend-images"
              className="text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70 dark:text-gray-50"
            >
              <small>{localize('com_endpoint_plug_resend_images')}</small>
            </label>
            <label
              htmlFor="image-detail-value"
              className="text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70 dark:text-gray-50"
            >
              <small>Image Detail</small>
            </label>
            <Input
              id="image-detail-value"
              disabled={true}
              value={imageDetail ?? ImageDetail.auto}
              className={cn(
                defaultTextProps,
                optionText,
                'flex rounded-md bg-transparent py-2 text-xs focus:outline-none focus:ring-2 focus:ring-slate-400 focus:ring-offset-2 dark:border-slate-700',
                'pointer-events-none max-h-5 w-12 border-0 group-hover/temp:border-gray-200',
              )}
            />
          </div>
          <div className="flex w-full justify-between gap-2">
            <HoverCard openDelay={500}>
              <HoverCardTrigger>
                <Switch
                  id="resend-images"
                  checked={resendImages ?? false}
                  onCheckedChange={(checked: boolean) => setResendImages(checked)}
                  disabled={readonly}
                  className="flex"
                />
                <OptionHover endpoint={optionEndpoint ?? ''} type="resend" side={ESide.Bottom} />
              </HoverCardTrigger>
            </HoverCard>
            <HoverCard openDelay={500}>
              <HoverCardTrigger className="flex w-[52%] md:w-[125px]">
                <Slider
                  id="image-detail-slider"
                  disabled={readonly}
                  value={[
                    imageDetailNumeric[imageDetail ?? ''] ?? imageDetailNumeric[ImageDetail.auto],
                  ]}
                  onValueChange={(value) => setImageDetail(imageDetailValue[value[0]])}
                  doubleClickHandler={() => setImageDetail(ImageDetail.auto)}
                  max={2}
                  min={0}
                  step={1}
                />
                <OptionHover endpoint={optionEndpoint ?? ''} type="detail" side={ESide.Bottom} />
              </HoverCardTrigger>
            </HoverCard>
          </div>
        </div>
      </div>
    </div>
  );
@ -15,6 +15,8 @@ const openAI = {
  topp: 'com_endpoint_openai_topp',
  freq: 'com_endpoint_openai_freq',
  pres: 'com_endpoint_openai_pres',
  resend: 'com_endpoint_openai_resend',
  detail: 'com_endpoint_openai_detail',
};

const types = {

@ -47,7 +49,7 @@ function OptionHover({ endpoint, type, side }: TOptionHoverProps) {
  }
  return (
    <HoverCardPortal>
      <HoverCardContent side={side} className="w-80 ">
      <HoverCardContent side={side} className="z-[80] w-80">
        <div className="space-y-2">
          <p className="text-sm text-gray-600 dark:text-gray-300">{localize(text)}</p>
        </div>
@ -24,7 +24,7 @@ export default function ExportModal({ open, onOpenChange, conversation }) {

  const { data: messagesTree = null } = useGetMessagesByConvoId(conversation.conversationId ?? '', {
    select: (data) => {
      const dataTree = buildTree(data, false);
      const dataTree = buildTree({ messages: data });
      return dataTree?.length === 0 ? null : dataTree ?? null;
    },
  });
@ -1,7 +1,7 @@
import { useMutation, useQueryClient } from '@tanstack/react-query';
import type { UseMutationResult } from '@tanstack/react-query';
import type {
  FileUploadResponse,
  TFileUpload,
  UploadMutationOptions,
  FileUploadBody,
  DeleteFilesResponse,

@ -23,7 +23,7 @@ import store from '~/store';
export const useUploadImageMutation = (
  options?: UploadMutationOptions,
): UseMutationResult<
  FileUploadResponse, // response data
  TFileUpload, // response data
  unknown, // error
  FileUploadBody, // request
  unknown // context
@ -1,6 +1,18 @@
import { UseQueryOptions, useQuery, QueryObserverResult } from '@tanstack/react-query';
import { QueryKeys, dataService } from 'librechat-data-provider';
import type { TPreset } from 'librechat-data-provider';
import { UseQueryOptions, useQuery, QueryObserverResult } from '@tanstack/react-query';
import type { TPreset, TFile } from 'librechat-data-provider';

export const useGetFiles = <TData = TFile[] | boolean>(
  config?: UseQueryOptions<TFile[], unknown, TData>,
): QueryObserverResult<TData, unknown> => {
  return useQuery<TFile[], unknown, TData>([QueryKeys.files], () => dataService.getFiles(), {
    refetchOnWindowFocus: false,
    refetchOnReconnect: false,
    refetchOnMount: false,
    ...config,
  });
};

export const useGetPresetsQuery = (
  config?: UseQueryOptions<TPreset[]>,
): QueryObserverResult<TPreset[], unknown> => {
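A hedged sketch of how `useGetFiles` can be combined with a `select` transform, mirroring the `mapFiles` usage added elsewhere in this commit (the wrapper hook name is made up for illustration):

```js
// Illustrative only; component and hook names are assumptions.
import { useGetFiles } from '~/data-provider';
import { mapFiles } from '~/utils';

function useFileMap() {
  // `select: mapFiles` turns the TFile[] response into a Record keyed by file_id.
  const { data: fileMap } = useGetFiles({ select: mapFiles });
  return fileMap;
}
```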
@ -1,6 +1,9 @@
import { v4 } from 'uuid';
import debounce from 'lodash/debounce';
import { QueryKeys } from 'librechat-data-provider';
import { useQueryClient } from '@tanstack/react-query';
import { useState, useEffect, useCallback } from 'react';
import type { TFile } from 'librechat-data-provider';
import type { ExtendedFile } from '~/common';
import { useToastContext } from '~/Providers/ToastContext';
import { useChatContext } from '~/Providers/ChatContext';

@ -16,6 +19,7 @@ const totalSizeLimit = maxSize * 1024 * 1024; // 25 MB
const supportedTypes = ['image/jpeg', 'image/jpg', 'image/png', 'image/webp'];

const useFileHandling = () => {
  const queryClient = useQueryClient();
  const { showToast } = useToastContext();
  const [errors, setErrors] = useState<string[]>([]);
  const setError = (error: string) => setErrors((prevErrors) => [...prevErrors, error]);

@ -116,6 +120,9 @@ const useFileHandling = () => {
        filepath: data.filepath,
      });

      const _files = queryClient.getQueryData<TFile[]>([QueryKeys.files]) ?? [];
      queryClient.setQueryData([QueryKeys.files], [..._files, data]);

      setTimeout(() => {
        updateFileById(data.temp_file_id, {
          progress: 1,

@ -126,6 +133,7 @@ const useFileHandling = () => {
          height: data.height,
          width: data.width,
          filename: data.filename,
          source: data.source,
        });
      }, 300);
    },
@ -1,4 +1,5 @@
import { useCallback } from 'react';
import { FileSources } from 'librechat-data-provider';
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
import {
  useSetRecoilState,

@ -122,10 +123,11 @@ const useNewConvo = (index = 0) => {

      if (conversation.conversationId === 'new' && !modelsData) {
        const filesToDelete = Array.from(files.values())
          .filter((file) => file.filepath)
          .filter((file) => file.filepath && file.source)
          .map((file) => ({
            file_id: file.file_id,
            filepath: file.filepath as string,
            source: file.source as FileSources, // Ensure that the source is of type FileSources
          }));

        setFiles(new Map());
@ -162,6 +162,10 @@ export default {
    'Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model\'s likelihood to repeat the same line verbatim.',
  com_endpoint_openai_pres:
    'Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model\'s likelihood to talk about new topics.',
  com_endpoint_openai_resend:
    'Resend all previously attached images. Note: this can significantly increase token cost and you may experience errors with many image attachments.',
  com_endpoint_openai_detail:
    'The resolution for Vision requests. "Low" is cheaper and faster, "High" is more detailed and expensive, and "Auto" will automatically choose between the two based on the image resolution.',
  com_endpoint_openai_custom_name_placeholder: 'Set a custom name for ChatGPT',
  com_endpoint_openai_prompt_prefix_placeholder:
    'Set custom instructions to include in System Message. Default: none',

@ -177,6 +181,7 @@ export default {
  com_endpoint_frequency_penalty: 'Frequency Penalty',
  com_endpoint_presence_penalty: 'Presence Penalty',
  com_endpoint_plug_use_functions: 'Use Functions',
  com_endpoint_plug_resend_images: 'Resend Images',
  com_endpoint_plug_skip_completion: 'Skip Completion',
  com_endpoint_disabled_with_tools: 'disabled with tools',
  com_endpoint_disabled_with_tools_placeholder: 'Disabled with Tools Selected',
@ -6,7 +6,7 @@ import {
  useGetModelsQuery,
  useGetEndpointsQuery,
} from 'librechat-data-provider/react-query';
import { TPreset } from 'librechat-data-provider';
import type { TPreset } from 'librechat-data-provider';
import { useNewConvo, useConfigOverride } from '~/hooks';
import ChatView from '~/components/Chat/ChatView';
import useAuthRedirect from './useAuthRedirect';

@ -15,6 +15,7 @@ import store from '~/store';

export default function ChatRoute() {
  const index = 0;

  useConfigOverride();
  const { conversationId } = useParams();
  const { conversation } = store.useCreateConversationAtom(index);
@ -1,11 +1,12 @@
/* eslint-disable react-hooks/exhaustive-deps */
import { useEffect, useState } from 'react';
import { useRecoilValue, useSetRecoilState } from 'recoil';
import { Outlet, useLocation } from 'react-router-dom';
import { useRecoilValue, useSetRecoilState } from 'recoil';
import { useGetModelsQuery, useGetSearchEnabledQuery } from 'librechat-data-provider/react-query';
import type { ContextType } from '~/common';
import { useAuthContext, useServerStream, useConversation } from '~/hooks';
import { Nav, MobileNav } from '~/components/Nav';
import { useGetFiles } from '~/data-provider';
import store from '~/store';

export default function Root() {

@ -24,6 +25,7 @@ export default function Root() {
  const setIsSearchEnabled = useSetRecoilState(store.isSearchEnabled);
  const setModelsConfig = useSetRecoilState(store.modelsConfig);

  useGetFiles({ enabled: isAuthenticated });
  const searchEnabledQuery = useGetSearchEnabledQuery({ enabled: isAuthenticated });
  const modelsQuery = useGetModelsQuery({ enabled: isAuthenticated && modelsQueryEnabled });
@ -18,7 +18,7 @@ const messages = atom<TMessagesAtom>({
const messagesTree = selector({
  key: 'messagesTree',
  get: ({ get }) => {
    return buildTree(get(messages), false);
    return buildTree({ messages: get(messages) });
  },
});

@ -20,7 +20,7 @@ const searchResultMessages = atom<TMessage[] | null>({
const searchResultMessagesTree = selector({
  key: 'searchResultMessagesTree',
  get: ({ get }) => {
    return buildTree(get(searchResultMessages), true);
    return buildTree({ messages: get(searchResultMessages), groupAll: true });
  },
});
@ -1,11 +1,19 @@
import { TMessage } from 'librechat-data-provider';
import { TFile, TMessage } from 'librechat-data-provider';

const even =
  'w-full border-b border-black/10 dark:border-gray-900/50 text-gray-800 bg-white dark:text-gray-100 group dark:bg-gray-800 hover:bg-gray-100/25 hover:text-gray-700 dark:hover:bg-gray-900 dark:hover:text-gray-200';
const odd =
  'w-full border-b border-black/10 bg-gray-50 dark:border-gray-900/50 text-gray-800 dark:text-gray-100 group bg-gray-100 dark:bg-gray-1000 hover:bg-gray-100/40 hover:text-gray-700 dark:hover:bg-[#3b3d49] dark:hover:text-gray-200';

export default function buildTree(messages: TMessage[] | null, groupAll = false) {
export default function buildTree({
  messages,
  fileMap,
  groupAll = false,
}: {
  messages: TMessage[] | null;
  fileMap?: Record<string, TFile>;
  groupAll?: boolean;
}) {
  if (messages === null) {
    return null;
  }

@ -21,6 +29,12 @@ export default function buildTree(messages: TMessage[] | null, groupAll = false)
  messages.forEach((message) => {
    messageMap[message.messageId] = { ...message, children: [] };

    if (message.files && fileMap) {
      messageMap[message.messageId].files = message.files.map(
        (file) => fileMap[file.file_id] ?? file,
      );
    }

    const parentMessage = messageMap[message.parentMessageId ?? ''];
    if (parentMessage) {
      parentMessage.children.push(messageMap[message.messageId]);
12  client/src/utils/files.ts  Normal file

@@ -0,0 +1,12 @@
+import type { TFile } from 'librechat-data-provider';
+
+/** Maps Files by `file_id` for quick lookup */
+export function mapFiles(files: TFile[]) {
+  const fileMap = {} as Record<string, TFile>;
+
+  for (const file of files) {
+    fileMap[file.file_id] = file;
+  }
+
+  return fileMap;
+}
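Together with the `buildTree` changes above, `mapFiles` lets the client resolve the lightweight `file_id` references stored on each message against the fully fetched file records. A minimal sketch of how the two utilities might compose (import paths and the wiring function are assumptions for illustration, not code from this commit):

```typescript
import { mapFiles } from '~/utils';
import buildTree from '~/utils/buildTree';
import type { TFile, TMessage } from 'librechat-data-provider';

// `files` would come from the new files query, `messages` from the conversation query.
function toMessagesTree(messages: TMessage[] | null, files: TFile[] = []) {
  const fileMap = mapFiles(files);
  // buildTree swaps each message's file reference for the full TFile when one is found.
  return buildTree({ messages, fileMap });
}
```

Messages whose files were since deleted simply keep their stored stub, because `fileMap[file.file_id] ?? file` falls back to the original reference.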
@@ -1,4 +1,5 @@
 export * from './json';
+export * from './files';
 export * from './presets';
 export * from './languages';
 export * from './endpoints';
@@ -12,6 +12,8 @@ export const getPresetIcon = (preset: TPreset, Icon) => {
   });
 };

+type TEndpoints = Array<string | EModelEndpoint>;
+
 export const getPresetTitle = (preset: TPreset) => {
   const {
     endpoint,

@@ -26,12 +28,16 @@ export const getPresetTitle = (preset: TPreset) => {
   let modelInfo = model || '';
   let label = '';

-  if (
-    endpoint &&
-    [EModelEndpoint.azureOpenAI, EModelEndpoint.openAI, EModelEndpoint.custom].includes(endpoint)
-  ) {
+  const usesChatGPTLabel: TEndpoints = [
+    EModelEndpoint.azureOpenAI,
+    EModelEndpoint.openAI,
+    EModelEndpoint.custom,
+  ];
+  const usesModelLabel: TEndpoints = [EModelEndpoint.google, EModelEndpoint.anthropic];
+
+  if (endpoint && usesChatGPTLabel.includes(endpoint)) {
     label = chatGptLabel || '';
-  } else if (endpoint && [EModelEndpoint.google, EModelEndpoint.anthropic].includes(endpoint)) {
+  } else if (endpoint && usesModelLabel.includes(endpoint)) {
     label = modelLabel || '';
   } else if (endpoint === EModelEndpoint.bingAI) {
     modelInfo = jailbreak ? 'Sydney' : modelInfo;
@@ -100,4 +100,20 @@ FIREBASE_APP_ID=1:your_app_id #appId

 - Publish your updated rules

 

+### Configure `fileStrategy` in `librechat.yaml`
+
+Finally, to enable the app to use Firebase, set the following in your `librechat.yaml` config file:
+
+```yaml
+version: 1.0.1
+cache: true
+fileStrategy: "firebase" # This is the field and value you need to add
+endpoints:
+  custom:
+    - name: "Mistral"
+      # Rest of file omitted
+```
+
+For more information about the `librechat.yaml` config file, [see the guide here](../install/configuration/custom_config.md).
@@ -8,6 +8,7 @@ weight: 2

 ---

+  * 🤖[Custom Endpoints](../install/configuration/custom_config.md)
   * 🔌[Plugins](./plugins/index.md)
     * 🔌 [Introduction](./plugins/introduction.md)
     * 🛠️ [Make Your Own](./plugins/make_your_own.md)
@@ -69,20 +69,26 @@ docker-compose up # no need to rebuild

 **Note:** Fields not specifically mentioned as required are optional.

-### 1. Version
+### Version
 - **Key**: `version`
 - **Type**: String
 - **Description**: Specifies the version of the configuration file.
-- **Example**: `version: 1.0.0`
+- **Example**: `version: 1.0.1`
 - **Required**

-### 2. Cache Settings
+### Cache Settings
 - **Key**: `cache`
 - **Type**: Boolean
 - **Description**: Toggles caching on or off. Set to `true` to enable caching.
 - **Example**: `cache: true`

-### 3. Endpoints
+### File Strategy
+- **Key**: `fileStrategy`
+- **Type**: String = "local" | ["firebase"](../../features/firebase.md)
+- **Description**: Determines where to save user uploaded/generated files. Defaults to `"local"` if omitted.
+- **Example**: `fileStrategy: "firebase"`
+
+### Endpoints
 - **Key**: `endpoints`
 - **Type**: Object
 - **Description**: Defines custom API endpoints for the application.

@@ -290,7 +296,7 @@ Custom endpoints share logic with the OpenAI endpoint, and thus have default par

 ## Example Config

 ```yaml
-version: 1.0.0
+version: 1.0.1
 cache: true
 endpoints:
   custom:
@@ -1,5 +1,5 @@
 # Configuration version (required)
-version: 1.0.0
+version: 1.0.1

 # Cache settings: Set to true to enable caching
 cache: true
@@ -1,6 +1,6 @@
 {
   "name": "librechat-data-provider",
-  "version": "0.3.5",
+  "version": "0.3.6",
   "description": "data services for librechat apps",
   "main": "dist/index.js",
   "module": "dist/index.es.js",
@@ -1,5 +1,8 @@
 import { z } from 'zod';
 import { EModelEndpoint, eModelEndpointSchema } from './schemas';
+import { FileSources } from './types/files';
+
+export const fileSourceSchema = z.nativeEnum(FileSources);

 export const endpointSchema = z.object({
   name: z.string().refine((value) => !eModelEndpointSchema.safeParse(value).success, {

@@ -25,6 +28,7 @@ export const endpointSchema = z.object({
 export const configSchema = z.object({
   version: z.string(),
   cache: z.boolean(),
+  fileStrategy: fileSourceSchema.optional(),
   endpoints: z
     .object({
       custom: z.array(endpointSchema.partial()),
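As a quick illustration of the new field, a loaded `librechat.yaml` object could be validated roughly as follows; the import path and the loader that produces `rawConfig` are assumptions, and only the schema behavior shown in this hunk is exercised:

```typescript
import { configSchema } from 'librechat-data-provider';

// Hypothetical object, e.g. the result of parsing librechat.yaml elsewhere.
const rawConfig = {
  version: '1.0.1',
  cache: true,
  fileStrategy: 'firebase', // must be one of the FileSources values, or parsing fails
  endpoints: { custom: [{ name: 'Mistral' }] },
};

const result = configSchema.safeParse(rawConfig);
if (result.success) {
  // The field is optional; treating an absent value as "local" matches the documented default.
  console.log(result.data.fileStrategy ?? 'local');
} else {
  console.error(result.error.issues);
}
```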
@@ -184,3 +188,27 @@ export enum AuthKeys {
   */
  GOOGLE_API_KEY = 'GOOGLE_API_KEY',
 }
+
+/**
+ * Enum for Image Detail Cost.
+ *
+ * **Low Res Fixed Cost:** `85`
+ *
+ * **High Res Calculation:**
+ *
+ * Number of `512px` Tiles * `170` + `85` (Additional Cost)
+ */
+export enum ImageDetailCost {
+  /**
+   * Low resolution is a fixed value.
+   */
+  LOW = 85,
+  /**
+   * High resolution Cost Per Tile
+   */
+  HIGH = 170,
+  /**
+   * Additional Cost added to High Resolution Total Cost
+   */
+  ADDITIONAL = 85,
+}
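The doc comment above spells out the pricing model; a rough sketch of how those constants could be applied is shown below. This is a simplified estimate (it skips the image-resizing steps the commit's `calculateImageTokenCost` presumably performs), and the helper name is made up for illustration:

```typescript
import { ImageDetailCost } from 'librechat-data-provider';

// Simplified token estimate: low detail is a flat cost; high detail counts
// 512px tiles over the (already resized) image and adds the fixed overhead.
function estimateImageTokens(width: number, height: number, detail: 'low' | 'high'): number {
  if (detail === 'low') {
    return ImageDetailCost.LOW;
  }
  const tiles = Math.ceil(width / 512) * Math.ceil(height / 512);
  return tiles * ImageDetailCost.HIGH + ImageDetailCost.ADDITIONAL;
}

// e.g. a 1024x1024 high-detail image: 4 tiles * 170 + 85 = 765 tokens
console.log(estimateImageTokens(1024, 1024, 'high'));
```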
@@ -193,7 +193,11 @@ export const listAssistants = (

 /* Files */

-export const uploadImage = (data: FormData): Promise<f.FileUploadResponse> => {
+export const getFiles = (): Promise<f.TFile[]> => {
+  return request.get(endpoints.files());
+};
+
+export const uploadImage = (data: FormData): Promise<f.TFileUpload> => {
   return request.postMultiPart(endpoints.images(), data);
 };
@@ -16,6 +16,7 @@ export enum QueryKeys {
   assistants = 'assistants',
   assistant = 'assistant',
   endpointsConfigOverride = 'endpointsConfigOverride',
+  files = 'files',
 }

 export enum MutationKeys {
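With `getFiles` and the `files` query key in place, the `useGetFiles` hook referenced in `Root` can be a thin React Query wrapper. The sketch below is an approximation under the assumption that the project's query layer exposes `dataService` and uses TanStack Query; the actual hook in this commit may differ in options and typing:

```typescript
import { useQuery, type UseQueryOptions } from '@tanstack/react-query';
import { dataService, QueryKeys } from 'librechat-data-provider';
import type { TFile } from 'librechat-data-provider';

// Fetches the authenticated user's files; `enabled: isAuthenticated` defers
// the request until login completes, as done in Root.
export function useGetFiles<TData = TFile[]>(
  config?: UseQueryOptions<TFile[], unknown, TData>,
) {
  return useQuery<TFile[], unknown, TData>([QueryKeys.files], () => dataService.getFiles(), {
    refetchOnWindowFocus: false,
    ...config,
  });
}
```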
@@ -52,6 +52,26 @@ export const eModelEndpointSchema = z.nativeEnum(EModelEndpoint);

 export const extendedModelEndpointSchema = z.union([eModelEndpointSchema, z.string()]);

+export enum ImageDetail {
+  low = 'low',
+  auto = 'auto',
+  high = 'high',
+}
+
+export const imageDetailNumeric = {
+  [ImageDetail.low]: 0,
+  [ImageDetail.auto]: 1,
+  [ImageDetail.high]: 2,
+};
+
+export const imageDetailValue = {
+  0: ImageDetail.low,
+  1: ImageDetail.auto,
+  2: ImageDetail.high,
+};
+
+export const eImageDetailSchema = z.nativeEnum(ImageDetail);
+
 export const tPluginAuthConfigSchema = z.object({
   authField: z.string(),
   label: z.string(),
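The two lookup objects let a UI control (such as a three-step slider) round-trip between the enum and a numeric position. For example:

```typescript
import { ImageDetail, imageDetailNumeric, imageDetailValue } from 'librechat-data-provider';

// enum -> slider position
const position = imageDetailNumeric[ImageDetail.auto]; // 1

// slider position -> enum value to store on the conversation
const detail = imageDetailValue[2]; // ImageDetail.high
```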
@@ -140,8 +160,8 @@ export type TMessage = z.input<typeof tMessageSchema> & {
   plugin?: TResPlugin | null;
   plugins?: TResPlugin[];
   files?: {
-    type: string;
     file_id: string;
+    type?: string;
     filename?: string;
     preview?: string;
     filepath?: string;

@@ -184,6 +204,9 @@ export const tConversationSchema = z.object({
   toneStyle: z.string().nullable().optional(),
   maxOutputTokens: z.number().optional(),
   agentOptions: tAgentOptionsSchema.nullable().optional(),
+  /* vision */
+  resendImages: z.boolean().optional(),
+  imageDetail: eImageDetailSchema.optional(),
   /* assistant */
   assistant_id: z.string().optional(),
   thread_id: z.string().optional(),

@@ -234,6 +257,8 @@ export const openAISchema = tConversationSchema
     top_p: true,
     presence_penalty: true,
     frequency_penalty: true,
+    resendImages: true,
+    imageDetail: true,
   })
   .transform((obj) => ({
     ...obj,

@@ -244,6 +269,8 @@ export const openAISchema = tConversationSchema
     top_p: obj.top_p ?? 1,
     presence_penalty: obj.presence_penalty ?? 0,
     frequency_penalty: obj.frequency_penalty ?? 0,
+    resendImages: obj.resendImages ?? false,
+    imageDetail: obj.imageDetail ?? ImageDetail.auto,
   }))
   .catch(() => ({
     model: 'gpt-3.5-turbo',

@@ -253,6 +280,8 @@ export const openAISchema = tConversationSchema
     top_p: 1,
     presence_penalty: 0,
     frequency_penalty: 0,
+    resendImages: false,
+    imageDetail: ImageDetail.auto,
   }));

 export const googleSchema = tConversationSchema

@@ -455,6 +484,8 @@ export const compactOpenAISchema = tConversationSchema
     top_p: true,
     presence_penalty: true,
     frequency_penalty: true,
+    resendImages: true,
+    imageDetail: true,
   })
   .transform((obj: Partial<TConversation>) => {
     const newObj: Partial<TConversation> = { ...obj };

@@ -473,6 +504,12 @@ export const compactOpenAISchema = tConversationSchema
     if (newObj.frequency_penalty === 0) {
       delete newObj.frequency_penalty;
     }
+    if (newObj.resendImages !== true) {
+      delete newObj.resendImages;
+    }
+    if (newObj.imageDetail === ImageDetail.auto) {
+      delete newObj.imageDetail;
+    }

     return removeNullishValues(newObj);
   })
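Taken together, the `openAISchema` changes guarantee the two vision settings always resolve to concrete values when a conversation is parsed, while `compactOpenAISchema` strips them again whenever they still equal those defaults, so stored presets and conversations stay small. A rough sketch of that round trip (import path assumed):

```typescript
import { openAISchema, compactOpenAISchema } from 'librechat-data-provider';

// Parsing fills in the defaults when the fields are missing.
const convo = openAISchema.parse({ model: 'gpt-4-vision-preview' });
// convo.resendImages === false, convo.imageDetail === 'auto'

// The compact schema deletes values that still equal the defaults before persisting.
const compact = compactOpenAISchema.parse(convo);
// compact.resendImages and compact.imageDetail are both removed again here.
```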
@@ -1,13 +1,25 @@
-export type FileUploadResponse = {
+export enum FileSources {
+  local = 'local',
+  firebase = 'firebase',
+  openai = 'openai',
+  s3 = 's3',
+}
+
+export type TFile = {
   message: string;
   file_id: string;
-  temp_file_id: string;
   filepath: string;
   filename: string;
   type: string;
   size: number;
-  height: number;
-  width: number;
+  temp_file_id?: string;
+  source?: FileSources;
+  height?: number;
+  width?: number;
+};
+
+export type TFileUpload = TFile & {
+  temp_file_id: string;
 };

 export type AvatarUploadResponse = {

@@ -20,7 +32,7 @@ export type FileUploadBody = {
 };

 export type UploadMutationOptions = {
-  onSuccess?: (data: FileUploadResponse, variables: FileUploadBody, context?: unknown) => void;
+  onSuccess?: (data: TFileUpload, variables: FileUploadBody, context?: unknown) => void;
   onMutate?: (variables: FileUploadBody) => void | Promise<unknown>;
   onError?: (error: unknown, variables: FileUploadBody, context?: unknown) => void;
 };

@@ -39,6 +51,7 @@ export type DeleteFilesResponse = {
 export type BatchFile = {
   file_id: string;
   filepath: string;
+  source: FileSources;
 };

 export type DeleteFilesBody = {
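Because `BatchFile` now carries a `source`, the client can tell the server which storage strategy each file belongs to when requesting deletion. A hypothetical helper that builds that payload from `TFile` records (the function name and the local fallback are assumptions, not code from this commit):

```typescript
import { FileSources } from 'librechat-data-provider';
import type { BatchFile, TFile } from 'librechat-data-provider';

// Prepare the files portion of a delete request; older records without a
// saved `source` are treated as local uploads.
function toBatchFiles(files: TFile[]): BatchFile[] {
  return files.map((file) => ({
    file_id: file.file_id,
    filepath: file.filepath,
    source: file.source ?? FileSources.local,
  }));
}
```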