Merge branch 'main' into feature/entra-id-azure-integration

victorbjor 2025-10-07 08:34:44 +02:00 committed by GitHub
commit be58d8e4f0
244 changed files with 6722 additions and 3399 deletions


@ -163,10 +163,10 @@ GOOGLE_KEY=user_provided
# GOOGLE_AUTH_HEADER=true
# Gemini API (AI Studio)
-# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite-preview-06-17,gemini-2.0-flash,gemini-2.0-flash-lite
+# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash,gemini-2.0-flash-lite
# Vertex AI
-# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite-preview-06-17,gemini-2.0-flash-001,gemini-2.0-flash-lite-001
+# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite,gemini-2.0-flash-001,gemini-2.0-flash-lite-001
# GOOGLE_TITLE_MODEL=gemini-2.0-flash-lite-001


@ -0,0 +1,78 @@
name: Cache Integration Tests
on:
  pull_request:
    branches:
      - main
      - dev
      - release/*
    paths:
      - 'packages/api/src/cache/**'
      - 'redis-config/**'
      - '.github/workflows/cache-integration-tests.yml'
jobs:
  cache_integration_tests:
    name: Run Cache Integration Tests
    timeout-minutes: 30
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Use Node.js 20.x
        uses: actions/setup-node@v4
        with:
          node-version: 20
          cache: 'npm'
      - name: Install Redis tools
        run: |
          sudo apt-get update
          sudo apt-get install -y redis-server redis-tools
      - name: Start Single Redis Instance
        run: |
          redis-server --daemonize yes --port 6379
          sleep 2
          # Verify single Redis is running
          redis-cli -p 6379 ping || exit 1
      - name: Start Redis Cluster
        working-directory: redis-config
        run: |
          chmod +x start-cluster.sh stop-cluster.sh
          ./start-cluster.sh
          sleep 10
          # Verify cluster is running
          redis-cli -p 7001 cluster info || exit 1
          redis-cli -p 7002 cluster info || exit 1
          redis-cli -p 7003 cluster info || exit 1
      - name: Install dependencies
        run: npm ci
      - name: Build packages
        run: |
          npm run build:data-provider
          npm run build:data-schemas
          npm run build:api
      - name: Run cache integration tests
        working-directory: packages/api
        env:
          NODE_ENV: test
          USE_REDIS: true
          REDIS_URI: redis://127.0.0.1:6379
          REDIS_CLUSTER_URI: redis://127.0.0.1:7001,redis://127.0.0.1:7002,redis://127.0.0.1:7003
        run: npm run test:cache:integration
      - name: Stop Redis Cluster
        if: always()
        working-directory: redis-config
        run: ./stop-cluster.sh || true
      - name: Stop Single Redis Instance
        if: always()
        run: redis-cli -p 6379 shutdown || true


@ -1,4 +1,4 @@
-# v0.8.0-rc4
+# v0.8.0
# Base node image
FROM node:20-alpine AS node


@ -1,5 +1,5 @@
# Dockerfile.multi
-# v0.8.0-rc4
+# v0.8.0
# Base for all builds
FROM node:20-alpine AS base-min


@ -1,4 +1,5 @@
const Anthropic = require('@anthropic-ai/sdk');
+const { logger } = require('@librechat/data-schemas');
const { HttpsProxyAgent } = require('https-proxy-agent');
const {
Constants,
@ -9,7 +10,7 @@ const {
getResponseSender,
validateVisionModel,
} = require('librechat-data-provider');
-const { SplitStreamHandler: _Handler } = require('@librechat/agents');
+const { sleep, SplitStreamHandler: _Handler } = require('@librechat/agents');
const {
Tokenizer,
createFetch,
@ -31,9 +32,7 @@ const {
} = require('./prompts');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
-const { sleep } = require('~/server/utils');
const BaseClient = require('./BaseClient');
-const { logger } = require('~/config');
const HUMAN_PROMPT = '\n\nHuman:';
const AI_PROMPT = '\n\nAssistant:';


@ -1,18 +1,24 @@
const crypto = require('crypto');
const fetch = require('node-fetch');
const { logger } = require('@librechat/data-schemas');
-const { getBalanceConfig } = require('@librechat/api');
const {
-supportsBalanceCheck,
-isAgentsEndpoint,
-isParamEndpoint,
-EModelEndpoint,
+getBalanceConfig,
+encodeAndFormatAudios,
+encodeAndFormatVideos,
+encodeAndFormatDocuments,
+} = require('@librechat/api');
+const {
+Constants,
+ErrorTypes,
ContentTypes,
excludedKeys,
-ErrorTypes,
-Constants,
+EModelEndpoint,
+isParamEndpoint,
+isAgentsEndpoint,
+supportsBalanceCheck,
} = require('librechat-data-provider');
const { getMessages, saveMessage, updateMessage, saveConvo, getConvo } = require('~/models');
+const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { checkBalance } = require('~/models/balanceMethods');
const { truncateToolCallOutputs } = require('./prompts');
const { getFiles } = require('~/models/File');
@ -1198,8 +1204,99 @@ class BaseClient {
return await this.sendCompletion(payload, opts);
}
async addDocuments(message, attachments) {
const documentResult = await encodeAndFormatDocuments(
this.options.req,
attachments,
{
provider: this.options.agent?.provider,
useResponsesApi: this.options.agent?.model_parameters?.useResponsesApi,
},
getStrategyFunctions,
);
message.documents =
documentResult.documents && documentResult.documents.length
? documentResult.documents
: undefined;
return documentResult.files;
}
async addVideos(message, attachments) {
const videoResult = await encodeAndFormatVideos(
this.options.req,
attachments,
this.options.agent.provider,
getStrategyFunctions,
);
message.videos =
videoResult.videos && videoResult.videos.length ? videoResult.videos : undefined;
return videoResult.files;
}
async addAudios(message, attachments) {
const audioResult = await encodeAndFormatAudios(
this.options.req,
attachments,
this.options.agent.provider,
getStrategyFunctions,
);
message.audios =
audioResult.audios && audioResult.audios.length ? audioResult.audios : undefined;
return audioResult.files;
}
async processAttachments(message, attachments) {
const categorizedAttachments = {
images: [],
documents: [],
videos: [],
audios: [],
};
for (const file of attachments) {
if (file.type.startsWith('image/')) {
categorizedAttachments.images.push(file);
} else if (file.type === 'application/pdf') {
categorizedAttachments.documents.push(file);
} else if (file.type.startsWith('video/')) {
categorizedAttachments.videos.push(file);
} else if (file.type.startsWith('audio/')) {
categorizedAttachments.audios.push(file);
}
}
const [imageFiles, documentFiles, videoFiles, audioFiles] = await Promise.all([
categorizedAttachments.images.length > 0
? this.addImageURLs(message, categorizedAttachments.images)
: Promise.resolve([]),
categorizedAttachments.documents.length > 0
? this.addDocuments(message, categorizedAttachments.documents)
: Promise.resolve([]),
categorizedAttachments.videos.length > 0
? this.addVideos(message, categorizedAttachments.videos)
: Promise.resolve([]),
categorizedAttachments.audios.length > 0
? this.addAudios(message, categorizedAttachments.audios)
: Promise.resolve([]),
]);
const allFiles = [...imageFiles, ...documentFiles, ...videoFiles, ...audioFiles];
const seenFileIds = new Set();
const uniqueFiles = [];
for (const file of allFiles) {
if (file.file_id && !seenFileIds.has(file.file_id)) {
seenFileIds.add(file.file_id);
uniqueFiles.push(file);
} else if (!file.file_id) {
uniqueFiles.push(file);
}
}
return uniqueFiles;
}
/**
 *
 * @param {TMessage[]} _messages
 * @returns {Promise<TMessage[]>}
 */
@ -1248,7 +1345,7 @@ class BaseClient {
{},
);
-await this.addImageURLs(message, files, this.visionMode);
+await this.processAttachments(message, files);
this.message_file_map[message.messageId] = files;
return message;


@ -1,4 +1,6 @@
const { google } = require('googleapis');
+const { sleep } = require('@librechat/agents');
+const { logger } = require('@librechat/data-schemas');
const { getModelMaxTokens } = require('@librechat/api');
const { concat } = require('@langchain/core/utils/stream');
const { ChatVertexAI } = require('@langchain/google-vertexai');
@ -22,8 +24,6 @@ const {
} = require('librechat-data-provider');
const { encodeAndFormat } = require('~/server/services/Files/images');
const { spendTokens } = require('~/models/spendTokens');
-const { sleep } = require('~/server/utils');
-const { logger } = require('~/config');
const {
formatMessage,
createContextHandlers,


@ -1,6 +1,6 @@
-const { OllamaClient } = require('./OllamaClient');
+const { logger } = require('@librechat/data-schemas');
const { HttpsProxyAgent } = require('https-proxy-agent');
-const { SplitStreamHandler, CustomOpenAIClient: OpenAI } = require('@librechat/agents');
+const { sleep, SplitStreamHandler, CustomOpenAIClient: OpenAI } = require('@librechat/agents');
const {
isEnabled,
Tokenizer,
@ -36,16 +36,15 @@ const {
createContextHandlers,
} = require('./prompts');
const { encodeAndFormat } = require('~/server/services/Files/images/encode');
-const { addSpaceIfNeeded, sleep } = require('~/server/utils');
const { spendTokens } = require('~/models/spendTokens');
+const { addSpaceIfNeeded } = require('~/server/utils');
const { handleOpenAIErrors } = require('./tools/util');
+const { OllamaClient } = require('./OllamaClient');
const { summaryBuffer } = require('./memory');
const { runTitleChain } = require('./chains');
const { extractBaseURL } = require('~/utils');
const { tokenSplit } = require('./document');
const BaseClient = require('./BaseClient');
-const { createLLM } = require('./llm');
-const { logger } = require('~/config');
class OpenAIClient extends BaseClient {
constructor(apiKey, options = {}) {
@ -616,65 +615,8 @@ class OpenAIClient extends BaseClient {
return (reply ?? '').trim();
}
-async initializeLLM({
-model = openAISettings.model.default,
+initializeLLM() {
+throw new Error('Deprecated');
modelName,
temperature = 0.2,
max_tokens,
streaming,
}) {
const modelOptions = {
modelName: modelName ?? model,
temperature,
user: this.user,
};
if (max_tokens) {
modelOptions.max_tokens = max_tokens;
}
const configOptions = {};
if (this.langchainProxy) {
configOptions.basePath = this.langchainProxy;
}
if (this.useOpenRouter) {
configOptions.basePath = 'https://openrouter.ai/api/v1';
configOptions.baseOptions = {
headers: {
'HTTP-Referer': 'https://librechat.ai',
'X-Title': 'LibreChat',
},
};
}
const { headers } = this.options;
if (headers && typeof headers === 'object' && !Array.isArray(headers)) {
configOptions.baseOptions = {
headers: resolveHeaders({
headers: {
...headers,
...configOptions?.baseOptions?.headers,
},
}),
};
}
if (this.options.proxy) {
configOptions.httpAgent = new HttpsProxyAgent(this.options.proxy);
configOptions.httpsAgent = new HttpsProxyAgent(this.options.proxy);
}
const llm = createLLM({
modelOptions,
configOptions,
openAIApiKey: this.apiKey,
azure: this.azure,
streaming,
});
return llm;
}
/**


@ -1,5 +1,5 @@
const { Readable } = require('stream');
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
class TextStream extends Readable {
constructor(text, options = {}) {


@ -1,5 +1,5 @@
+const { logger } = require('@librechat/data-schemas');
const { ZeroShotAgentOutputParser } = require('langchain/agents');
-const { logger } = require('~/config');
class CustomOutputParser extends ZeroShotAgentOutputParser {
constructor(fields) {


@ -1,7 +1,7 @@
const { z } = require('zod');
+const { logger } = require('@librechat/data-schemas');
const { langPrompt, createTitlePrompt, escapeBraces, getSnippet } = require('../prompts');
const { createStructuredOutputChainFromZod } = require('langchain/chains/openai_functions');
-const { logger } = require('~/config');
const langSchema = z.object({
language: z.string().describe('The language of the input text (full noun, no abbreviations).'),


@ -1,81 +0,0 @@
const { ChatOpenAI } = require('@langchain/openai');
const { isEnabled, sanitizeModelName, constructAzureURL } = require('@librechat/api');
/**
* Creates a new instance of a language model (LLM) for chat interactions.
*
* @param {Object} options - The options for creating the LLM.
* @param {ModelOptions} options.modelOptions - The options specific to the model, including modelName, temperature, presence_penalty, frequency_penalty, and other model-related settings.
* @param {ConfigOptions} options.configOptions - Configuration options for the API requests, including proxy settings and custom headers.
* @param {Callbacks} [options.callbacks] - Callback functions for managing the lifecycle of the LLM, including token buffers, context, and initial message count.
* @param {boolean} [options.streaming=false] - Determines if the LLM should operate in streaming mode.
* @param {string} options.openAIApiKey - The API key for OpenAI, used for authentication.
* @param {AzureOptions} [options.azure={}] - Optional Azure-specific configurations. If provided, Azure configurations take precedence over OpenAI configurations.
*
* @returns {ChatOpenAI} An instance of the ChatOpenAI class, configured with the provided options.
*
* @example
* const llm = createLLM({
* modelOptions: { modelName: 'gpt-4o-mini', temperature: 0.2 },
* configOptions: { basePath: 'https://example.api/path' },
* callbacks: { onMessage: handleMessage },
* openAIApiKey: 'your-api-key'
* });
*/
function createLLM({
modelOptions,
configOptions,
callbacks,
streaming = false,
openAIApiKey,
azure = {},
}) {
let credentials = { openAIApiKey };
let configuration = {
apiKey: openAIApiKey,
...(configOptions.basePath && { baseURL: configOptions.basePath }),
};
/** @type {AzureOptions} */
let azureOptions = {};
if (azure) {
const useModelName = isEnabled(process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME);
credentials = {};
configuration = {};
azureOptions = azure;
azureOptions.azureOpenAIApiDeploymentName = useModelName
? sanitizeModelName(modelOptions.modelName)
: azureOptions.azureOpenAIApiDeploymentName;
}
if (azure && process.env.AZURE_OPENAI_DEFAULT_MODEL) {
modelOptions.modelName = process.env.AZURE_OPENAI_DEFAULT_MODEL;
}
if (azure && configOptions.basePath) {
const azureURL = constructAzureURL({
baseURL: configOptions.basePath,
azureOptions,
});
azureOptions.azureOpenAIBasePath = azureURL.split(
`/${azureOptions.azureOpenAIApiDeploymentName}`,
)[0];
}
return new ChatOpenAI(
{
streaming,
credentials,
configuration,
...azureOptions,
...modelOptions,
...credentials,
callbacks,
},
configOptions,
);
}
module.exports = createLLM;


@ -1,7 +1,5 @@
-const createLLM = require('./createLLM');
const createCoherePayload = require('./createCoherePayload');
module.exports = {
-createLLM,
createCoherePayload,
};


@ -1,31 +0,0 @@
require('dotenv').config();
const { ChatOpenAI } = require('@langchain/openai');
const { getBufferString, ConversationSummaryBufferMemory } = require('langchain/memory');
const chatPromptMemory = new ConversationSummaryBufferMemory({
llm: new ChatOpenAI({ modelName: 'gpt-4o-mini', temperature: 0 }),
maxTokenLimit: 10,
returnMessages: true,
});
(async () => {
await chatPromptMemory.saveContext({ input: 'hi my name\'s Danny' }, { output: 'whats up' });
await chatPromptMemory.saveContext({ input: 'not much you' }, { output: 'not much' });
await chatPromptMemory.saveContext(
{ input: 'are you excited for the olympics?' },
{ output: 'not really' },
);
// We can also utilize the predict_new_summary method directly.
const messages = await chatPromptMemory.chatHistory.getMessages();
console.log('MESSAGES\n\n');
console.log(JSON.stringify(messages));
const previous_summary = '';
const predictSummary = await chatPromptMemory.predictNewSummary(messages, previous_summary);
console.log('SUMMARY\n\n');
console.log(JSON.stringify(getBufferString([{ role: 'system', content: predictSummary }])));
// const { history } = await chatPromptMemory.loadMemoryVariables({});
// console.log('HISTORY\n\n');
// console.log(JSON.stringify(history));
})();


@ -1,7 +1,7 @@
+const { logger } = require('@librechat/data-schemas');
const { ConversationSummaryBufferMemory, ChatMessageHistory } = require('langchain/memory');
const { formatLangChainMessages, SUMMARY_PROMPT } = require('../prompts');
const { predictNewSummary } = require('../chains');
-const { logger } = require('~/config');
const createSummaryBufferMemory = ({ llm, prompt, messages, ...rest }) => {
const chatHistory = new ChatMessageHistory(messages);


@ -1,4 +1,4 @@
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
/**
 * The `addImages` function corrects any erroneous image URLs in the `responseMessage.text`


@ -1,7 +1,7 @@
const { z } = require('zod');
const { Tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
const { SearchClient, AzureKeyCredential } = require('@azure/search-documents');
-const { logger } = require('~/config');
class AzureAISearch extends Tool {
// Constants for default values
@ -18,7 +18,7 @@ class AzureAISearch extends Tool {
super();
this.name = 'azure-ai-search';
this.description =
-'Use the \'azure-ai-search\' tool to retrieve search results relevant to your input';
+"Use the 'azure-ai-search' tool to retrieve search results relevant to your input";
/* Used to initialize the Tool without necessary variables. */
this.override = fields.override ?? false;


@ -1,9 +1,8 @@
const { z } = require('zod');
const path = require('path');
const OpenAI = require('openai');
-const fetch = require('node-fetch');
const { v4: uuidv4 } = require('uuid');
-const { ProxyAgent } = require('undici');
+const { ProxyAgent, fetch } = require('undici');
const { Tool } = require('@langchain/core/tools');
const { logger } = require('@librechat/data-schemas');
const { getImageBasename } = require('@librechat/api');


@ -3,12 +3,12 @@ const axios = require('axios');
const fetch = require('node-fetch');
const { v4: uuidv4 } = require('uuid');
const { Tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { FileContext, ContentTypes } = require('librechat-data-provider');
-const { logger } = require('~/config');
const displayMessage =
-'Flux displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
+"Flux displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";
/**
 * FluxAPI - A tool for generating high-quality images from text prompts using the Flux API.


@ -6,9 +6,9 @@ const axios = require('axios');
const sharp = require('sharp');
const { v4: uuidv4 } = require('uuid');
const { Tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
const { FileContext, ContentTypes } = require('librechat-data-provider');
const paths = require('~/config/paths');
-const { logger } = require('~/config');
const displayMessage =
"Stable Diffusion displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";


@ -1,7 +1,7 @@
const { z } = require('zod');
const { Tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
const { getEnvironmentVariable } = require('@langchain/core/utils/env');
-const { logger } = require('~/config');
/**
 * Tool for the Traversaal AI search API, Ares.
@ -21,7 +21,7 @@ class TraversaalSearch extends Tool {
query: z
.string()
.describe(
-'A properly written sentence to be interpreted by an AI to search the web according to the user\'s request.',
+"A properly written sentence to be interpreted by an AI to search the web according to the user's request.",
),
});
@ -38,7 +38,6 @@ class TraversaalSearch extends Tool {
return apiKey;
}
-// eslint-disable-next-line no-unused-vars
async _call({ query }, _runManager) {
const body = {
query: [query],


@ -1,8 +1,8 @@
/* eslint-disable no-useless-escape */
-const axios = require('axios');
const { z } = require('zod');
+const axios = require('axios');
const { Tool } = require('@langchain/core/tools');
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
class WolframAlphaAPI extends Tool {
constructor(fields) {


@ -1,5 +1,5 @@
const OpenAI = require('openai');
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
/**
 * Handles errors that may occur when making requests to OpenAI's API.


@ -30,7 +30,6 @@ jest.mock('~/server/services/Config', () => ({
}),
}));
-const { BaseLLM } = require('@langchain/openai');
const { Calculator } = require('@langchain/community/tools/calculator');
const { User } = require('~/db/models');
@ -172,7 +171,6 @@ describe('Tool Handlers', () => {
beforeAll(async () => {
const toolMap = await loadTools({
user: fakeUser._id,
-model: BaseLLM,
tools: sampleTools,
returnMap: true,
useSpecs: true,
@ -266,7 +264,6 @@ describe('Tool Handlers', () => {
it('returns an empty object when no tools are requested', async () => {
toolFunctions = await loadTools({
user: fakeUser._id,
-model: BaseLLM,
returnMap: true,
useSpecs: true,
});
@ -276,7 +273,6 @@ describe('Tool Handlers', () => {
process.env.SD_WEBUI_URL = mockCredential;
toolFunctions = await loadTools({
user: fakeUser._id,
-model: BaseLLM,
tools: ['stable-diffusion'],
functions: true,
returnMap: true,


@ -1,108 +0,0 @@
const KeyvRedis = require('@keyv/redis').default;
const { Keyv } = require('keyv');
const { RedisStore } = require('rate-limit-redis');
const { Time } = require('librechat-data-provider');
const { logger } = require('@librechat/data-schemas');
const { RedisStore: ConnectRedis } = require('connect-redis');
const MemoryStore = require('memorystore')(require('express-session'));
const { keyvRedisClient, ioredisClient, GLOBAL_PREFIX_SEPARATOR } = require('./redisClients');
const { cacheConfig } = require('./cacheConfig');
const { violationFile } = require('./keyvFiles');
/**
* Creates a cache instance using Redis or a fallback store. Suitable for general caching needs.
* @param {string} namespace - The cache namespace.
* @param {number} [ttl] - Time to live for cache entries.
* @param {object} [fallbackStore] - Optional fallback store if Redis is not used.
* @returns {Keyv} Cache instance.
*/
const standardCache = (namespace, ttl = undefined, fallbackStore = undefined) => {
if (
cacheConfig.USE_REDIS &&
!cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES?.includes(namespace)
) {
try {
const keyvRedis = new KeyvRedis(keyvRedisClient);
const cache = new Keyv(keyvRedis, { namespace, ttl });
keyvRedis.namespace = cacheConfig.REDIS_KEY_PREFIX;
keyvRedis.keyPrefixSeparator = GLOBAL_PREFIX_SEPARATOR;
cache.on('error', (err) => {
logger.error(`Cache error in namespace ${namespace}:`, err);
});
return cache;
} catch (err) {
logger.error(`Failed to create Redis cache for namespace ${namespace}:`, err);
throw err;
}
}
if (fallbackStore) return new Keyv({ store: fallbackStore, namespace, ttl });
return new Keyv({ namespace, ttl });
};
/**
* Creates a cache instance for storing violation data.
* Uses a file-based fallback store if Redis is not enabled.
* @param {string} namespace - The cache namespace for violations.
* @param {number} [ttl] - Time to live for cache entries.
* @returns {Keyv} Cache instance for violations.
*/
const violationCache = (namespace, ttl = undefined) => {
return standardCache(`violations:${namespace}`, ttl, violationFile);
};
/**
* Creates a session cache instance using Redis or in-memory store.
* @param {string} namespace - The session namespace.
* @param {number} [ttl] - Time to live for session entries.
* @returns {MemoryStore | ConnectRedis} Session store instance.
*/
const sessionCache = (namespace, ttl = undefined) => {
namespace = namespace.endsWith(':') ? namespace : `${namespace}:`;
if (!cacheConfig.USE_REDIS) return new MemoryStore({ ttl, checkPeriod: Time.ONE_DAY });
const store = new ConnectRedis({ client: ioredisClient, ttl, prefix: namespace });
if (ioredisClient) {
ioredisClient.on('error', (err) => {
logger.error(`Session store Redis error for namespace ${namespace}:`, err);
});
}
return store;
};
/**
* Creates a rate limiter cache using Redis.
* @param {string} prefix - The key prefix for rate limiting.
* @returns {RedisStore|undefined} RedisStore instance or undefined if Redis is not used.
*/
const limiterCache = (prefix) => {
if (!prefix) throw new Error('prefix is required');
if (!cacheConfig.USE_REDIS) return undefined;
prefix = prefix.endsWith(':') ? prefix : `${prefix}:`;
try {
if (!ioredisClient) {
logger.warn(`Redis client not available for rate limiter with prefix ${prefix}`);
return undefined;
}
return new RedisStore({ sendCommand, prefix });
} catch (err) {
logger.error(`Failed to create Redis rate limiter for prefix ${prefix}:`, err);
return undefined;
}
};
const sendCommand = (...args) => {
if (!ioredisClient) {
logger.warn('Redis client not available for command execution');
return Promise.reject(new Error('Redis client not available'));
}
return ioredisClient.call(...args).catch((err) => {
logger.error('Redis command execution failed:', err);
throw err;
});
};
module.exports = { standardCache, sessionCache, violationCache, limiterCache };


@ -1,432 +0,0 @@
const { Time } = require('librechat-data-provider');
// Mock dependencies first
const mockKeyvRedis = {
namespace: '',
keyPrefixSeparator: '',
};
const mockKeyv = jest.fn().mockReturnValue({
mock: 'keyv',
on: jest.fn(),
});
const mockConnectRedis = jest.fn().mockReturnValue({ mock: 'connectRedis' });
const mockMemoryStore = jest.fn().mockReturnValue({ mock: 'memoryStore' });
const mockRedisStore = jest.fn().mockReturnValue({ mock: 'redisStore' });
const mockIoredisClient = {
call: jest.fn(),
on: jest.fn(),
};
const mockKeyvRedisClient = {};
const mockViolationFile = {};
// Mock modules before requiring the main module
jest.mock('@keyv/redis', () => ({
default: jest.fn().mockImplementation(() => mockKeyvRedis),
}));
jest.mock('keyv', () => ({
Keyv: mockKeyv,
}));
jest.mock('./cacheConfig', () => ({
cacheConfig: {
USE_REDIS: false,
REDIS_KEY_PREFIX: 'test',
FORCED_IN_MEMORY_CACHE_NAMESPACES: [],
},
}));
jest.mock('./redisClients', () => ({
keyvRedisClient: mockKeyvRedisClient,
ioredisClient: mockIoredisClient,
GLOBAL_PREFIX_SEPARATOR: '::',
}));
jest.mock('./keyvFiles', () => ({
violationFile: mockViolationFile,
}));
jest.mock('connect-redis', () => ({ RedisStore: mockConnectRedis }));
jest.mock('memorystore', () => jest.fn(() => mockMemoryStore));
jest.mock('rate-limit-redis', () => ({
RedisStore: mockRedisStore,
}));
jest.mock('@librechat/data-schemas', () => ({
logger: {
error: jest.fn(),
warn: jest.fn(),
info: jest.fn(),
},
}));
// Import after mocking
const { standardCache, sessionCache, violationCache, limiterCache } = require('./cacheFactory');
const { cacheConfig } = require('./cacheConfig');
describe('cacheFactory', () => {
beforeEach(() => {
jest.clearAllMocks();
// Reset cache config mock
cacheConfig.USE_REDIS = false;
cacheConfig.REDIS_KEY_PREFIX = 'test';
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = [];
});
describe('redisCache', () => {
it('should create Redis cache when USE_REDIS is true', () => {
cacheConfig.USE_REDIS = true;
const namespace = 'test-namespace';
const ttl = 3600;
standardCache(namespace, ttl);
expect(require('@keyv/redis').default).toHaveBeenCalledWith(mockKeyvRedisClient);
expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl });
expect(mockKeyvRedis.namespace).toBe(cacheConfig.REDIS_KEY_PREFIX);
expect(mockKeyvRedis.keyPrefixSeparator).toBe('::');
});
it('should create Redis cache with undefined ttl when not provided', () => {
cacheConfig.USE_REDIS = true;
const namespace = 'test-namespace';
standardCache(namespace);
expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl: undefined });
});
it('should use fallback store when USE_REDIS is false and fallbackStore is provided', () => {
cacheConfig.USE_REDIS = false;
const namespace = 'test-namespace';
const ttl = 3600;
const fallbackStore = { some: 'store' };
standardCache(namespace, ttl, fallbackStore);
expect(mockKeyv).toHaveBeenCalledWith({ store: fallbackStore, namespace, ttl });
});
it('should create default Keyv instance when USE_REDIS is false and no fallbackStore', () => {
cacheConfig.USE_REDIS = false;
const namespace = 'test-namespace';
const ttl = 3600;
standardCache(namespace, ttl);
expect(mockKeyv).toHaveBeenCalledWith({ namespace, ttl });
});
it('should handle namespace and ttl as undefined', () => {
cacheConfig.USE_REDIS = false;
standardCache();
expect(mockKeyv).toHaveBeenCalledWith({ namespace: undefined, ttl: undefined });
});
it('should use fallback when namespace is in FORCED_IN_MEMORY_CACHE_NAMESPACES', () => {
cacheConfig.USE_REDIS = true;
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = ['forced-memory'];
const namespace = 'forced-memory';
const ttl = 3600;
standardCache(namespace, ttl);
expect(require('@keyv/redis').default).not.toHaveBeenCalled();
expect(mockKeyv).toHaveBeenCalledWith({ namespace, ttl });
});
it('should use Redis when namespace is not in FORCED_IN_MEMORY_CACHE_NAMESPACES', () => {
cacheConfig.USE_REDIS = true;
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = ['other-namespace'];
const namespace = 'test-namespace';
const ttl = 3600;
standardCache(namespace, ttl);
expect(require('@keyv/redis').default).toHaveBeenCalledWith(mockKeyvRedisClient);
expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl });
});
it('should throw error when Redis cache creation fails', () => {
cacheConfig.USE_REDIS = true;
const namespace = 'test-namespace';
const ttl = 3600;
const testError = new Error('Redis connection failed');
const KeyvRedis = require('@keyv/redis').default;
KeyvRedis.mockImplementationOnce(() => {
throw testError;
});
expect(() => standardCache(namespace, ttl)).toThrow('Redis connection failed');
const { logger } = require('@librechat/data-schemas');
expect(logger.error).toHaveBeenCalledWith(
`Failed to create Redis cache for namespace ${namespace}:`,
testError,
);
expect(mockKeyv).not.toHaveBeenCalled();
});
});
describe('violationCache', () => {
it('should create violation cache with prefixed namespace', () => {
const namespace = 'test-violations';
const ttl = 7200;
// We can't easily mock the internal redisCache call since it's in the same module
// But we can test that the function executes without throwing
expect(() => violationCache(namespace, ttl)).not.toThrow();
});
it('should create violation cache with undefined ttl', () => {
const namespace = 'test-violations';
violationCache(namespace);
// The function should call redisCache with violations: prefixed namespace
// Since we can't easily mock the internal redisCache call, we test the behavior
expect(() => violationCache(namespace)).not.toThrow();
});
it('should handle undefined namespace', () => {
expect(() => violationCache(undefined)).not.toThrow();
});
});
describe('sessionCache', () => {
it('should return MemoryStore when USE_REDIS is false', () => {
cacheConfig.USE_REDIS = false;
const namespace = 'sessions';
const ttl = 86400;
const result = sessionCache(namespace, ttl);
expect(mockMemoryStore).toHaveBeenCalledWith({ ttl, checkPeriod: Time.ONE_DAY });
expect(result).toBe(mockMemoryStore());
});
it('should return ConnectRedis when USE_REDIS is true', () => {
cacheConfig.USE_REDIS = true;
const namespace = 'sessions';
const ttl = 86400;
const result = sessionCache(namespace, ttl);
expect(mockConnectRedis).toHaveBeenCalledWith({
client: mockIoredisClient,
ttl,
prefix: `${namespace}:`,
});
expect(result).toBe(mockConnectRedis());
});
it('should add colon to namespace if not present', () => {
cacheConfig.USE_REDIS = true;
const namespace = 'sessions';
sessionCache(namespace);
expect(mockConnectRedis).toHaveBeenCalledWith({
client: mockIoredisClient,
ttl: undefined,
prefix: 'sessions:',
});
});
it('should not add colon to namespace if already present', () => {
cacheConfig.USE_REDIS = true;
const namespace = 'sessions:';
sessionCache(namespace);
expect(mockConnectRedis).toHaveBeenCalledWith({
client: mockIoredisClient,
ttl: undefined,
prefix: 'sessions:',
});
});
it('should handle undefined ttl', () => {
cacheConfig.USE_REDIS = false;
const namespace = 'sessions';
sessionCache(namespace);
expect(mockMemoryStore).toHaveBeenCalledWith({
ttl: undefined,
checkPeriod: Time.ONE_DAY,
});
});
it('should throw error when ConnectRedis constructor fails', () => {
cacheConfig.USE_REDIS = true;
const namespace = 'sessions';
const ttl = 86400;
// Mock ConnectRedis to throw an error during construction
const redisError = new Error('Redis connection failed');
mockConnectRedis.mockImplementationOnce(() => {
throw redisError;
});
// The error should propagate up, not be caught
expect(() => sessionCache(namespace, ttl)).toThrow('Redis connection failed');
// Verify that MemoryStore was NOT used as fallback
expect(mockMemoryStore).not.toHaveBeenCalled();
});
it('should register error handler but let errors propagate to Express', () => {
cacheConfig.USE_REDIS = true;
const namespace = 'sessions';
// Create a mock session store with middleware methods
const mockSessionStore = {
get: jest.fn(),
set: jest.fn(),
destroy: jest.fn(),
};
mockConnectRedis.mockReturnValue(mockSessionStore);
const store = sessionCache(namespace);
// Verify error handler was registered
expect(mockIoredisClient.on).toHaveBeenCalledWith('error', expect.any(Function));
// Get the error handler
const errorHandler = mockIoredisClient.on.mock.calls.find((call) => call[0] === 'error')[1];
// Simulate an error from Redis during a session operation
const redisError = new Error('Socket closed unexpectedly');
// The error handler should log but not swallow the error
const { logger } = require('@librechat/data-schemas');
errorHandler(redisError);
expect(logger.error).toHaveBeenCalledWith(
`Session store Redis error for namespace ${namespace}::`,
redisError,
);
// Now simulate what happens when session middleware tries to use the store
const callback = jest.fn();
mockSessionStore.get.mockImplementation((sid, cb) => {
cb(new Error('Redis connection lost'));
});
// Call the store's get method (as Express session would)
store.get('test-session-id', callback);
// The error should be passed to the callback, not swallowed
expect(callback).toHaveBeenCalledWith(new Error('Redis connection lost'));
});
it('should handle null ioredisClient gracefully', () => {
cacheConfig.USE_REDIS = true;
const namespace = 'sessions';
// Temporarily set ioredisClient to null (simulating connection not established)
const originalClient = require('./redisClients').ioredisClient;
require('./redisClients').ioredisClient = null;
// ConnectRedis might accept null client but would fail on first use
// The important thing is it doesn't throw uncaught exceptions during construction
const store = sessionCache(namespace);
expect(store).toBeDefined();
// Restore original client
require('./redisClients').ioredisClient = originalClient;
});
});
describe('limiterCache', () => {
it('should return undefined when USE_REDIS is false', () => {
cacheConfig.USE_REDIS = false;
const result = limiterCache('prefix');
expect(result).toBeUndefined();
});
it('should return RedisStore when USE_REDIS is true', () => {
cacheConfig.USE_REDIS = true;
const result = limiterCache('rate-limit');
expect(mockRedisStore).toHaveBeenCalledWith({
sendCommand: expect.any(Function),
prefix: `rate-limit:`,
});
expect(result).toBe(mockRedisStore());
});
it('should add colon to prefix if not present', () => {
cacheConfig.USE_REDIS = true;
limiterCache('rate-limit');
expect(mockRedisStore).toHaveBeenCalledWith({
sendCommand: expect.any(Function),
prefix: 'rate-limit:',
});
});
it('should not add colon to prefix if already present', () => {
cacheConfig.USE_REDIS = true;
limiterCache('rate-limit:');
expect(mockRedisStore).toHaveBeenCalledWith({
sendCommand: expect.any(Function),
prefix: 'rate-limit:',
});
});
it('should pass sendCommand function that calls ioredisClient.call', async () => {
cacheConfig.USE_REDIS = true;
mockIoredisClient.call.mockResolvedValue('test-value');
limiterCache('rate-limit');
const sendCommandCall = mockRedisStore.mock.calls[0][0];
const sendCommand = sendCommandCall.sendCommand;
// Test that sendCommand properly delegates to ioredisClient.call
const args = ['GET', 'test-key'];
const result = await sendCommand(...args);
expect(mockIoredisClient.call).toHaveBeenCalledWith(...args);
expect(result).toBe('test-value');
});
it('should handle sendCommand errors properly', async () => {
cacheConfig.USE_REDIS = true;
// Mock the call method to reject with an error
const testError = new Error('Redis error');
mockIoredisClient.call.mockRejectedValue(testError);
limiterCache('rate-limit');
const sendCommandCall = mockRedisStore.mock.calls[0][0];
const sendCommand = sendCommandCall.sendCommand;
// Test that sendCommand properly handles errors
const args = ['GET', 'test-key'];
await expect(sendCommand(...args)).rejects.toThrow('Redis error');
expect(mockIoredisClient.call).toHaveBeenCalledWith(...args);
});
it('should handle undefined prefix', () => {
cacheConfig.USE_REDIS = true;
expect(() => limiterCache()).toThrow('prefix is required');
});
});
});


@ -1,5 +1,5 @@
+const { isEnabled } = require('@librechat/api');
const { Time, CacheKeys } = require('librechat-data-provider');
-const { isEnabled } = require('~/server/utils');
const getLogStores = require('./getLogStores');
const { USE_REDIS, LIMIT_CONCURRENT_MESSAGES } = process.env ?? {};


@ -1,9 +1,13 @@
-const { cacheConfig } = require('./cacheConfig');
const { Keyv } = require('keyv');
-const { CacheKeys, ViolationTypes, Time } = require('librechat-data-provider');
+const { Time, CacheKeys, ViolationTypes } = require('librechat-data-provider');
-const { logFile } = require('./keyvFiles');
-const keyvMongo = require('./keyvMongo');
-const { standardCache, sessionCache, violationCache } = require('./cacheFactory');
+const {
+logFile,
+keyvMongo,
+cacheConfig,
+sessionCache,
+standardCache,
+violationCache,
+} = require('@librechat/api');
const namespaces = {
[ViolationTypes.GENERAL]: new Keyv({ store: logFile, namespace: 'violations' }),

api/cache/index.js

@ -1,5 +1,4 @@
-const keyvFiles = require('./keyvFiles');
const getLogStores = require('./getLogStores');
const logViolation = require('./logViolation');
-module.exports = { ...keyvFiles, getLogStores, logViolation };
+module.exports = { getLogStores, logViolation };


@ -1,9 +0,0 @@
const { KeyvFile } = require('keyv-file');
const logFile = new KeyvFile({ filename: './data/logs.json' }).setMaxListeners(20);
const violationFile = new KeyvFile({ filename: './data/violations.json' }).setMaxListeners(20);
module.exports = {
logFile,
violationFile,
};


@ -1,4 +1,4 @@
-const { isEnabled } = require('~/server/utils');
+const { isEnabled } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider');
const getLogStores = require('./getLogStores');
const banViolation = require('./banViolation');


@ -29,12 +29,64 @@ class MeiliSearchClient {
}
}
/**
* Deletes documents from MeiliSearch index that are missing the user field
* @param {import('meilisearch').Index} index - MeiliSearch index instance
* @param {string} indexName - Name of the index for logging
* @returns {Promise<number>} - Number of documents deleted
*/
async function deleteDocumentsWithoutUserField(index, indexName) {
let deletedCount = 0;
let offset = 0;
const batchSize = 1000;
try {
while (true) {
const searchResult = await index.search('', {
limit: batchSize,
offset: offset,
});
if (searchResult.hits.length === 0) {
break;
}
const idsToDelete = searchResult.hits.filter((hit) => !hit.user).map((hit) => hit.id);
if (idsToDelete.length > 0) {
logger.info(
`[indexSync] Deleting ${idsToDelete.length} documents without user field from ${indexName} index`,
);
await index.deleteDocuments(idsToDelete);
deletedCount += idsToDelete.length;
}
if (searchResult.hits.length < batchSize) {
break;
}
offset += batchSize;
}
if (deletedCount > 0) {
logger.info(`[indexSync] Deleted ${deletedCount} orphaned documents from ${indexName} index`);
}
} catch (error) {
logger.error(`[indexSync] Error deleting documents from ${indexName}:`, error);
}
return deletedCount;
}
/**
 * Ensures indexes have proper filterable attributes configured and checks if documents have user field
 * @param {MeiliSearch} client - MeiliSearch client instance
- * @returns {Promise<boolean>} - true if configuration was updated or re-sync is needed
+ * @returns {Promise<{settingsUpdated: boolean, orphanedDocsFound: boolean}>} - Status of what was done
 */
async function ensureFilterableAttributes(client) {
+let settingsUpdated = false;
+let hasOrphanedDocs = false;
try {
// Check and update messages index
try {
@ -47,16 +99,17 @@ async function ensureFilterableAttributes(client) {
filterableAttributes: ['user'],
});
logger.info('[indexSync] Messages index configured for user filtering');
-logger.info('[indexSync] Index configuration updated. Full re-sync will be triggered.');
-return true;
+settingsUpdated = true;
}
// Check if existing documents have user field indexed
try {
const searchResult = await messagesIndex.search('', { limit: 1 });
if (searchResult.hits.length > 0 && !searchResult.hits[0].user) {
-logger.info('[indexSync] Existing messages missing user field, re-sync needed');
-return true;
+logger.info(
+'[indexSync] Existing messages missing user field, will clean up orphaned documents...',
+);
+hasOrphanedDocs = true;
}
} catch (searchError) {
logger.debug('[indexSync] Could not check message documents:', searchError.message);
@ -78,16 +131,17 @@ async function ensureFilterableAttributes(client) {
filterableAttributes: ['user'],
});
logger.info('[indexSync] Convos index configured for user filtering');
-logger.info('[indexSync] Index configuration updated. Full re-sync will be triggered.');
-return true;
+settingsUpdated = true;
}
// Check if existing documents have user field indexed
try {
const searchResult = await convosIndex.search('', { limit: 1 });
if (searchResult.hits.length > 0 && !searchResult.hits[0].user) {
-logger.info('[indexSync] Existing conversations missing user field, re-sync needed');
-return true;
+logger.info(
+'[indexSync] Existing conversations missing user field, will clean up orphaned documents...',
+);
+hasOrphanedDocs = true;
}
} catch (searchError) {
logger.debug('[indexSync] Could not check conversation documents:', searchError.message);
@ -97,101 +151,143 @@ async function ensureFilterableAttributes(client) {
logger.warn('[indexSync] Could not check/update convos index settings:', error.message);
}
}
// If either index has orphaned documents, clean them up (but don't force resync)
if (hasOrphanedDocs) {
try {
const messagesIndex = client.index('messages');
await deleteDocumentsWithoutUserField(messagesIndex, 'messages');
} catch (error) {
logger.debug('[indexSync] Could not clean up messages:', error.message);
}
try {
const convosIndex = client.index('convos');
await deleteDocumentsWithoutUserField(convosIndex, 'convos');
} catch (error) {
logger.debug('[indexSync] Could not clean up convos:', error.message);
}
logger.info('[indexSync] Orphaned documents cleaned up without forcing resync.');
}
if (settingsUpdated) {
logger.info('[indexSync] Index settings updated. Full re-sync will be triggered.');
}
} catch (error) {
logger.error('[indexSync] Error ensuring filterable attributes:', error);
}
-return false;
+return { settingsUpdated, orphanedDocsFound: hasOrphanedDocs };
}
/**
 * Performs the actual sync operations for messages and conversations
+ * @param {FlowStateManager} flowManager - Flow state manager instance
+ * @param {string} flowId - Flow identifier
+ * @param {string} flowType - Flow type
 */
-async function performSync() {
+async function performSync(flowManager, flowId, flowType) {
-const client = MeiliSearchClient.getInstance();
+try {
+const client = MeiliSearchClient.getInstance();
const { status } = await client.health();
if (status !== 'available') {
throw new Error('Meilisearch not available');
}
if (indexingDisabled === true) {
logger.info('[indexSync] Indexing is disabled, skipping...');
return { messagesSync: false, convosSync: false };
}
/** Ensures indexes have proper filterable attributes configured */
const configUpdated = await ensureFilterableAttributes(client);
let messagesSync = false;
let convosSync = false;
// If configuration was just updated or documents are missing user field, force a full re-sync
if (configUpdated) {
logger.info('[indexSync] Forcing full re-sync to ensure user field is properly indexed...');
// Reset sync flags to force full re-sync
await Message.collection.updateMany({ _meiliIndex: true }, { $set: { _meiliIndex: false } });
await Conversation.collection.updateMany(
{ _meiliIndex: true },
{ $set: { _meiliIndex: false } },
);
}
// Check if we need to sync messages
const messageProgress = await Message.getSyncProgress();
if (!messageProgress.isComplete || configUpdated) {
logger.info(
`[indexSync] Messages need syncing: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments} indexed`,
);
// Check if we should do a full sync or incremental
const messageCount = await Message.countDocuments();
const messagesIndexed = messageProgress.totalProcessed;
const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
if (messageCount - messagesIndexed > syncThreshold) {
logger.info('[indexSync] Starting full message sync due to large difference');
await Message.syncWithMeili();
messagesSync = true;
} else if (messageCount !== messagesIndexed) {
logger.warn('[indexSync] Messages out of sync, performing incremental sync');
await Message.syncWithMeili();
messagesSync = true;
} }
} else {
logger.info(
`[indexSync] Messages are fully synced: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments}`,
);
}
// Check if we need to sync conversations
const convoProgress = await Conversation.getSyncProgress();
if (!convoProgress.isComplete || configUpdated) {
logger.info(
`[indexSync] Conversations need syncing: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments} indexed`,
);
const convoCount = await Conversation.countDocuments();
const convosIndexed = convoProgress.totalProcessed;
const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
if (convoCount - convosIndexed > syncThreshold) {
logger.info('[indexSync] Starting full conversation sync due to large difference');
await Conversation.syncWithMeili();
convosSync = true;
} else if (convoCount !== convosIndexed) {
logger.warn('[indexSync] Convos out of sync, performing incremental sync');
await Conversation.syncWithMeili();
convosSync = true;
} }
} else {
logger.info(
`[indexSync] Conversations are fully synced: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments}`,
);
}
return { messagesSync, convosSync };
+if (indexingDisabled === true) {
+logger.info('[indexSync] Indexing is disabled, skipping...');
+return { messagesSync: false, convosSync: false };
+}
+/** Ensures indexes have proper filterable attributes configured */
const { settingsUpdated, orphanedDocsFound: _orphanedDocsFound } =
await ensureFilterableAttributes(client);
let messagesSync = false;
let convosSync = false;
// Only reset flags if settings were actually updated (not just for orphaned doc cleanup)
if (settingsUpdated) {
logger.info(
'[indexSync] Settings updated. Forcing full re-sync to reindex with new configuration...',
);
// Reset sync flags to force full re-sync
await Message.collection.updateMany({ _meiliIndex: true }, { $set: { _meiliIndex: false } });
await Conversation.collection.updateMany(
{ _meiliIndex: true },
{ $set: { _meiliIndex: false } },
);
}
// Check if we need to sync messages
const messageProgress = await Message.getSyncProgress();
if (!messageProgress.isComplete || settingsUpdated) {
logger.info(
`[indexSync] Messages need syncing: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments} indexed`,
);
// Check if we should do a full sync or incremental
const messageCount = await Message.countDocuments();
const messagesIndexed = messageProgress.totalProcessed;
const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
if (messageCount - messagesIndexed > syncThreshold) {
logger.info('[indexSync] Starting full message sync due to large difference');
await Message.syncWithMeili();
messagesSync = true;
} else if (messageCount !== messagesIndexed) {
logger.warn('[indexSync] Messages out of sync, performing incremental sync');
await Message.syncWithMeili();
messagesSync = true;
}
} else {
logger.info(
`[indexSync] Messages are fully synced: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments}`,
);
}
// Check if we need to sync conversations
const convoProgress = await Conversation.getSyncProgress();
if (!convoProgress.isComplete || settingsUpdated) {
logger.info(
`[indexSync] Conversations need syncing: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments} indexed`,
);
const convoCount = await Conversation.countDocuments();
const convosIndexed = convoProgress.totalProcessed;
const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
if (convoCount - convosIndexed > syncThreshold) {
logger.info('[indexSync] Starting full conversation sync due to large difference');
await Conversation.syncWithMeili();
convosSync = true;
} else if (convoCount !== convosIndexed) {
logger.warn('[indexSync] Convos out of sync, performing incremental sync');
await Conversation.syncWithMeili();
convosSync = true;
}
} else {
logger.info(
`[indexSync] Conversations are fully synced: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments}`,
);
}
return { messagesSync, convosSync };
} finally {
if (indexingDisabled === true) {
logger.info('[indexSync] Indexing is disabled, skipping cleanup...');
} else if (flowManager && flowId && flowType) {
try {
await flowManager.deleteFlow(flowId, flowType);
logger.debug('[indexSync] Flow state cleaned up');
} catch (cleanupErr) {
logger.debug('[indexSync] Could not clean up flow state:', cleanupErr.message);
}
}
}
} }
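Note on the sync logic above: the choice between a full and an incremental Meilisearch sync hinges on how far the indexed count lags behind the collection count, compared against MEILI_SYNC_THRESHOLD (default 1000). A minimal sketch of that decision follows; `model` is assumed to expose countDocuments() and getSyncProgress() as the diff suggests, and `runSync` is a hypothetical wrapper standing in for Message.syncWithMeili / Conversation.syncWithMeili.

// Sketch only: decide how to sync one collection against Meilisearch.
async function decideSync(model, runSync) {
  const threshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
  const total = await model.countDocuments();
  const { totalProcessed } = await model.getSyncProgress();

  if (total - totalProcessed > threshold) {
    // Large gap: re-index everything in one pass.
    return runSync({ full: true });
  }
  if (total !== totalProcessed) {
    // Small gap: only index documents flagged with _meiliIndex: false.
    return runSync({ full: false });
  }
  // Counts match: nothing to do.
  return false;
}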
/** /**
@ -204,24 +300,26 @@ async function indexSync() {
logger.info('[indexSync] Starting index synchronization check...'); logger.info('[indexSync] Starting index synchronization check...');
// Get or create FlowStateManager instance
const flowsCache = getLogStores(CacheKeys.FLOWS);
if (!flowsCache) {
logger.warn('[indexSync] Flows cache not available, falling back to direct sync');
return await performSync(null, null, null);
}
const flowManager = new FlowStateManager(flowsCache, {
ttl: 60000 * 10, // 10 minutes TTL for sync operations
});
// Use a unique flow ID for the sync operation
const flowId = 'meili-index-sync';
const flowType = 'MEILI_SYNC';
try { try {
// Get or create FlowStateManager instance
const flowsCache = getLogStores(CacheKeys.FLOWS);
if (!flowsCache) {
logger.warn('[indexSync] Flows cache not available, falling back to direct sync');
return await performSync();
}
const flowManager = new FlowStateManager(flowsCache, {
ttl: 60000 * 10, // 10 minutes TTL for sync operations
});
// Use a unique flow ID for the sync operation
const flowId = 'meili-index-sync';
const flowType = 'MEILI_SYNC';
// This will only execute the handler if no other instance is running the sync // This will only execute the handler if no other instance is running the sync
const result = await flowManager.createFlowWithHandler(flowId, flowType, performSync); const result = await flowManager.createFlowWithHandler(flowId, flowType, () =>
performSync(flowManager, flowId, flowType),
);
if (result.messagesSync || result.convosSync) { if (result.messagesSync || result.convosSync) {
logger.info('[indexSync] Sync completed successfully'); logger.info('[indexSync] Sync completed successfully');

View file

@ -1,4 +1,4 @@
const { logger } = require('~/config'); const { logger } = require('@librechat/data-schemas');
const options = [ const options = [
{ {

View file

@ -1,4 +1,4 @@
const { logger } = require('~/config'); const { logger } = require('@librechat/data-schemas');
const { createTransaction, createStructuredTransaction } = require('./Transaction'); const { createTransaction, createStructuredTransaction } = require('./Transaction');
/** /**
* Creates up to two transactions to record the spending of tokens. * Creates up to two transactions to record the spending of tokens.

View file

@ -111,8 +111,8 @@ const tokenValues = Object.assign(
'claude-': { prompt: 0.8, completion: 2.4 }, 'claude-': { prompt: 0.8, completion: 2.4 },
'command-r-plus': { prompt: 3, completion: 15 }, 'command-r-plus': { prompt: 3, completion: 15 },
'command-r': { prompt: 0.5, completion: 1.5 }, 'command-r': { prompt: 0.5, completion: 1.5 },
'deepseek-reasoner': { prompt: 0.55, completion: 2.19 }, 'deepseek-reasoner': { prompt: 0.28, completion: 0.42 },
deepseek: { prompt: 0.14, completion: 0.28 }, deepseek: { prompt: 0.28, completion: 0.42 },
/* cohere doesn't have rates for the older command models, /* cohere doesn't have rates for the older command models,
so this was from https://artificialanalysis.ai/models/command-light/providers */ so this was from https://artificialanalysis.ai/models/command-light/providers */
command: { prompt: 0.38, completion: 0.38 }, command: { prompt: 0.38, completion: 0.38 },
@ -124,7 +124,8 @@ const tokenValues = Object.assign(
'gemini-2.0-flash': { prompt: 0.1, completion: 0.4 }, 'gemini-2.0-flash': { prompt: 0.1, completion: 0.4 },
'gemini-2.0': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing 'gemini-2.0': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
'gemini-2.5-pro': { prompt: 1.25, completion: 10 }, 'gemini-2.5-pro': { prompt: 1.25, completion: 10 },
'gemini-2.5-flash': { prompt: 0.15, completion: 3.5 }, 'gemini-2.5-flash': { prompt: 0.3, completion: 2.5 },
'gemini-2.5-flash-lite': { prompt: 0.075, completion: 0.4 },
'gemini-2.5': { prompt: 0, completion: 0 }, // Free for a period of time 'gemini-2.5': { prompt: 0, completion: 0 }, // Free for a period of time
'gemini-1.5-flash-8b': { prompt: 0.075, completion: 0.3 }, 'gemini-1.5-flash-8b': { prompt: 0.075, completion: 0.3 },
'gemini-1.5-flash': { prompt: 0.15, completion: 0.6 }, 'gemini-1.5-flash': { prompt: 0.15, completion: 0.6 },
@ -151,8 +152,19 @@ const tokenValues = Object.assign(
'ministral-8b': { prompt: 0.1, completion: 0.1 }, 'ministral-8b': { prompt: 0.1, completion: 0.1 },
'ministral-3b': { prompt: 0.04, completion: 0.04 }, 'ministral-3b': { prompt: 0.04, completion: 0.04 },
// GPT-OSS models // GPT-OSS models
'gpt-oss': { prompt: 0.05, completion: 0.2 },
'gpt-oss:20b': { prompt: 0.05, completion: 0.2 },
'gpt-oss-20b': { prompt: 0.05, completion: 0.2 }, 'gpt-oss-20b': { prompt: 0.05, completion: 0.2 },
'gpt-oss:120b': { prompt: 0.15, completion: 0.6 },
'gpt-oss-120b': { prompt: 0.15, completion: 0.6 }, 'gpt-oss-120b': { prompt: 0.15, completion: 0.6 },
// GLM models (Zhipu AI)
glm4: { prompt: 0.1, completion: 0.1 },
'glm-4': { prompt: 0.1, completion: 0.1 },
'glm-4-32b': { prompt: 0.1, completion: 0.1 },
'glm-4.5': { prompt: 0.35, completion: 1.55 },
'glm-4.5v': { prompt: 0.6, completion: 1.8 },
'glm-4.5-air': { prompt: 0.14, completion: 0.86 },
'glm-4.6': { prompt: 0.5, completion: 1.75 },
}, },
bedrockValues, bedrockValues,
); );
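For context on the rate changes above: the prompt/completion values in tokenValues read as prices per million tokens (that unit is an assumption here; the diff itself only shows the raw numbers). A back-of-the-envelope cost check under that assumption:

// Rough cost estimate, assuming tokenValues rates are USD per 1M tokens.
function estimateCost(rates, promptTokens, completionTokens) {
  return (promptTokens * rates.prompt + completionTokens * rates.completion) / 1e6;
}

// Example with the updated 'deepseek-reasoner' rates from the diff:
// 10,000 prompt tokens and 2,000 completion tokens
// => (10000 * 0.28 + 2000 * 0.42) / 1e6 = 0.00364
const cost = estimateCost({ prompt: 0.28, completion: 0.42 }, 10000, 2000);
console.log(cost.toFixed(5)); // '0.00364'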

View file

@ -184,6 +184,16 @@ describe('getValueKey', () => {
expect(getValueKey('claude-3.5-haiku-turbo')).toBe('claude-3.5-haiku'); expect(getValueKey('claude-3.5-haiku-turbo')).toBe('claude-3.5-haiku');
expect(getValueKey('claude-3.5-haiku-0125')).toBe('claude-3.5-haiku'); expect(getValueKey('claude-3.5-haiku-0125')).toBe('claude-3.5-haiku');
}); });
it('should return expected value keys for "gpt-oss" models', () => {
expect(getValueKey('openai/gpt-oss-120b')).toBe('gpt-oss-120b');
expect(getValueKey('openai/gpt-oss:120b')).toBe('gpt-oss:120b');
expect(getValueKey('openai/gpt-oss-570b')).toBe('gpt-oss');
expect(getValueKey('gpt-oss-570b')).toBe('gpt-oss');
expect(getValueKey('groq/gpt-oss-1080b')).toBe('gpt-oss');
expect(getValueKey('gpt-oss-20b')).toBe('gpt-oss-20b');
expect(getValueKey('oai/gpt-oss:20b')).toBe('gpt-oss:20b');
});
}); });
describe('getMultiplier', () => { describe('getMultiplier', () => {
@ -394,6 +404,18 @@ describe('getMultiplier', () => {
expect(getMultiplier({ model: key, tokenType: 'completion' })).toBe(expectedCompletion); expect(getMultiplier({ model: key, tokenType: 'completion' })).toBe(expectedCompletion);
}); });
}); });
it('should return correct multipliers for GLM models', () => {
const models = ['glm-4.6', 'glm-4.5v', 'glm-4.5-air', 'glm-4.5', 'glm-4-32b', 'glm-4', 'glm4'];
models.forEach((key) => {
const expectedPrompt = tokenValues[key].prompt;
const expectedCompletion = tokenValues[key].completion;
expect(getMultiplier({ valueKey: key, tokenType: 'prompt' })).toBe(expectedPrompt);
expect(getMultiplier({ valueKey: key, tokenType: 'completion' })).toBe(expectedCompletion);
expect(getMultiplier({ model: key, tokenType: 'prompt' })).toBe(expectedPrompt);
expect(getMultiplier({ model: key, tokenType: 'completion' })).toBe(expectedCompletion);
});
});
}); });
describe('AWS Bedrock Model Tests', () => { describe('AWS Bedrock Model Tests', () => {
@ -571,6 +593,9 @@ describe('getCacheMultiplier', () => {
describe('Google Model Tests', () => { describe('Google Model Tests', () => {
const googleModels = [ const googleModels = [
'gemini-2.5-pro',
'gemini-2.5-flash',
'gemini-2.5-flash-lite',
'gemini-2.5-pro-preview-05-06', 'gemini-2.5-pro-preview-05-06',
'gemini-2.5-flash-preview-04-17', 'gemini-2.5-flash-preview-04-17',
'gemini-2.5-exp', 'gemini-2.5-exp',
@ -611,6 +636,9 @@ describe('Google Model Tests', () => {
it('should map to the correct model keys', () => { it('should map to the correct model keys', () => {
const expected = { const expected = {
'gemini-2.5-pro': 'gemini-2.5-pro',
'gemini-2.5-flash': 'gemini-2.5-flash',
'gemini-2.5-flash-lite': 'gemini-2.5-flash-lite',
'gemini-2.5-pro-preview-05-06': 'gemini-2.5-pro', 'gemini-2.5-pro-preview-05-06': 'gemini-2.5-pro',
'gemini-2.5-flash-preview-04-17': 'gemini-2.5-flash', 'gemini-2.5-flash-preview-04-17': 'gemini-2.5-flash',
'gemini-2.5-exp': 'gemini-2.5', 'gemini-2.5-exp': 'gemini-2.5',
@ -766,6 +794,110 @@ describe('Grok Model Tests - Pricing', () => {
}); });
}); });
describe('GLM Model Tests', () => {
it('should return expected value keys for GLM models', () => {
expect(getValueKey('glm-4.6')).toBe('glm-4.6');
expect(getValueKey('glm-4.5')).toBe('glm-4.5');
expect(getValueKey('glm-4.5v')).toBe('glm-4.5v');
expect(getValueKey('glm-4.5-air')).toBe('glm-4.5-air');
expect(getValueKey('glm-4-32b')).toBe('glm-4-32b');
expect(getValueKey('glm-4')).toBe('glm-4');
expect(getValueKey('glm4')).toBe('glm4');
});
it('should match GLM model variations with provider prefixes', () => {
expect(getValueKey('z-ai/glm-4.6')).toBe('glm-4.6');
expect(getValueKey('z-ai/glm-4.5')).toBe('glm-4.5');
expect(getValueKey('z-ai/glm-4.5-air')).toBe('glm-4.5-air');
expect(getValueKey('z-ai/glm-4.5v')).toBe('glm-4.5v');
expect(getValueKey('z-ai/glm-4-32b')).toBe('glm-4-32b');
expect(getValueKey('zai/glm-4.6')).toBe('glm-4.6');
expect(getValueKey('zai/glm-4.5')).toBe('glm-4.5');
expect(getValueKey('zai/glm-4.5-air')).toBe('glm-4.5-air');
expect(getValueKey('zai/glm-4.5v')).toBe('glm-4.5v');
expect(getValueKey('zai-org/GLM-4.6')).toBe('glm-4.6');
expect(getValueKey('zai-org/GLM-4.5')).toBe('glm-4.5');
expect(getValueKey('zai-org/GLM-4.5-Air')).toBe('glm-4.5-air');
expect(getValueKey('zai-org/GLM-4.5V')).toBe('glm-4.5v');
expect(getValueKey('zai-org/GLM-4-32B-0414')).toBe('glm-4-32b');
});
it('should match GLM model variations with suffixes', () => {
expect(getValueKey('glm-4.6-fp8')).toBe('glm-4.6');
expect(getValueKey('zai-org/GLM-4.6-FP8')).toBe('glm-4.6');
expect(getValueKey('zai-org/GLM-4.5-Air-FP8')).toBe('glm-4.5-air');
});
it('should prioritize more specific GLM model patterns', () => {
expect(getValueKey('glm-4.5-air-something')).toBe('glm-4.5-air');
expect(getValueKey('glm-4.5-something')).toBe('glm-4.5');
expect(getValueKey('glm-4.5v-something')).toBe('glm-4.5v');
});
it('should return correct multipliers for all GLM models', () => {
expect(getMultiplier({ model: 'glm-4.6', tokenType: 'prompt' })).toBe(
tokenValues['glm-4.6'].prompt,
);
expect(getMultiplier({ model: 'glm-4.6', tokenType: 'completion' })).toBe(
tokenValues['glm-4.6'].completion,
);
expect(getMultiplier({ model: 'glm-4.5v', tokenType: 'prompt' })).toBe(
tokenValues['glm-4.5v'].prompt,
);
expect(getMultiplier({ model: 'glm-4.5v', tokenType: 'completion' })).toBe(
tokenValues['glm-4.5v'].completion,
);
expect(getMultiplier({ model: 'glm-4.5-air', tokenType: 'prompt' })).toBe(
tokenValues['glm-4.5-air'].prompt,
);
expect(getMultiplier({ model: 'glm-4.5-air', tokenType: 'completion' })).toBe(
tokenValues['glm-4.5-air'].completion,
);
expect(getMultiplier({ model: 'glm-4.5', tokenType: 'prompt' })).toBe(
tokenValues['glm-4.5'].prompt,
);
expect(getMultiplier({ model: 'glm-4.5', tokenType: 'completion' })).toBe(
tokenValues['glm-4.5'].completion,
);
expect(getMultiplier({ model: 'glm-4-32b', tokenType: 'prompt' })).toBe(
tokenValues['glm-4-32b'].prompt,
);
expect(getMultiplier({ model: 'glm-4-32b', tokenType: 'completion' })).toBe(
tokenValues['glm-4-32b'].completion,
);
expect(getMultiplier({ model: 'glm-4', tokenType: 'prompt' })).toBe(
tokenValues['glm-4'].prompt,
);
expect(getMultiplier({ model: 'glm-4', tokenType: 'completion' })).toBe(
tokenValues['glm-4'].completion,
);
expect(getMultiplier({ model: 'glm4', tokenType: 'prompt' })).toBe(tokenValues['glm4'].prompt);
expect(getMultiplier({ model: 'glm4', tokenType: 'completion' })).toBe(
tokenValues['glm4'].completion,
);
});
it('should return correct multipliers for GLM models with provider prefixes', () => {
expect(getMultiplier({ model: 'z-ai/glm-4.6', tokenType: 'prompt' })).toBe(
tokenValues['glm-4.6'].prompt,
);
expect(getMultiplier({ model: 'zai/glm-4.5-air', tokenType: 'completion' })).toBe(
tokenValues['glm-4.5-air'].completion,
);
expect(getMultiplier({ model: 'zai-org/GLM-4.5V', tokenType: 'prompt' })).toBe(
tokenValues['glm-4.5v'].prompt,
);
});
});
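The GLM tests above rely on getValueKey normalizing provider-prefixed and suffixed model names (z-ai/glm-4.5-air, zai-org/GLM-4.6-FP8, and so on) down to a pricing key, with more specific patterns winning over shorter ones. A simplified sketch of that kind of resolution; the real getValueKey almost certainly differs in detail:

// Simplified sketch of pricing-key resolution; not the actual getValueKey.
// Keys are checked longest-first so 'glm-4.5-air' wins over 'glm-4.5' and 'glm-4'.
const KNOWN_KEYS = ['glm-4.5-air', 'glm-4.5v', 'glm-4.5', 'glm-4-32b', 'glm-4.6', 'glm-4', 'glm4'];

function resolveValueKey(model) {
  const normalized = model.toLowerCase().split('/').pop(); // drop provider prefix
  const sorted = [...KNOWN_KEYS].sort((a, b) => b.length - a.length);
  return sorted.find((key) => normalized.includes(key));
}

console.log(resolveValueKey('zai-org/GLM-4.5-Air-FP8')); // 'glm-4.5-air'
console.log(resolveValueKey('z-ai/glm-4.6'));            // 'glm-4.6'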
describe('Claude Model Tests', () => { describe('Claude Model Tests', () => {
it('should return correct prompt and completion rates for Claude 4 models', () => { it('should return correct prompt and completion rates for Claude 4 models', () => {
expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'prompt' })).toBe( expect(getMultiplier({ model: 'claude-sonnet-4', tokenType: 'prompt' })).toBe(

View file

@ -1,6 +1,6 @@
{ {
"name": "@librechat/backend", "name": "@librechat/backend",
"version": "v0.8.0-rc4", "version": "v0.8.0",
"description": "", "description": "",
"scripts": { "scripts": {
"start": "echo 'please run this from the root directory'", "start": "echo 'please run this from the root directory'",
@ -47,9 +47,8 @@
"@langchain/core": "^0.3.62", "@langchain/core": "^0.3.62",
"@langchain/google-genai": "^0.2.13", "@langchain/google-genai": "^0.2.13",
"@langchain/google-vertexai": "^0.2.13", "@langchain/google-vertexai": "^0.2.13",
"@langchain/openai": "^0.5.18",
"@langchain/textsplitters": "^0.1.0", "@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.4.81", "@librechat/agents": "^2.4.85",
"@librechat/api": "*", "@librechat/api": "*",
"@librechat/data-schemas": "*", "@librechat/data-schemas": "*",
"@microsoft/microsoft-graph-client": "^3.0.7", "@microsoft/microsoft-graph-client": "^3.0.7",

View file

@ -1,7 +1,7 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider'); const { CacheKeys } = require('librechat-data-provider');
const { loadDefaultModels, loadConfigModels } = require('~/server/services/Config'); const { loadDefaultModels, loadConfigModels } = require('~/server/services/Config');
const { getLogStores } = require('~/cache'); const { getLogStores } = require('~/cache');
const { logger } = require('~/config');
/** /**
* @param {ServerRequest} req * @param {ServerRequest} req

View file

@ -1,7 +1,6 @@
const { logger } = require('@librechat/data-schemas'); const { logger, webSearchKeys } = require('@librechat/data-schemas');
const { Tools, CacheKeys, Constants, FileSources } = require('librechat-data-provider'); const { Tools, CacheKeys, Constants, FileSources } = require('librechat-data-provider');
const { const {
webSearchKeys,
MCPOAuthHandler, MCPOAuthHandler,
MCPTokenStorage, MCPTokenStorage,
normalizeHttpError, normalizeHttpError,

View file

@ -257,7 +257,7 @@ class AgentClient extends BaseClient {
}; };
} }
const files = await this.addImageURLs( const files = await this.processAttachments(
orderedMessages[orderedMessages.length - 1], orderedMessages[orderedMessages.length - 1],
attachments, attachments,
); );
@ -1116,8 +1116,8 @@ class AgentClient extends BaseClient {
appConfig.endpoints?.[endpoint] ?? appConfig.endpoints?.[endpoint] ??
titleProviderConfig.customEndpointConfig; titleProviderConfig.customEndpointConfig;
if (!endpointConfig) { if (!endpointConfig) {
logger.warn( logger.debug(
'[api/server/controllers/agents/client.js #titleConvo] Error getting endpoint config', `[api/server/controllers/agents/client.js #titleConvo] No endpoint config for "${endpoint}"`,
); );
} }

View file

@ -1,6 +1,6 @@
const { logger } = require('@librechat/data-schemas');
const { generate2FATempToken } = require('~/server/services/twoFactorService'); const { generate2FATempToken } = require('~/server/services/twoFactorService');
const { setAuthTokens } = require('~/server/services/AuthService'); const { setAuthTokens } = require('~/server/services/AuthService');
const { logger } = require('~/config');
const loginController = async (req, res) => { const loginController = async (req, res) => {
try { try {

View file

@ -1,8 +1,8 @@
const cookies = require('cookie'); const cookies = require('cookie');
const { getOpenIdConfig } = require('~/strategies'); const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { logoutUser } = require('~/server/services/AuthService'); const { logoutUser } = require('~/server/services/AuthService');
const { isEnabled } = require('~/server/utils'); const { getOpenIdConfig } = require('~/strategies');
const { logger } = require('~/config');
const logoutController = async (req, res) => { const logoutController = async (req, res) => {
const refreshToken = req.headers.cookie ? cookies.parse(req.headers.cookie).refreshToken : null; const refreshToken = req.headers.cookie ? cookies.parse(req.headers.cookie).refreshToken : null;

View file

@ -10,7 +10,12 @@ const compression = require('compression');
const cookieParser = require('cookie-parser'); const cookieParser = require('cookie-parser');
const { logger } = require('@librechat/data-schemas'); const { logger } = require('@librechat/data-schemas');
const mongoSanitize = require('express-mongo-sanitize'); const mongoSanitize = require('express-mongo-sanitize');
const { isEnabled, ErrorController } = require('@librechat/api'); const {
isEnabled,
ErrorController,
performStartupChecks,
initializeFileStorage,
} = require('@librechat/api');
const { connectDb, indexSync } = require('~/db'); const { connectDb, indexSync } = require('~/db');
const initializeOAuthReconnectManager = require('./services/initializeOAuthReconnectManager'); const initializeOAuthReconnectManager = require('./services/initializeOAuthReconnectManager');
const createValidateImageRequest = require('./middleware/validateImageRequest'); const createValidateImageRequest = require('./middleware/validateImageRequest');
@ -49,9 +54,11 @@ const startServer = async () => {
app.set('trust proxy', trusted_proxy); app.set('trust proxy', trusted_proxy);
await seedDatabase(); await seedDatabase();
const appConfig = await getAppConfig(); const appConfig = await getAppConfig();
initializeFileStorage(appConfig);
await performStartupChecks(appConfig);
await updateInterfacePermissions(appConfig); await updateInterfacePermissions(appConfig);
const indexPath = path.join(appConfig.paths.dist, 'index.html'); const indexPath = path.join(appConfig.paths.dist, 'index.html');
let indexHTML = fs.readFileSync(indexPath, 'utf8'); let indexHTML = fs.readFileSync(indexPath, 'utf8');

View file

@ -1,6 +1,6 @@
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { SystemRoles } = require('librechat-data-provider'); const { SystemRoles } = require('librechat-data-provider');
const { isEnabled } = require('~/server/utils');
const { logger } = require('~/config');
/** /**
* Checks if the user can delete their account * Checks if the user can delete their account

View file

@ -1,9 +1,9 @@
const { Keyv } = require('keyv'); const { Keyv } = require('keyv');
const uap = require('ua-parser-js'); const uap = require('ua-parser-js');
const { logger } = require('@librechat/data-schemas'); const { logger } = require('@librechat/data-schemas');
const { isEnabled, keyvMongo } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const { isEnabled, removePorts } = require('~/server/utils'); const { removePorts } = require('~/server/utils');
const keyvMongo = require('~/cache/keyvMongo');
const denyRequest = require('./denyRequest'); const denyRequest = require('./denyRequest');
const { getLogStores } = require('~/cache'); const { getLogStores } = require('~/cache');
const { findUser } = require('~/models'); const { findUser } = require('~/models');

View file

@ -1,6 +1,6 @@
const { logger } = require('@librechat/data-schemas');
const { PrincipalType, PermissionTypes, Permissions } = require('librechat-data-provider'); const { PrincipalType, PermissionTypes, Permissions } = require('librechat-data-provider');
const { getRoleByName } = require('~/models/Role'); const { getRoleByName } = require('~/models/Role');
const { logger } = require('~/config');
/** /**
* Middleware to check if user has permission to access people picker functionality * Middleware to check if user has permission to access people picker functionality

View file

@ -1,10 +1,11 @@
const { logger } = require('@librechat/data-schemas');
const { PrincipalType, PermissionTypes, Permissions } = require('librechat-data-provider'); const { PrincipalType, PermissionTypes, Permissions } = require('librechat-data-provider');
const { checkPeoplePickerAccess } = require('./checkPeoplePickerAccess'); const { checkPeoplePickerAccess } = require('./checkPeoplePickerAccess');
const { getRoleByName } = require('~/models/Role'); const { getRoleByName } = require('~/models/Role');
const { logger } = require('~/config');
jest.mock('~/models/Role'); jest.mock('~/models/Role');
jest.mock('~/config', () => ({ jest.mock('@librechat/data-schemas', () => ({
...jest.requireActual('@librechat/data-schemas'),
logger: { logger: {
error: jest.fn(), error: jest.fn(),
}, },

View file

@ -1,7 +1,7 @@
const { isEnabled } = require('@librechat/api');
const { Time, CacheKeys, ViolationTypes } = require('librechat-data-provider'); const { Time, CacheKeys, ViolationTypes } = require('librechat-data-provider');
const clearPendingReq = require('~/cache/clearPendingReq'); const clearPendingReq = require('~/cache/clearPendingReq');
const { logViolation, getLogStores } = require('~/cache'); const { logViolation, getLogStores } = require('~/cache');
const { isEnabled } = require('~/server/utils');
const denyRequest = require('./denyRequest'); const denyRequest = require('./denyRequest');
const { const {

View file

@ -1,6 +1,6 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const { limiterCache } = require('~/cache/cacheFactory');
const logViolation = require('~/cache/logViolation'); const logViolation = require('~/cache/logViolation');
const getEnvironmentVariables = () => { const getEnvironmentVariables = () => {

View file

@ -1,6 +1,6 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const { limiterCache } = require('~/cache/cacheFactory');
const logViolation = require('~/cache/logViolation'); const logViolation = require('~/cache/logViolation');
const getEnvironmentVariables = () => { const getEnvironmentVariables = () => {

View file

@ -1,7 +1,7 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const { removePorts } = require('~/server/utils'); const { removePorts } = require('~/server/utils');
const { limiterCache } = require('~/cache/cacheFactory');
const { logViolation } = require('~/cache'); const { logViolation } = require('~/cache');
const { LOGIN_WINDOW = 5, LOGIN_MAX = 7, LOGIN_VIOLATION_SCORE: score } = process.env; const { LOGIN_WINDOW = 5, LOGIN_MAX = 7, LOGIN_VIOLATION_SCORE: score } = process.env;
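These limiter hunks are all the same refactor: limiterCache now comes from @librechat/api instead of ~/cache/cacheFactory, while each limiter keeps the express-rate-limit shape with a handler that records a violation. A generic sketch of that shape; the window/max variables, violation type name, store factory signature, and logViolation argument order are illustrative assumptions, not the project's exact code:

// Illustrative login-limiter shape only; each real limiter differs in detail.
const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider');
const { removePorts } = require('~/server/utils');
const { logViolation } = require('~/cache');

const { LOGIN_WINDOW = 5, LOGIN_MAX = 7, LOGIN_VIOLATION_SCORE: score } = process.env;

const loginLimiter = rateLimit({
  windowMs: LOGIN_WINDOW * 60 * 1000, // minutes -> ms
  max: LOGIN_MAX, // attempts per window, per key
  keyGenerator: removePorts, // rate-limit by IP without the port
  store: limiterCache('login_limiter'), // store factory signature assumed
  handler: async (req, res) => {
    // Record the violation (member name and argument order assumed), then reply 429.
    await logViolation(req, res, ViolationTypes.LOGINS, { type: 'logins' }, score);
    return res.status(429).json({ message: 'Too many login attempts, try again later.' });
  },
});

module.exports = loginLimiter;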

View file

@ -1,7 +1,7 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const denyRequest = require('~/server/middleware/denyRequest'); const denyRequest = require('~/server/middleware/denyRequest');
const { limiterCache } = require('~/cache/cacheFactory');
const { logViolation } = require('~/cache'); const { logViolation } = require('~/cache');
const { const {

View file

@ -1,7 +1,7 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const { removePorts } = require('~/server/utils'); const { removePorts } = require('~/server/utils');
const { limiterCache } = require('~/cache/cacheFactory');
const { logViolation } = require('~/cache'); const { logViolation } = require('~/cache');
const { REGISTER_WINDOW = 60, REGISTER_MAX = 5, REGISTRATION_VIOLATION_SCORE: score } = process.env; const { REGISTER_WINDOW = 60, REGISTER_MAX = 5, REGISTRATION_VIOLATION_SCORE: score } = process.env;

View file

@ -1,7 +1,7 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const { removePorts } = require('~/server/utils'); const { removePorts } = require('~/server/utils');
const { limiterCache } = require('~/cache/cacheFactory');
const { logViolation } = require('~/cache'); const { logViolation } = require('~/cache');
const { const {

View file

@ -1,6 +1,6 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const { limiterCache } = require('~/cache/cacheFactory');
const logViolation = require('~/cache/logViolation'); const logViolation = require('~/cache/logViolation');
const getEnvironmentVariables = () => { const getEnvironmentVariables = () => {

View file

@ -1,6 +1,6 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const { limiterCache } = require('~/cache/cacheFactory');
const logViolation = require('~/cache/logViolation'); const logViolation = require('~/cache/logViolation');
const { TOOL_CALL_VIOLATION_SCORE: score } = process.env; const { TOOL_CALL_VIOLATION_SCORE: score } = process.env;

View file

@ -1,7 +1,7 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const logViolation = require('~/cache/logViolation'); const logViolation = require('~/cache/logViolation');
const { limiterCache } = require('~/cache/cacheFactory');
const getEnvironmentVariables = () => { const getEnvironmentVariables = () => {
const TTS_IP_MAX = parseInt(process.env.TTS_IP_MAX) || 100; const TTS_IP_MAX = parseInt(process.env.TTS_IP_MAX) || 100;

View file

@ -1,6 +1,6 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const { limiterCache } = require('~/cache/cacheFactory');
const logViolation = require('~/cache/logViolation'); const logViolation = require('~/cache/logViolation');
const getEnvironmentVariables = () => { const getEnvironmentVariables = () => {

View file

@ -1,7 +1,7 @@
const rateLimit = require('express-rate-limit'); const rateLimit = require('express-rate-limit');
const { limiterCache } = require('@librechat/api');
const { ViolationTypes } = require('librechat-data-provider'); const { ViolationTypes } = require('librechat-data-provider');
const { removePorts } = require('~/server/utils'); const { removePorts } = require('~/server/utils');
const { limiterCache } = require('~/cache/cacheFactory');
const { logViolation } = require('~/cache'); const { logViolation } = require('~/cache');
const { const {

View file

@ -1,4 +1,4 @@
const { logger } = require('~/config'); const { logger } = require('@librechat/data-schemas');
/** /**
* Middleware to log Forwarded Headers * Middleware to log Forwarded Headers

View file

@ -1,8 +1,8 @@
const axios = require('axios'); const axios = require('axios');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { ErrorTypes } = require('librechat-data-provider'); const { ErrorTypes } = require('librechat-data-provider');
const { isEnabled } = require('~/server/utils');
const denyRequest = require('./denyRequest'); const denyRequest = require('./denyRequest');
const { logger } = require('~/config');
async function moderateText(req, res, next) { async function moderateText(req, res, next) {
if (!isEnabled(process.env.OPENAI_MODERATION)) { if (!isEnabled(process.env.OPENAI_MODERATION)) {

View file

@ -1,6 +1,6 @@
const cookies = require('cookie'); const cookies = require('cookie');
const { isEnabled } = require('~/server/utils');
const passport = require('passport'); const passport = require('passport');
const { isEnabled } = require('@librechat/api');
// This middleware does not require authentication, // This middleware does not require authentication,
// but if the user is authenticated, it will set the user object. // but if the user is authenticated, it will set the user object.

View file

@ -1,6 +1,6 @@
const passport = require('passport');
const cookies = require('cookie'); const cookies = require('cookie');
const { isEnabled } = require('~/server/utils'); const passport = require('passport');
const { isEnabled } = require('@librechat/api');
/** /**
* Custom Middleware to handle JWT authentication, with support for OpenID token reuse * Custom Middleware to handle JWT authentication, with support for OpenID token reuse

View file

@ -1,5 +1,5 @@
const passport = require('passport'); const passport = require('passport');
const { logger } = require('~/config'); const { logger } = require('@librechat/data-schemas');
const requireLocalAuth = (req, res, next) => { const requireLocalAuth = (req, res, next) => {
passport.authenticate('local', (err, user, info) => { passport.authenticate('local', (err, user, info) => {

View file

@ -1,5 +1,5 @@
const { isEnabled } = require('~/server/utils'); const { isEnabled } = require('@librechat/api');
const { logger } = require('~/config'); const { logger } = require('@librechat/data-schemas');
function validatePasswordReset(req, res, next) { function validatePasswordReset(req, res, next) {
if (isEnabled(process.env.ALLOW_PASSWORD_RESET)) { if (isEnabled(process.env.ALLOW_PASSWORD_RESET)) {

View file

@ -1,4 +1,4 @@
const { isEnabled } = require('~/server/utils'); const { isEnabled } = require('@librechat/api');
function validateRegistration(req, res, next) { function validateRegistration(req, res, next) {
if (req.invite) { if (req.invite) {

View file

@ -1,10 +1,13 @@
const request = require('supertest');
const express = require('express'); const express = require('express');
const request = require('supertest');
const { isEnabled } = require('@librechat/api');
const { getLdapConfig } = require('~/server/services/Config/ldap'); const { getLdapConfig } = require('~/server/services/Config/ldap');
const { isEnabled } = require('~/server/utils');
jest.mock('~/server/services/Config/ldap'); jest.mock('~/server/services/Config/ldap');
jest.mock('~/server/utils'); jest.mock('@librechat/api', () => ({
...jest.requireActual('@librechat/api'),
isEnabled: jest.fn(),
}));
const app = express(); const app = express();

View file

@ -1,4 +1,5 @@
const express = require('express'); const express = require('express');
const { isEnabled } = require('@librechat/api');
const { const {
uaParser, uaParser,
checkBan, checkBan,
@ -8,7 +9,6 @@ const {
concurrentLimiter, concurrentLimiter,
messageUserLimiter, messageUserLimiter,
} = require('~/server/middleware'); } = require('~/server/middleware');
const { isEnabled } = require('~/server/utils');
const { v1 } = require('./v1'); const { v1 } = require('./v1');
const chat = require('./chat'); const chat = require('./chat');

View file

@ -156,7 +156,7 @@ router.get('/', async function (req, res) {
if ( if (
webSearchConfig != null && webSearchConfig != null &&
(webSearchConfig.searchProvider || (webSearchConfig.searchProvider ||
webSearchConfig.scraperType || webSearchConfig.scraperProvider ||
webSearchConfig.rerankerType) webSearchConfig.rerankerType)
) { ) {
payload.webSearch = {}; payload.webSearch = {};
@ -165,8 +165,8 @@ router.get('/', async function (req, res) {
if (webSearchConfig?.searchProvider) { if (webSearchConfig?.searchProvider) {
payload.webSearch.searchProvider = webSearchConfig.searchProvider; payload.webSearch.searchProvider = webSearchConfig.searchProvider;
} }
if (webSearchConfig?.scraperType) { if (webSearchConfig?.scraperProvider) {
payload.webSearch.scraperType = webSearchConfig.scraperType; payload.webSearch.scraperProvider = webSearchConfig.scraperProvider;
} }
if (webSearchConfig?.rerankerType) { if (webSearchConfig?.rerankerType) {
payload.webSearch.rerankerType = webSearchConfig.rerankerType; payload.webSearch.rerankerType = webSearchConfig.rerankerType;

View file

@ -1,19 +1,19 @@
const { isEnabled } = require('@librechat/api');
const { EModelEndpoint } = require('librechat-data-provider');
const {
validateConvoAccess,
messageUserLimiter,
concurrentLimiter,
messageIpLimiter,
requireJwtAuth,
checkBan,
uaParser,
} = require('~/server/middleware');
const anthropic = require('./anthropic');
const express = require('express'); const express = require('express');
const openAI = require('./openAI'); const openAI = require('./openAI');
const custom = require('./custom'); const custom = require('./custom');
const google = require('./google'); const google = require('./google');
const anthropic = require('./anthropic');
const { isEnabled } = require('~/server/utils');
const { EModelEndpoint } = require('librechat-data-provider');
const {
checkBan,
uaParser,
requireJwtAuth,
messageIpLimiter,
concurrentLimiter,
messageUserLimiter,
validateConvoAccess,
} = require('~/server/middleware');
const { LIMIT_CONCURRENT_MESSAGES, LIMIT_MESSAGE_IP, LIMIT_MESSAGE_USER } = process.env ?? {}; const { LIMIT_CONCURRENT_MESSAGES, LIMIT_MESSAGE_IP, LIMIT_MESSAGE_USER } = process.env ?? {};

View file

@ -1,6 +1,7 @@
const fs = require('fs').promises; const fs = require('fs').promises;
const express = require('express'); const express = require('express');
const { EnvVar } = require('@librechat/agents'); const { EnvVar } = require('@librechat/agents');
const { logger } = require('@librechat/data-schemas');
const { const {
Time, Time,
isUUID, isUUID,
@ -30,7 +31,6 @@ const { cleanFileName } = require('~/server/utils/files');
const { getAssistant } = require('~/models/Assistant'); const { getAssistant } = require('~/models/Assistant');
const { getAgent } = require('~/models/Agent'); const { getAgent } = require('~/models/Agent');
const { getLogStores } = require('~/cache'); const { getLogStores } = require('~/cache');
const { logger } = require('~/config');
const { Readable } = require('stream'); const { Readable } = require('stream');
const router = express.Router(); const router = express.Router();

View file

@ -1,9 +1,9 @@
const multer = require('multer'); const multer = require('multer');
const express = require('express'); const express = require('express');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider'); const { CacheKeys } = require('librechat-data-provider');
const { getVoices, streamAudio, textToSpeech } = require('~/server/services/Files/Audio'); const { getVoices, streamAudio, textToSpeech } = require('~/server/services/Files/Audio');
const { getLogStores } = require('~/cache'); const { getLogStores } = require('~/cache');
const { logger } = require('~/config');
const router = express.Router(); const router = express.Router();
const upload = multer(); const upload = multer();

View file

@ -1,8 +1,8 @@
const express = require('express');
const crypto = require('crypto'); const crypto = require('crypto');
const express = require('express');
const { logger } = require('@librechat/data-schemas');
const { getPresets, savePreset, deletePresets } = require('~/models'); const { getPresets, savePreset, deletePresets } = require('~/models');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth'); const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const { logger } = require('~/config');
const router = express.Router(); const router = express.Router();
router.use(requireJwtAuth); router.use(requireJwtAuth);

View file

@ -1,7 +1,7 @@
const express = require('express'); const express = require('express');
const { MeiliSearch } = require('meilisearch'); const { MeiliSearch } = require('meilisearch');
const { isEnabled } = require('@librechat/api');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth'); const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const { isEnabled } = require('~/server/utils');
const router = express.Router(); const router = express.Router();

View file

@ -1,7 +1,7 @@
const express = require('express'); const express = require('express');
const { isEnabled } = require('@librechat/api');
const staticCache = require('../utils/staticCache'); const staticCache = require('../utils/staticCache');
const paths = require('~/config/paths'); const paths = require('~/config/paths');
const { isEnabled } = require('~/server/utils');
const skipGzipScan = !isEnabled(process.env.ENABLE_IMAGE_OUTPUT_GZIP_SCAN); const skipGzipScan = !isEnabled(process.env.ENABLE_IMAGE_OUTPUT_GZIP_SCAN);

View file

@ -1,8 +1,9 @@
const express = require('express'); const express = require('express');
const router = express.Router(); const { logger } = require('@librechat/data-schemas');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth'); const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const { countTokens } = require('~/server/utils'); const { countTokens } = require('~/server/utils');
const { logger } = require('~/config');
const router = express.Router();
router.post('/', requireJwtAuth, async (req, res) => { router.post('/', requireJwtAuth, async (req, res) => {
try { try {

View file

@ -1,198 +0,0 @@
jest.mock('@librechat/data-schemas', () => ({
logger: {
info: jest.fn(),
warn: jest.fn(),
error: jest.fn(),
},
}));
jest.mock('@librechat/api', () => ({
...jest.requireActual('@librechat/api'),
loadDefaultInterface: jest.fn(),
}));
jest.mock('./start/tools', () => ({
loadAndFormatTools: jest.fn().mockReturnValue({}),
}));
jest.mock('./start/checks', () => ({
checkVariables: jest.fn(),
checkHealth: jest.fn(),
checkConfig: jest.fn(),
checkAzureVariables: jest.fn(),
checkWebSearchConfig: jest.fn(),
}));
jest.mock('./Config/loadCustomConfig', () => jest.fn());
const AppService = require('./AppService');
const { loadDefaultInterface } = require('@librechat/api');
describe('AppService interface configuration', () => {
let mockLoadCustomConfig;
beforeEach(() => {
jest.resetModules();
jest.clearAllMocks();
mockLoadCustomConfig = require('./Config/loadCustomConfig');
});
it('should set prompts and bookmarks to true when loadDefaultInterface returns true for both', async () => {
mockLoadCustomConfig.mockResolvedValue({});
loadDefaultInterface.mockResolvedValue({ prompts: true, bookmarks: true });
const result = await AppService();
expect(result).toEqual(
expect.objectContaining({
interfaceConfig: expect.objectContaining({
prompts: true,
bookmarks: true,
}),
}),
);
expect(loadDefaultInterface).toHaveBeenCalled();
});
it('should set prompts and bookmarks to false when loadDefaultInterface returns false for both', async () => {
mockLoadCustomConfig.mockResolvedValue({ interface: { prompts: false, bookmarks: false } });
loadDefaultInterface.mockResolvedValue({ prompts: false, bookmarks: false });
const result = await AppService();
expect(result).toEqual(
expect.objectContaining({
interfaceConfig: expect.objectContaining({
prompts: false,
bookmarks: false,
}),
}),
);
expect(loadDefaultInterface).toHaveBeenCalled();
});
it('should not set prompts and bookmarks when loadDefaultInterface returns undefined for both', async () => {
mockLoadCustomConfig.mockResolvedValue({});
loadDefaultInterface.mockResolvedValue({});
const result = await AppService();
expect(result).toEqual(
expect.objectContaining({
interfaceConfig: expect.anything(),
}),
);
// Verify that prompts and bookmarks are undefined when not provided
expect(result.interfaceConfig.prompts).toBeUndefined();
expect(result.interfaceConfig.bookmarks).toBeUndefined();
expect(loadDefaultInterface).toHaveBeenCalled();
});
it('should set prompts and bookmarks to different values when loadDefaultInterface returns different values', async () => {
mockLoadCustomConfig.mockResolvedValue({ interface: { prompts: true, bookmarks: false } });
loadDefaultInterface.mockResolvedValue({ prompts: true, bookmarks: false });
const result = await AppService();
expect(result).toEqual(
expect.objectContaining({
interfaceConfig: expect.objectContaining({
prompts: true,
bookmarks: false,
}),
}),
);
expect(loadDefaultInterface).toHaveBeenCalled();
});
it('should correctly configure peoplePicker permissions including roles', async () => {
mockLoadCustomConfig.mockResolvedValue({
interface: {
peoplePicker: {
users: true,
groups: true,
roles: true,
},
},
});
loadDefaultInterface.mockResolvedValue({
peoplePicker: {
users: true,
groups: true,
roles: true,
},
});
const result = await AppService();
expect(result).toEqual(
expect.objectContaining({
interfaceConfig: expect.objectContaining({
peoplePicker: expect.objectContaining({
users: true,
groups: true,
roles: true,
}),
}),
}),
);
expect(loadDefaultInterface).toHaveBeenCalled();
});
it('should handle mixed peoplePicker permissions', async () => {
mockLoadCustomConfig.mockResolvedValue({
interface: {
peoplePicker: {
users: true,
groups: false,
roles: true,
},
},
});
loadDefaultInterface.mockResolvedValue({
peoplePicker: {
users: true,
groups: false,
roles: true,
},
});
const result = await AppService();
expect(result).toEqual(
expect.objectContaining({
interfaceConfig: expect.objectContaining({
peoplePicker: expect.objectContaining({
users: true,
groups: false,
roles: true,
}),
}),
}),
);
});
it('should set default peoplePicker permissions when not provided', async () => {
mockLoadCustomConfig.mockResolvedValue({});
loadDefaultInterface.mockResolvedValue({
peoplePicker: {
users: true,
groups: true,
roles: true,
},
});
const result = await AppService();
expect(result).toEqual(
expect.objectContaining({
interfaceConfig: expect.objectContaining({
peoplePicker: expect.objectContaining({
users: true,
groups: true,
roles: true,
}),
}),
}),
);
});
});

View file

@ -129,7 +129,7 @@ const verifyEmail = async (req) => {
return { message: 'Email already verified', status: 'success' }; return { message: 'Email already verified', status: 'success' };
} }
let emailVerificationData = await findToken({ email: decodedEmail }); let emailVerificationData = await findToken({ email: decodedEmail }, { sort: { createdAt: -1 } });
if (!emailVerificationData) { if (!emailVerificationData) {
logger.warn(`[verifyEmail] [No email verification data found] [Email: ${decodedEmail}]`); logger.warn(`[verifyEmail] [No email verification data found] [Email: ${decodedEmail}]`);
@ -319,9 +319,12 @@ const requestPasswordReset = async (req) => {
* @returns * @returns
*/ */
const resetPassword = async (userId, token, password) => { const resetPassword = async (userId, token, password) => {
let passwordResetToken = await findToken({ let passwordResetToken = await findToken(
userId, {
}); userId,
},
{ sort: { createdAt: -1 } },
);
if (!passwordResetToken) { if (!passwordResetToken) {
return new Error('Invalid or expired password reset token'); return new Error('Invalid or expired password reset token');
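The findToken change above (for both email verification and password reset) adds `{ sort: { createdAt: -1 } }` so that, when several tokens exist for the same email or user, the most recently issued one is the one checked. A rough Mongoose equivalent; the Token model name and fields are taken from the diff's usage, not verified against the schema:

// Rough equivalent of findToken(query, { sort: { createdAt: -1 } }).
// Assumes a Token model with userId/email fields and timestamps enabled.
async function findLatestToken(Token, query) {
  return Token.findOne(query)
    .sort({ createdAt: -1 }) // newest token first
    .lean();
}

// e.g. const token = await findLatestToken(Token, { userId });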

View file

@ -1,11 +1,25 @@
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider'); const { CacheKeys } = require('librechat-data-provider');
const AppService = require('~/server/services/AppService'); const { logger, AppService } = require('@librechat/data-schemas');
const { loadAndFormatTools } = require('~/server/services/start/tools');
const loadCustomConfig = require('./loadCustomConfig');
const { setCachedTools } = require('./getCachedTools'); const { setCachedTools } = require('./getCachedTools');
const getLogStores = require('~/cache/getLogStores'); const getLogStores = require('~/cache/getLogStores');
const paths = require('~/config/paths');
const BASE_CONFIG_KEY = '_BASE_'; const BASE_CONFIG_KEY = '_BASE_';
const loadBaseConfig = async () => {
/** @type {TCustomConfig} */
const config = (await loadCustomConfig()) ?? {};
/** @type {Record<string, FunctionTool>} */
const systemTools = loadAndFormatTools({
adminFilter: config.filteredTools,
adminIncluded: config.includedTools,
directory: paths.structuredTools,
});
return AppService({ config, paths, systemTools });
};
/** /**
* Get the app configuration based on user context * Get the app configuration based on user context
* @param {Object} [options] * @param {Object} [options]
@ -29,7 +43,7 @@ async function getAppConfig(options = {}) {
let baseConfig = await cache.get(BASE_CONFIG_KEY); let baseConfig = await cache.get(BASE_CONFIG_KEY);
if (!baseConfig) { if (!baseConfig) {
logger.info('[getAppConfig] App configuration not initialized. Initializing AppService...'); logger.info('[getAppConfig] App configuration not initialized. Initializing AppService...');
baseConfig = await AppService(); baseConfig = await loadBaseConfig();
if (!baseConfig) { if (!baseConfig) {
throw new Error('Failed to initialize app configuration through AppService.'); throw new Error('Failed to initialize app configuration through AppService.');
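For orientation, getAppConfig keeps a compute-once pattern: the base config is only rebuilt (loadCustomConfig + loadAndFormatTools + AppService) when the `_BASE_` entry is missing from the config cache. A stripped-down sketch of that cycle; whether and where the real code writes the result back to the cache is not visible in this hunk, so the set call here is part of the sketch:

// Minimal cache-or-build sketch; the real getAppConfig also layers
// role/user-specific overrides on top of the base config.
const BASE_CONFIG_KEY = '_BASE_';

async function getBaseConfig(cache, loadBaseConfig) {
  let baseConfig = await cache.get(BASE_CONFIG_KEY);
  if (baseConfig) {
    return baseConfig;
  }
  baseConfig = await loadBaseConfig();
  if (!baseConfig) {
    throw new Error('Failed to initialize app configuration.');
  }
  await cache.set(BASE_CONFIG_KEY, baseConfig); // assumed, not shown in the hunk
  return baseConfig;
}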

View file

@ -1,48 +0,0 @@
const { RateLimitPrefix } = require('librechat-data-provider');
/**
*
* @param {TCustomConfig['rateLimits'] | undefined} rateLimits
*/
const handleRateLimits = (rateLimits) => {
if (!rateLimits) {
return;
}
const rateLimitKeys = {
fileUploads: RateLimitPrefix.FILE_UPLOAD,
conversationsImport: RateLimitPrefix.IMPORT,
tts: RateLimitPrefix.TTS,
stt: RateLimitPrefix.STT,
};
Object.entries(rateLimitKeys).forEach(([key, prefix]) => {
const rateLimit = rateLimits[key];
if (rateLimit) {
setRateLimitEnvVars(prefix, rateLimit);
}
});
};
/**
* Set environment variables for rate limit configurations
*
* @param {string} prefix - Prefix for environment variable names
* @param {object} rateLimit - Rate limit configuration object
*/
const setRateLimitEnvVars = (prefix, rateLimit) => {
const envVarsMapping = {
ipMax: `${prefix}_IP_MAX`,
ipWindowInMinutes: `${prefix}_IP_WINDOW`,
userMax: `${prefix}_USER_MAX`,
userWindowInMinutes: `${prefix}_USER_WINDOW`,
};
Object.entries(envVarsMapping).forEach(([key, envVar]) => {
if (rateLimit[key] !== undefined) {
process.env[envVar] = rateLimit[key];
}
});
};
module.exports = handleRateLimits;

View file

@ -1,4 +1,4 @@
const { isEnabled } = require('~/server/utils'); const { isEnabled } = require('@librechat/api');
/** @returns {TStartupConfig['ldap'] | undefined} */ /** @returns {TStartupConfig['ldap'] | undefined} */
const getLdapConfig = () => { const getLdapConfig = () => {

View file

@ -5,14 +5,12 @@ const keyBy = require('lodash/keyBy');
const { loadYaml } = require('@librechat/api'); const { loadYaml } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas'); const { logger } = require('@librechat/data-schemas');
const { const {
CacheKeys,
configSchema, configSchema,
paramSettings, paramSettings,
EImageOutputType, EImageOutputType,
agentParamSettings, agentParamSettings,
validateSettingDefinitions, validateSettingDefinitions,
} = require('librechat-data-provider'); } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const projectRoot = path.resolve(__dirname, '..', '..', '..', '..'); const projectRoot = path.resolve(__dirname, '..', '..', '..', '..');
const defaultConfigPath = path.resolve(projectRoot, 'librechat.yaml'); const defaultConfigPath = path.resolve(projectRoot, 'librechat.yaml');
@ -119,7 +117,6 @@ https://www.librechat.ai/docs/configuration/stt_tts`);
.filter((endpoint) => endpoint.customParams) .filter((endpoint) => endpoint.customParams)
.forEach((endpoint) => parseCustomParams(endpoint.name, endpoint.customParams)); .forEach((endpoint) => parseCustomParams(endpoint.name, endpoint.customParams));
if (result.data.modelSpecs) { if (result.data.modelSpecs) {
customConfig.modelSpecs = result.data.modelSpecs; customConfig.modelSpecs = result.data.modelSpecs;
} }

View file

@ -1,6 +1,6 @@
const { isEnabled } = require('@librechat/api');
const { CacheKeys } = require('librechat-data-provider'); const { CacheKeys } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores'); const getLogStores = require('~/cache/getLogStores');
const { isEnabled } = require('~/server/utils');
const { saveConvo } = require('~/models'); const { saveConvo } = require('~/models');
const addTitle = async (req, { text, response, client }) => { const addTitle = async (req, { text, response, client }) => {

View file

@ -1,7 +1,7 @@
const { isEnabled } = require('@librechat/api');
const { CacheKeys } = require('librechat-data-provider'); const { CacheKeys } = require('librechat-data-provider');
const { saveConvo } = require('~/models/Conversation'); const { saveConvo } = require('~/models/Conversation');
const getLogStores = require('~/cache/getLogStores'); const getLogStores = require('~/cache/getLogStores');
const { isEnabled } = require('~/server/utils');
const addTitle = async (req, { text, responseText, conversationId, client }) => { const addTitle = async (req, { text, responseText, conversationId, client }) => {
const { TITLE_CONVO = 'true' } = process.env ?? {}; const { TITLE_CONVO = 'true' } = process.env ?? {};

View file

@ -1,6 +1,6 @@
const { isEnabled } = require('@librechat/api');
const { CacheKeys } = require('librechat-data-provider'); const { CacheKeys } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores'); const getLogStores = require('~/cache/getLogStores');
const { isEnabled } = require('~/server/utils');
const { saveConvo } = require('~/models'); const { saveConvo } = require('~/models');
const addTitle = async (req, { text, response, client }) => { const addTitle = async (req, { text, response, client }) => {

View file

@ -3,8 +3,8 @@ const path = require('path');
const mime = require('mime'); const mime = require('mime');
const axios = require('axios'); const axios = require('axios');
const fetch = require('node-fetch'); const fetch = require('node-fetch');
const { logger } = require('~/config'); const { logger } = require('@librechat/data-schemas');
const { getAzureContainerClient } = require('./initialize'); const { getAzureContainerClient } = require('@librechat/api');
const defaultBasePath = 'images'; const defaultBasePath = 'images';
const { AZURE_STORAGE_PUBLIC_ACCESS = 'true', AZURE_CONTAINER_NAME = 'files' } = process.env; const { AZURE_STORAGE_PUBLIC_ACCESS = 'true', AZURE_CONTAINER_NAME = 'files' } = process.env;
@ -30,7 +30,7 @@ async function saveBufferToAzure({
containerName, containerName,
}) { }) {
try { try {
const containerClient = getAzureContainerClient(containerName); const containerClient = await getAzureContainerClient(containerName);
const access = AZURE_STORAGE_PUBLIC_ACCESS?.toLowerCase() === 'true' ? 'blob' : undefined; const access = AZURE_STORAGE_PUBLIC_ACCESS?.toLowerCase() === 'true' ? 'blob' : undefined;
// Create the container if it doesn't exist. This is done per operation. // Create the container if it doesn't exist. This is done per operation.
await containerClient.createIfNotExists({ access }); await containerClient.createIfNotExists({ access });
@ -84,7 +84,7 @@ async function saveURLToAzure({
*/ */
async function getAzureURL({ fileName, basePath = defaultBasePath, userId, containerName }) { async function getAzureURL({ fileName, basePath = defaultBasePath, userId, containerName }) {
try { try {
const containerClient = getAzureContainerClient(containerName); const containerClient = await getAzureContainerClient(containerName);
const blobPath = userId ? `${basePath}/${userId}/${fileName}` : `${basePath}/${fileName}`; const blobPath = userId ? `${basePath}/${userId}/${fileName}` : `${basePath}/${fileName}`;
const blockBlobClient = containerClient.getBlockBlobClient(blobPath); const blockBlobClient = containerClient.getBlockBlobClient(blobPath);
return blockBlobClient.url; return blockBlobClient.url;
@ -103,7 +103,7 @@ async function getAzureURL({ fileName, basePath = defaultBasePath, userId, conta
*/ */
async function deleteFileFromAzure(req, file) { async function deleteFileFromAzure(req, file) {
try { try {
const containerClient = getAzureContainerClient(AZURE_CONTAINER_NAME); const containerClient = await getAzureContainerClient(AZURE_CONTAINER_NAME);
const blobPath = file.filepath.split(`${AZURE_CONTAINER_NAME}/`)[1]; const blobPath = file.filepath.split(`${AZURE_CONTAINER_NAME}/`)[1];
if (!blobPath.includes(req.user.id)) { if (!blobPath.includes(req.user.id)) {
throw new Error('User ID not found in blob path'); throw new Error('User ID not found in blob path');
@ -140,7 +140,7 @@ async function streamFileToAzure({
containerName, containerName,
}) { }) {
try { try {
const containerClient = getAzureContainerClient(containerName); const containerClient = await getAzureContainerClient(containerName);
const access = AZURE_STORAGE_PUBLIC_ACCESS?.toLowerCase() === 'true' ? 'blob' : undefined; const access = AZURE_STORAGE_PUBLIC_ACCESS?.toLowerCase() === 'true' ? 'blob' : undefined;
// Create the container if it doesn't exist // Create the container if it doesn't exist

View file

@ -1,9 +1,7 @@
const crud = require('./crud'); const crud = require('./crud');
const images = require('./images'); const images = require('./images');
const initialize = require('./initialize');
module.exports = { module.exports = {
...crud, ...crud,
...images, ...images,
...initialize,
}; };

View file

@ -3,9 +3,9 @@ const path = require('path');
const axios = require('axios'); const axios = require('axios');
const fetch = require('node-fetch'); const fetch = require('node-fetch');
const { logger } = require('@librechat/data-schemas'); const { logger } = require('@librechat/data-schemas');
const { getFirebaseStorage } = require('@librechat/api');
const { ref, uploadBytes, getDownloadURL, deleteObject } = require('firebase/storage'); const { ref, uploadBytes, getDownloadURL, deleteObject } = require('firebase/storage');
const { getBufferMetadata } = require('~/server/utils'); const { getBufferMetadata } = require('~/server/utils');
const { getFirebaseStorage } = require('./initialize');
/** /**
* Deletes a file from Firebase Storage. * Deletes a file from Firebase Storage.

View file

@ -1,9 +1,7 @@
const crud = require('./crud'); const crud = require('./crud');
const images = require('./images'); const images = require('./images');
const initialize = require('./initialize');
module.exports = { module.exports = {
...crud, ...crud,
...images, ...images,
...initialize,
}; };

View file

@ -1,39 +0,0 @@
const firebase = require('firebase/app');
const { getStorage } = require('firebase/storage');
const { logger } = require('~/config');
let i = 0;
let firebaseApp = null;
const initializeFirebase = () => {
// Return existing instance if already initialized
if (firebaseApp) {
return firebaseApp;
}
const firebaseConfig = {
apiKey: process.env.FIREBASE_API_KEY,
authDomain: process.env.FIREBASE_AUTH_DOMAIN,
projectId: process.env.FIREBASE_PROJECT_ID,
storageBucket: process.env.FIREBASE_STORAGE_BUCKET,
messagingSenderId: process.env.FIREBASE_MESSAGING_SENDER_ID,
appId: process.env.FIREBASE_APP_ID,
};
if (Object.values(firebaseConfig).some((value) => !value)) {
i === 0 && logger.info('[Optional] CDN not initialized.');
i++;
return null;
}
firebaseApp = firebase.initializeApp(firebaseConfig);
logger.info('Firebase CDN initialized');
return firebaseApp;
};
const getFirebaseStorage = () => {
const app = initializeFirebase();
return app ? getStorage(app) : null;
};
module.exports = { initializeFirebase, getFirebaseStorage };
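The local Firebase initializer above is removed; getFirebaseStorage is now imported from @librechat/api, as the crud.js change earlier in this diff shows. A short usage sketch, assuming the relocated helper keeps the same return-null-when-unconfigured contract; getFirebaseFileURL is an illustrative name, not part of the codebase:

const { getFirebaseStorage } = require('@librechat/api');
const { ref, getDownloadURL } = require('firebase/storage');

async function getFirebaseFileURL(basePath, fileName) {
  const storage = getFirebaseStorage();
  if (!storage) {
    // Mirrors the old behavior: Firebase is optional and may be unconfigured.
    throw new Error('Firebase is not initialized');
  }
  return getDownloadURL(ref(storage, `${basePath}/${fileName}`));
}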

View file

@ -4,6 +4,7 @@ const axios = require('axios');
const { logger } = require('@librechat/data-schemas'); const { logger } = require('@librechat/data-schemas');
const { EModelEndpoint } = require('librechat-data-provider'); const { EModelEndpoint } = require('librechat-data-provider');
const { generateShortLivedToken } = require('@librechat/api'); const { generateShortLivedToken } = require('@librechat/api');
const { resizeImageBuffer } = require('~/server/services/Files/images/resize');
const { getBufferMetadata } = require('~/server/utils'); const { getBufferMetadata } = require('~/server/utils');
const paths = require('~/config/paths'); const paths = require('~/config/paths');
@ -286,7 +287,18 @@ async function uploadLocalFile({ req, file, file_id }) {
await fs.promises.writeFile(newPath, inputBuffer); await fs.promises.writeFile(newPath, inputBuffer);
const filepath = path.posix.join('/', 'uploads', req.user.id, path.basename(newPath)); const filepath = path.posix.join('/', 'uploads', req.user.id, path.basename(newPath));
return { filepath, bytes }; let height, width;
if (file.mimetype && file.mimetype.startsWith('image/')) {
try {
const { width: imgWidth, height: imgHeight } = await resizeImageBuffer(inputBuffer, 'high');
height = imgHeight;
width = imgWidth;
} catch (error) {
logger.warn('[uploadLocalFile] Could not get image dimensions:', error.message);
}
}
return { filepath, bytes, height, width };
} }
/** /**
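uploadLocalFile now records image dimensions alongside the saved path, using the project's resizeImageBuffer helper at its 'high' resolution setting. The same probe can be expressed directly with sharp, which this repository already uses; a minimal sketch, where getImageDimensions is an illustrative name:

const sharp = require('sharp');

// Returns { width, height } for an image buffer, or undefined values on failure.
async function getImageDimensions(buffer) {
  try {
    const { width, height } = await sharp(buffer).metadata();
    return { width, height };
  } catch {
    // Non-image or corrupt buffers simply yield no dimensions, as in the diff above.
    return { width: undefined, height: undefined };
  }
}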

View file

@ -1,7 +1,7 @@
const fs = require('fs'); const fs = require('fs');
const { sleep } = require('@librechat/agents');
const { logger } = require('@librechat/data-schemas');
const { FilePurpose } = require('librechat-data-provider'); const { FilePurpose } = require('librechat-data-provider');
const { sleep } = require('~/server/utils');
const { logger } = require('~/config');
/** /**
* Uploads a file that can be used across various OpenAI services. * Uploads a file that can be used across various OpenAI services.

View file

@ -1,15 +1,15 @@
const fs = require('fs'); const fs = require('fs');
const fetch = require('node-fetch'); const fetch = require('node-fetch');
const { initializeS3 } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { FileSources } = require('librechat-data-provider'); const { FileSources } = require('librechat-data-provider');
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');
const { const {
PutObjectCommand, PutObjectCommand,
GetObjectCommand, GetObjectCommand,
HeadObjectCommand, HeadObjectCommand,
DeleteObjectCommand, DeleteObjectCommand,
} = require('@aws-sdk/client-s3'); } = require('@aws-sdk/client-s3');
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');
const { initializeS3 } = require('./initialize');
const { logger } = require('~/config');
const bucketName = process.env.AWS_BUCKET_NAME; const bucketName = process.env.AWS_BUCKET_NAME;
const defaultBasePath = 'images'; const defaultBasePath = 'images';
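initializeS3 now comes from @librechat/api rather than the local ./initialize module. A sketch of how these imports are typically combined into a presigned download URL, assuming initializeS3 returns a configured S3Client; the getS3DownloadURL name and the one-hour expiry are illustrative:

const { GetObjectCommand } = require('@aws-sdk/client-s3');
const { getSignedUrl } = require('@aws-sdk/s3-request-presigner');
const { initializeS3 } = require('@librechat/api');

async function getS3DownloadURL(key, expiresIn = 3600) {
  const s3 = initializeS3(); // assumed to return an S3Client instance
  const command = new GetObjectCommand({ Bucket: process.env.AWS_BUCKET_NAME, Key: key });
  return getSignedUrl(s3, command, { expiresIn });
}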

View file

@ -1,9 +1,7 @@
const crud = require('./crud'); const crud = require('./crud');
const images = require('./images'); const images = require('./images');
const initialize = require('./initialize');
module.exports = { module.exports = {
...crud, ...crud,
...images, ...images,
...initialize,
}; };

View file

@ -1,9 +1,9 @@
const sharp = require('sharp'); const sharp = require('sharp');
const fs = require('fs').promises; const fs = require('fs').promises;
const fetch = require('node-fetch'); const fetch = require('node-fetch');
const { logger } = require('@librechat/data-schemas');
const { EImageOutputType } = require('librechat-data-provider'); const { EImageOutputType } = require('librechat-data-provider');
const { resizeAndConvert } = require('./resize'); const { resizeAndConvert } = require('./resize');
const { logger } = require('~/config');
/** /**
* Uploads an avatar image for a user. This function can handle various types of input (URL, Buffer, or File object), * Uploads an avatar image for a user. This function can handle various types of input (URL, Buffer, or File object),

View file

@ -1,9 +1,9 @@
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
const sharp = require('sharp'); const sharp = require('sharp');
const { logger } = require('@librechat/data-schemas');
const { getStrategyFunctions } = require('../strategies'); const { getStrategyFunctions } = require('../strategies');
const { resizeImageBuffer } = require('./resize'); const { resizeImageBuffer } = require('./resize');
const { logger } = require('~/config');
/** /**
* Converts an image file or buffer to target output type with specified resolution. * Converts an image file or buffer to target output type with specified resolution.

View file

@ -522,11 +522,6 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
} }
const isImage = file.mimetype.startsWith('image'); const isImage = file.mimetype.startsWith('image');
if (!isImage && !tool_resource) {
/** Note: this needs to be removed when we can support files to providers */
throw new Error('No tool resource provided for non-image agent file upload');
}
let fileInfoMetadata; let fileInfoMetadata;
const entity_id = messageAttachment === true ? undefined : agent_id; const entity_id = messageAttachment === true ? undefined : agent_id;
const basePath = mime.getType(file.originalname)?.startsWith('image') ? 'images' : 'uploads'; const basePath = mime.getType(file.originalname)?.startsWith('image') ? 'images' : 'uploads';

View file

@ -1,8 +1,8 @@
const { getOpenIdConfig } = require('~/strategies/openidStrategy');
const { logger } = require('~/config');
const { CacheKeys } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const client = require('openid-client'); const client = require('openid-client');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider');
const { getOpenIdConfig } = require('~/strategies/openidStrategy');
const getLogStores = require('~/cache/getLogStores');
/** /**
* Get Microsoft Graph API token using existing token exchange mechanism * Get Microsoft Graph API token using existing token exchange mechanism

View file

@ -450,7 +450,7 @@ async function getMCPSetupData(userId) {
logger.error(`[MCP][User: ${userId}] Error getting app connections:`, error); logger.error(`[MCP][User: ${userId}] Error getting app connections:`, error);
} }
const userConnections = mcpManager.getUserConnections(userId) || new Map(); const userConnections = mcpManager.getUserConnections(userId) || new Map();
const oauthServers = mcpManager.getOAuthServers() || new Set(); const oauthServers = mcpManager.getOAuthServers();
return { return {
mcpConfig, mcpConfig,

View file

@ -170,7 +170,7 @@ describe('tests for the new helper functions used by the MCP connection status e
const mockMCPManager = { const mockMCPManager = {
appConnections: { getAll: jest.fn(() => null) }, appConnections: { getAll: jest.fn(() => null) },
getUserConnections: jest.fn(() => null), getUserConnections: jest.fn(() => null),
getOAuthServers: jest.fn(() => null), getOAuthServers: jest.fn(() => new Set()),
}; };
mockGetMCPManager.mockReturnValue(mockMCPManager); mockGetMCPManager.mockReturnValue(mockMCPManager);
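Both changes rely on the same contract: MCPManager.getOAuthServers() always returns a Set (possibly empty), never null, so the '|| new Set()' fallback is dropped and the test mock returns a real Set. A minimal sketch of that contract, where MCPManagerExample is illustrative and not the actual class:

class MCPManagerExample {
  constructor() {
    /** @type {Set<string>} names of MCP servers that require OAuth */
    this.oauthServers = new Set();
  }

  // Always returns a Set, so callers can iterate or spread it without a fallback.
  getOAuthServers() {
    return this.oauthServers;
  }
}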

Some files were not shown because too many files have changed in this diff.