Mirror of https://github.com/danny-avila/LibreChat.git (synced 2026-01-20 17:26:12 +01:00)

Merge branch 'main' into feature/entra-id-azure-integration

Commit a7cf1ae27b: 241 changed files with 25653 additions and 3303 deletions

@@ -176,7 +176,7 @@ const registerUser = async (user, additionalData = {}) => {
     return { status: 404, message: errorMessage };
   }
 
-  const { email, password, name, username } = user;
+  const { email, password, name, username, provider } = user;
 
   let newUserId;
   try {

@@ -207,7 +207,7 @@ const registerUser = async (user, additionalData = {}) => {
 
     const salt = bcrypt.genSaltSync(10);
     const newUserData = {
-      provider: 'local',
+      provider: provider ?? 'local',
       email,
       username,
       name,

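The nullish-coalescing fallback lets OpenID/Entra ID registrations record their originating provider, while plain signups keep the previous 'local' default. A minimal sketch of how the fallback resolves; the 'openid' value is an illustrative assumption, the diff only shows the fallback expression itself:

```js
// Illustrative only: how `provider ?? 'local'` resolves for different callers.
const resolveProvider = (provider) => provider ?? 'local';

console.log(resolveProvider(undefined)); // 'local'  (existing local registration path)
console.log(resolveProvider('openid'));  // 'openid' (e.g., an Entra ID / OpenID signup)
```
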
@@ -412,7 +412,7 @@ const setAuthTokens = async (userId, res, _session = null) => {
  * @param {string} [userId] - Optional MongoDB user ID for image path validation
  * @returns {String} - access token
  */
-const setOpenIDAuthTokens = (tokenset, res, userId) => {
+const setOpenIDAuthTokens = (tokenset, res, userId, existingRefreshToken) => {
   try {
     if (!tokenset) {
       logger.error('[setOpenIDAuthTokens] No tokenset found in request');

@@ -427,11 +427,25 @@ const setOpenIDAuthTokens = (tokenset, res, userId) => {
       logger.error('[setOpenIDAuthTokens] No tokenset found in request');
       return;
     }
-    if (!tokenset.access_token || !tokenset.refresh_token) {
-      logger.error('[setOpenIDAuthTokens] No access or refresh token found in tokenset');
+    if (!tokenset.access_token) {
+      logger.error('[setOpenIDAuthTokens] No access token found in tokenset');
       return;
     }
-    res.cookie('refreshToken', tokenset.refresh_token, {
+
+    const refreshToken = tokenset.refresh_token || existingRefreshToken;
+
+    if (!refreshToken) {
+      logger.error('[setOpenIDAuthTokens] No refresh token available');
+      return;
+    }
+
+    res.cookie('refreshToken', refreshToken, {
       expires: expirationDate,
       httpOnly: true,
       secure: isProduction,
+      sameSite: 'strict',
+    });
+    res.cookie('openid_access_token', tokenset.access_token, {
+      expires: expirationDate,
+      httpOnly: true,
+      secure: isProduction,

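Some OpenID providers omit refresh_token on renewal responses, so the new existingRefreshToken parameter lets the caller keep the token it already holds instead of bailing out. A minimal sketch of the fallback, with illustrative tokenset values:

```js
// Sketch of the fallback introduced above; the tokensets are illustrative.
const pickRefreshToken = (tokenset, existingRefreshToken) =>
  tokenset.refresh_token || existingRefreshToken;

// IdP rotated the refresh token: use the new one.
console.log(pickRefreshToken({ access_token: 'at1', refresh_token: 'rt2' }, 'rt1')); // 'rt2'

// IdP omitted refresh_token on renewal: fall back to the token we already hold.
console.log(pickRefreshToken({ access_token: 'at1' }, 'rt1')); // 'rt1'
```
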
@@ -16,6 +16,11 @@ async function updateMCPServerTools({ userId, serverName, tools }) {
   const serverTools = {};
   const mcpDelimiter = Constants.mcp_delimiter;
 
+  if (tools == null || tools.length === 0) {
+    logger.debug(`[MCP Cache] No tools to update for server ${serverName} (user: ${userId})`);
+    return serverTools;
+  }
+
   for (const tool of tools) {
     const name = `${tool.name}${mcpDelimiter}${serverName}`;
     serverTools[name] = {

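The guard turns a null or empty tools list into a logged no-op instead of a TypeError in the for...of loop. Cache keys join tool and server names with Constants.mcp_delimiter; a sketch of the naming scheme, assuming a delimiter value of '_mcp_' (the real value lives in librechat-data-provider):

```js
// Sketch of the cache-key naming; the delimiter value is an assumption.
const mcpDelimiter = '_mcp_';
const serverName = 'github';
const tools = [{ name: 'search' }, { name: 'create_issue' }];

const serverTools = {};
for (const tool of tools) {
  serverTools[`${tool.name}${mcpDelimiter}${serverName}`] = tool;
}
console.log(Object.keys(serverTools)); // ['search_mcp_github', 'create_issue_mcp_github']
```
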
@@ -1,5 +1,6 @@
 const OpenAI = require('openai');
 const { ProxyAgent } = require('undici');
+const { isUserProvided } = require('@librechat/api');
 const { ErrorTypes, EModelEndpoint } = require('librechat-data-provider');
 const {
   getUserKeyValues,

@@ -7,7 +8,6 @@ const {
   checkUserKeyExpiry,
 } = require('~/server/services/UserService');
 const OAIClient = require('~/app/clients/OpenAIClient');
-const { isUserProvided } = require('~/server/utils');
 
 const initializeClient = async ({ req, res, endpointOption, version, initAppClient = false }) => {
   const { PROXY, OPENAI_ORGANIZATION, ASSISTANTS_API_KEY, ASSISTANTS_BASE_URL } = process.env;

@@ -1,9 +1,4 @@
-const {
-  resolveHeaders,
-  isUserProvided,
-  getOpenAIConfig,
-  getCustomEndpointConfig,
-} = require('@librechat/api');
+const { isUserProvided, getOpenAIConfig, getCustomEndpointConfig } = require('@librechat/api');
 const {
   CacheKeys,
   ErrorTypes,

@@ -34,14 +29,6 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
   const CUSTOM_API_KEY = extractEnvVariable(endpointConfig.apiKey);
   const CUSTOM_BASE_URL = extractEnvVariable(endpointConfig.baseURL);
 
-  /** Intentionally excludes passing `body`, i.e. `req.body`, as
-   * values may not be accurate until `AgentClient` is initialized
-   */
-  let resolvedHeaders = resolveHeaders({
-    headers: endpointConfig.headers,
-    user: req.user,
-  });
-
   if (CUSTOM_API_KEY.match(envVarRegex)) {
     throw new Error(`Missing API Key for ${endpoint}.`);
   }

@@ -108,7 +95,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
   }
 
   const customOptions = {
-    headers: resolvedHeaders,
+    headers: endpointConfig.headers,
     addParams: endpointConfig.addParams,
     dropParams: endpointConfig.dropParams,
     customParams: endpointConfig.customParams,

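Together with the import change above, this defers header placeholder resolution: initialization now passes the raw header templates through, and resolution happens right before the LLM request, once the user and request body are final. A sketch of the idea; resolveTemplate is a hypothetical stand-in for @librechat/api's resolveHeaders:

```js
// Sketch only: `resolveTemplate` is a stand-in for @librechat/api's resolveHeaders,
// illustrating why resolution must wait until the request body is final.
const headerTemplates = { 'x-user': '{{LIBRECHAT_USER_ID}}' };

function resolveTemplate(templates, values) {
  return Object.fromEntries(
    Object.entries(templates).map(([key, value]) => [
      key,
      value.replace(/\{\{(\w+)\}\}/g, (_, name) => values[name] ?? ''),
    ]),
  );
}

// At init time we only store the templates...
const customOptions = { headers: headerTemplates };

// ...and resolve them later, just before the request, when the values are known.
const resolved = resolveTemplate(customOptions.headers, { LIBRECHAT_USER_ID: 'user-123' });
console.log(resolved); // { 'x-user': 'user-123' }
```
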
@@ -69,17 +69,21 @@ describe('custom/initializeClient', () => {
     });
   });
 
-  it('calls resolveHeaders with headers, user, and body for body placeholder support', async () => {
-    const { resolveHeaders } = require('@librechat/api');
-    await initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true });
-    expect(resolveHeaders).toHaveBeenCalledWith({
-      headers: { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
-      user: { id: 'user-123', email: 'test@example.com', role: 'user' },
-      body: { endpoint: 'test-endpoint' }, // body - supports {{LIBRECHAT_BODY_*}} placeholders
-    });
-  });
+  it('stores original template headers for deferred resolution', async () => {
+    /**
+     * Note: Request-based Header Resolution is deferred until right before LLM request is made
+     * in the OpenAIClient or AgentClient, not during initialization.
+     * This test verifies that the initialize function completes successfully with optionsOnly flag,
+     * and that headers are passed through to be resolved later during the actual LLM request.
+     */
+    const result = await initializeClient({
+      req: mockRequest,
+      res: mockResponse,
+      optionsOnly: true,
+    });
+    // Verify that options are returned for later use
+    expect(result).toBeDefined();
+    expect(result).toHaveProperty('useLegacyContent', true);
+  });
 
   it('throws if endpoint config is missing', async () => {

@@ -12,14 +12,13 @@ const initGoogle = require('~/server/services/Endpoints/google/initialize');
  * @returns {boolean} - True if the provider is a known custom provider, false otherwise
  */
 function isKnownCustomProvider(provider) {
-  return [Providers.XAI, Providers.OLLAMA, Providers.DEEPSEEK, Providers.OPENROUTER].includes(
+  return [Providers.XAI, Providers.DEEPSEEK, Providers.OPENROUTER].includes(
     provider?.toLowerCase() || '',
   );
 }
 
 const providerConfigMap = {
   [Providers.XAI]: initCustom,
-  [Providers.OLLAMA]: initCustom,
   [Providers.DEEPSEEK]: initCustom,
   [Providers.OPENROUTER]: initCustom,
   [EModelEndpoint.openAI]: initOpenAI,

@@ -3,7 +3,8 @@ const fs = require('fs').promises;
 const FormData = require('form-data');
 const { Readable } = require('stream');
 const { logger } = require('@librechat/data-schemas');
-const { genAzureEndpoint } = require('@librechat/api');
+const { HttpsProxyAgent } = require('https-proxy-agent');
+const { genAzureEndpoint, logAxiosError } = require('@librechat/api');
 const { extractEnvVariable, STTProviders } = require('librechat-data-provider');
 const { getAppConfig } = require('~/server/services/Config');

@@ -34,6 +35,34 @@ const MIME_TO_EXTENSION_MAP = {
   'audio/x-flac': 'flac',
 };
 
+/**
+ * Validates and extracts ISO-639-1 language code from a locale string.
+ * @param {string} language - The language/locale string (e.g., "en-US", "en", "zh-CN")
+ * @returns {string|null} The ISO-639-1 language code (e.g., "en") or null if invalid
+ */
+function getValidatedLanguageCode(language) {
+  try {
+    if (!language) {
+      return null;
+    }
+
+    const normalizedLanguage = language.toLowerCase();
+    const isValidLocaleCode = /^[a-z]{2}(-[a-z]{2})?$/.test(normalizedLanguage);
+
+    if (isValidLocaleCode) {
+      return normalizedLanguage.split('-')[0];
+    }
+
+    logger.warn(
+      `[STT] Invalid language format "${language}". Expected ISO-639-1 locale code like "en-US" or "en". Skipping language parameter.`,
+    );
+    return null;
+  } catch (error) {
+    logger.error(`[STT] Error validating language code "${language}":`, error);
+    return null;
+  }
+}
+
 /**
  * Gets the file extension from the MIME type.
  * @param {string} mimeType - The MIME type.

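The validator replaces the old unconditional language.split('-')[0], which forwarded arbitrary strings (e.g., 'english') to the STT provider. A condensed, runnable equivalent of the function above with example inputs:

```js
// Condensed equivalent of getValidatedLanguageCode from the hunk above.
const getLang = (language) => {
  if (!language) return null;
  const normalized = language.toLowerCase();
  return /^[a-z]{2}(-[a-z]{2})?$/.test(normalized) ? normalized.split('-')[0] : null;
};

console.log(getLang('en-US'));   // 'en'
console.log(getLang('en'));      // 'en'
console.log(getLang('zh-CN'));   // 'zh'
console.log(getLang('english')); // null (the real function also logs a warning)
```
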
@@ -172,10 +201,9 @@ class STTService {
       model: sttSchema.model,
     };
 
-    if (language) {
-      /** Converted locale code (e.g., "en-US") to ISO-639-1 format (e.g., "en") */
-      const isoLanguage = language.split('-')[0];
-      data.language = isoLanguage;
+    const validLanguage = getValidatedLanguageCode(language);
+    if (validLanguage) {
+      data.language = validLanguage;
     }
 
     const headers = {

@@ -220,10 +248,9 @@ class STTService {
       contentType: audioFile.mimetype,
     });
 
-    if (language) {
-      /** Converted locale code (e.g., "en-US") to ISO-639-1 format (e.g., "en") */
-      const isoLanguage = language.split('-')[0];
-      formData.append('language', isoLanguage);
+    const validLanguage = getValidatedLanguageCode(language);
+    if (validLanguage) {
+      formData.append('language', validLanguage);
     }
 
     const headers = {

@@ -266,8 +293,14 @@ class STTService {
       language,
     );
 
+    const options = { headers };
+
+    if (process.env.PROXY) {
+      options.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
+    }
+
     try {
-      const response = await axios.post(url, data, { headers });
+      const response = await axios.post(url, data, options);
 
       if (response.status !== 200) {
         throw new Error('Invalid response from the STT API');

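STT requests now honor the PROXY environment variable by attaching an HttpsProxyAgent to the axios call (TTSService below gets the same treatment). A minimal standalone sketch of the pattern; the proxy URL is an example value:

```js
// Minimal sketch of the proxy pattern used above; the URL is an example value.
const axios = require('axios');
const { HttpsProxyAgent } = require('https-proxy-agent');

const options = { headers: { Authorization: 'Bearer <key>' } };
if (process.env.PROXY) {
  // e.g. PROXY=http://proxy.internal:3128
  options.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
}

// axios tunnels HTTPS requests through the agent when it is set:
// await axios.post(url, data, options);
```
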
@@ -279,7 +312,7 @@ class STTService {
 
       return response.data.text.trim();
     } catch (error) {
-      logger.error(`STT request failed for provider ${provider}:`, error);
+      logAxiosError({ message: `STT request failed for provider ${provider}:`, error });
       throw error;
     }
   }

@@ -309,7 +342,7 @@ class STTService {
       const text = await this.sttRequest(provider, sttSchema, { audioBuffer, audioFile, language });
       res.json({ text });
     } catch (error) {
-      logger.error('An error occurred while processing the audio:', error);
+      logAxiosError({ message: 'An error occurred while processing the audio:', error });
       res.sendStatus(500);
     } finally {
       try {

@@ -1,6 +1,7 @@
 const axios = require('axios');
 const { logger } = require('@librechat/data-schemas');
-const { genAzureEndpoint } = require('@librechat/api');
+const { HttpsProxyAgent } = require('https-proxy-agent');
+const { genAzureEndpoint, logAxiosError } = require('@librechat/api');
 const { extractEnvVariable, TTSProviders } = require('librechat-data-provider');
 const { getRandomVoiceId, createChunkProcessor, splitTextIntoChunks } = require('./streamAudio');
 const { getAppConfig } = require('~/server/services/Config');

@@ -266,10 +267,14 @@ class TTSService {
 
     const options = { headers, responseType: stream ? 'stream' : 'arraybuffer' };
 
+    if (process.env.PROXY) {
+      options.httpsAgent = new HttpsProxyAgent(process.env.PROXY);
+    }
+
     try {
       return await axios.post(url, data, options);
     } catch (error) {
-      logger.error(`TTS request failed for provider ${provider}:`, error);
+      logAxiosError({ message: `TTS request failed for provider ${provider}:`, error });
       throw error;
     }
   }

@@ -325,7 +330,10 @@ class TTSService {
           break;
         }
       } catch (innerError) {
-        logger.error('Error processing manual update:', chunk, innerError);
+        logAxiosError({
+          message: `[TTS] Error processing manual update for chunk: ${chunk?.text?.substring(0, 50)}...`,
+          error: innerError,
+        });
         if (!res.headersSent) {
           return res.status(500).end();
         }

@@ -337,7 +345,7 @@ class TTSService {
         res.end();
       }
     } catch (error) {
-      logger.error('Error creating the audio stream:', error);
+      logAxiosError({ message: '[TTS] Error creating the audio stream:', error });
       if (!res.headersSent) {
         return res.status(500).send('An error occurred');
       }

@@ -407,7 +415,10 @@ class TTSService {
           break;
         }
       } catch (innerError) {
-        logger.error('Error processing audio stream update:', update, innerError);
+        logAxiosError({
+          message: `[TTS] Error processing audio stream update: ${update?.text?.substring(0, 50)}...`,
+          error: innerError,
+        });
         if (!res.headersSent) {
           return res.status(500).end();
         }

@@ -424,7 +435,7 @@ class TTSService {
       res.end();
     }
   } catch (error) {
-    logger.error('Failed to fetch audio:', error);
+    logAxiosError({ message: '[TTS] Failed to fetch audio:', error });
     if (!res.headersSent) {
       res.status(500).end();
     }

@@ -1,9 +1,9 @@
 const path = require('path');
 const { v4 } = require('uuid');
 const axios = require('axios');
-const { logAxiosError } = require('@librechat/api');
 const { logger } = require('@librechat/data-schemas');
 const { getCodeBaseURL } = require('@librechat/agents');
+const { logAxiosError, getBasePath } = require('@librechat/api');
 const {
   Tools,
   FileContext,

@@ -41,11 +41,12 @@ const processCodeOutput = async ({
   const appConfig = req.config;
   const currentDate = new Date();
   const baseURL = getCodeBaseURL();
+  const basePath = getBasePath();
   const fileExt = path.extname(name);
   if (!fileExt || !imageExtRegex.test(name)) {
     return {
       filename: name,
-      filepath: `/api/files/code/download/${session_id}/${id}`,
+      filepath: `${basePath}/api/files/code/download/${session_id}/${id}`,
       /** Note: expires 24 hours after creation */
       expiresAt: currentDate.getTime() + 86400000,
       conversationId,

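When LibreChat is served under a sub-path behind a reverse proxy, absolute /api/... download links break; prefixing with getBasePath() keeps them valid. An illustrative sketch that assumes getBasePath() returns '' for root deployments and the sub-path otherwise (the actual derivation lives in @librechat/api):

```js
// Illustrative only: assumes getBasePath() yields '' at the root and the
// sub-path (e.g. '/librechat') for sub-path deployments.
const buildDownloadPath = (basePath, sessionId, fileId) =>
  `${basePath}/api/files/code/download/${sessionId}/${fileId}`;

console.log(buildDownloadPath('', 'sess1', 'file1'));
// '/api/files/code/download/sess1/file1'
console.log(buildDownloadPath('/librechat', 'sess1', 'file1'));
// '/librechat/api/files/code/download/sess1/file1'
```
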
@@ -169,14 +169,24 @@ function extractFirebaseFilePath(urlString) {
 const deleteFirebaseFile = async (req, file) => {
   if (file.embedded && process.env.RAG_API_URL) {
     const jwtToken = req.headers.authorization.split(' ')[1];
-    axios.delete(`${process.env.RAG_API_URL}/documents`, {
-      headers: {
-        Authorization: `Bearer ${jwtToken}`,
-        'Content-Type': 'application/json',
-        accept: 'application/json',
-      },
-      data: [file.file_id],
-    });
+    try {
+      await axios.delete(`${process.env.RAG_API_URL}/documents`, {
+        headers: {
+          Authorization: `Bearer ${jwtToken}`,
+          'Content-Type': 'application/json',
+          accept: 'application/json',
+        },
+        data: [file.file_id],
+      });
+    } catch (error) {
+      if (error.response?.status === 404) {
+        logger.warn(
+          `[deleteFirebaseFile] Document ${file.file_id} not found in RAG API, may have been deleted already`,
+        );
+      } else {
+        logger.error('[deleteFirebaseFile] Error deleting document from RAG API:', error);
+      }
+    }
   }
 
   const fileName = extractFirebaseFilePath(file.filepath);

@@ -210,14 +210,24 @@ const deleteLocalFile = async (req, file) => {
 
   if (file.embedded && process.env.RAG_API_URL) {
     const jwtToken = generateShortLivedToken(req.user.id);
-    axios.delete(`${process.env.RAG_API_URL}/documents`, {
-      headers: {
-        Authorization: `Bearer ${jwtToken}`,
-        'Content-Type': 'application/json',
-        accept: 'application/json',
-      },
-      data: [file.file_id],
-    });
+    try {
+      await axios.delete(`${process.env.RAG_API_URL}/documents`, {
+        headers: {
+          Authorization: `Bearer ${jwtToken}`,
+          'Content-Type': 'application/json',
+          accept: 'application/json',
+        },
+        data: [file.file_id],
+      });
+    } catch (error) {
+      if (error.response?.status === 404) {
+        logger.warn(
+          `[deleteLocalFile] Document ${file.file_id} not found in RAG API, may have been deleted already`,
+        );
+      } else {
+        logger.error('[deleteLocalFile] Error deleting document from RAG API:', error);
+      }
+    }
   }
 
   if (cleanFilepath.startsWith(`/uploads/${req.user.id}`)) {

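Previously both storage backends fired the RAG API delete without awaiting it, so failures surfaced only as unhandled rejections. The new pattern awaits the call and treats 404 as benign, since the document may already be gone. A condensed sketch of that error policy, factored into a helper for illustration:

```js
// Condensed sketch of the 404-tolerant delete used in both hunks above.
async function deleteEmbeddedDocument(axios, ragApiUrl, jwtToken, fileId, logger) {
  try {
    await axios.delete(`${ragApiUrl}/documents`, {
      headers: { Authorization: `Bearer ${jwtToken}` },
      data: [fileId], // the RAG API takes a JSON array of file IDs in the request body
    });
  } catch (error) {
    if (error.response?.status === 404) {
      logger.warn(`Document ${fileId} not found in RAG API, may have been deleted already`);
    } else {
      logger.error('Error deleting document from RAG API:', error);
    }
  }
}
```
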
@@ -1,11 +1,14 @@
 const axios = require('axios');
 const { Providers } = require('@librechat/agents');
 const { logger } = require('@librechat/data-schemas');
 const { HttpsProxyAgent } = require('https-proxy-agent');
-const { logAxiosError, inputSchema, processModelData } = require('@librechat/api');
-const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
+const { logAxiosError, inputSchema, processModelData, isUserProvided } = require('@librechat/api');
+const {
+  CacheKeys,
+  defaultModels,
+  KnownEndpoints,
+  EModelEndpoint,
+} = require('librechat-data-provider');
 const { OllamaClient } = require('~/app/clients/OllamaClient');
-const { isUserProvided } = require('~/server/utils');
 const getLogStores = require('~/cache/getLogStores');
 const { extractBaseURL } = require('~/utils');

@@ -68,7 +71,7 @@ const fetchModels = async ({
     return models;
   }
 
-  if (name && name.toLowerCase().startsWith(Providers.OLLAMA)) {
+  if (name && name.toLowerCase().startsWith(KnownEndpoints.ollama)) {
     try {
       return await OllamaClient.fetchModels(baseURL, { headers, user: userObject });
     } catch (ollamaError) {

@@ -80,7 +83,9 @@ const fetchModels = async ({
 
   try {
     const options = {
-      headers: {},
+      headers: {
+        ...(headers ?? {}),
+      },
       timeout: 5000,
     };
 

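Spreading ...(headers ?? {}) merges caller-supplied headers while tolerating null or undefined, which the new tests further down pin down. A sketch of the merge; it assumes Authorization is applied after the spread, which is consistent with the tests showing it present alongside the merged headers:

```js
// Sketch of the merge; values are illustrative.
const headers = null; // or { 'X-Custom-Header': 'custom-value' }
const options = {
  headers: {
    ...(headers ?? {}), // spreading {} when headers is null/undefined is a no-op
  },
  timeout: 5000,
};
options.headers.Authorization = 'Bearer testApiKey'; // assumed to be set after the spread

console.log(options.headers); // { Authorization: 'Bearer testApiKey' }
```
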
@@ -101,7 +106,7 @@ const fetchModels = async ({
       options.headers['OpenAI-Organization'] = process.env.OPENAI_ORGANIZATION;
     }
 
-    const url = new URL(`${baseURL}${azure ? '' : '/models'}`);
+    const url = new URL(`${baseURL.replace(/\/+$/, '')}${azure ? '' : '/models'}`);
     if (user && userIdQuery) {
       url.searchParams.append('user', user);
     }

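baseURL.replace(/\/+$/, '') strips any run of trailing slashes before '/models' is appended, avoiding '//models' URLs that some gateways reject; query parameters are added afterwards via the URL API. A quick runnable check of the regex, mirroring the new tests in the next hunk:

```js
// Mirrors the behavior pinned by the new trailing-slash tests below.
const buildModelsUrl = (baseURL) => new URL(`${baseURL.replace(/\/+$/, '')}/models`);

console.log(buildModelsUrl('https://api.test.com/v1').href);    // https://api.test.com/v1/models
console.log(buildModelsUrl('https://api.test.com/v1/').href);   // https://api.test.com/v1/models
console.log(buildModelsUrl('https://api.test.com/v1///').href); // https://api.test.com/v1/models

const url = buildModelsUrl('https://api.test.com/v1/');
url.searchParams.append('user', 'user123');
console.log(url.href); // https://api.test.com/v1/models?user=user123
```
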
@@ -81,6 +81,70 @@ describe('fetchModels', () => {
     );
   });
 
+  it('should pass custom headers to the API request', async () => {
+    const customHeaders = {
+      'X-Custom-Header': 'custom-value',
+      'X-API-Version': 'v2',
+    };
+
+    await fetchModels({
+      user: 'user123',
+      apiKey: 'testApiKey',
+      baseURL: 'https://api.test.com',
+      name: 'TestAPI',
+      headers: customHeaders,
+    });
+
+    expect(axios.get).toHaveBeenCalledWith(
+      expect.stringContaining('https://api.test.com/models'),
+      expect.objectContaining({
+        headers: expect.objectContaining({
+          'X-Custom-Header': 'custom-value',
+          'X-API-Version': 'v2',
+          Authorization: 'Bearer testApiKey',
+        }),
+      }),
+    );
+  });
+
+  it('should handle null headers gracefully', async () => {
+    await fetchModels({
+      user: 'user123',
+      apiKey: 'testApiKey',
+      baseURL: 'https://api.test.com',
+      name: 'TestAPI',
+      headers: null,
+    });
+
+    expect(axios.get).toHaveBeenCalledWith(
+      expect.stringContaining('https://api.test.com/models'),
+      expect.objectContaining({
+        headers: expect.objectContaining({
+          Authorization: 'Bearer testApiKey',
+        }),
+      }),
+    );
+  });
+
+  it('should handle undefined headers gracefully', async () => {
+    await fetchModels({
+      user: 'user123',
+      apiKey: 'testApiKey',
+      baseURL: 'https://api.test.com',
+      name: 'TestAPI',
+      headers: undefined,
+    });
+
+    expect(axios.get).toHaveBeenCalledWith(
+      expect.stringContaining('https://api.test.com/models'),
+      expect.objectContaining({
+        headers: expect.objectContaining({
+          Authorization: 'Bearer testApiKey',
+        }),
+      }),
+    );
+  });
+
   afterEach(() => {
     jest.clearAllMocks();
   });

@@ -372,6 +436,68 @@ describe('fetchModels with Ollama specific logic', () => {
   });
 });
 
+describe('fetchModels URL construction with trailing slashes', () => {
+  beforeEach(() => {
+    axios.get.mockResolvedValue({
+      data: {
+        data: [{ id: 'model-1' }, { id: 'model-2' }],
+      },
+    });
+  });
+
+  afterEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('should not create double slashes when baseURL has a trailing slash', async () => {
+    await fetchModels({
+      user: 'user123',
+      apiKey: 'testApiKey',
+      baseURL: 'https://api.test.com/v1/',
+      name: 'TestAPI',
+    });
+
+    expect(axios.get).toHaveBeenCalledWith('https://api.test.com/v1/models', expect.any(Object));
+  });
+
+  it('should handle baseURL without trailing slash normally', async () => {
+    await fetchModels({
+      user: 'user123',
+      apiKey: 'testApiKey',
+      baseURL: 'https://api.test.com/v1',
+      name: 'TestAPI',
+    });
+
+    expect(axios.get).toHaveBeenCalledWith('https://api.test.com/v1/models', expect.any(Object));
+  });
+
+  it('should handle baseURL with multiple trailing slashes', async () => {
+    await fetchModels({
+      user: 'user123',
+      apiKey: 'testApiKey',
+      baseURL: 'https://api.test.com/v1///',
+      name: 'TestAPI',
+    });
+
+    expect(axios.get).toHaveBeenCalledWith('https://api.test.com/v1/models', expect.any(Object));
+  });
+
+  it('should correctly append query params after stripping trailing slashes', async () => {
+    await fetchModels({
+      user: 'user123',
+      apiKey: 'testApiKey',
+      baseURL: 'https://api.test.com/v1/',
+      name: 'TestAPI',
+      userIdQuery: true,
+    });
+
+    expect(axios.get).toHaveBeenCalledWith(
+      'https://api.test.com/v1/models?user=user123',
+      expect.any(Object),
+    );
+  });
+});
+
 describe('splitAndTrim', () => {
   it('should split a string by commas and trim each value', () => {
     const input = ' model1, model2 , model3,model4 ';

@@ -410,6 +536,64 @@ describe('getAnthropicModels', () => {
     const models = await getAnthropicModels();
     expect(models).toEqual(['claude-1', 'claude-2']);
   });
 
+  it('should use Anthropic-specific headers when fetching models', async () => {
+    delete process.env.ANTHROPIC_MODELS;
+    process.env.ANTHROPIC_API_KEY = 'test-anthropic-key';
+
+    axios.get.mockResolvedValue({
+      data: {
+        data: [{ id: 'claude-3' }, { id: 'claude-4' }],
+      },
+    });
+
+    await fetchModels({
+      user: 'user123',
+      apiKey: 'test-anthropic-key',
+      baseURL: 'https://api.anthropic.com/v1',
+      name: EModelEndpoint.anthropic,
+    });
+
+    expect(axios.get).toHaveBeenCalledWith(
+      expect.any(String),
+      expect.objectContaining({
+        headers: {
+          'x-api-key': 'test-anthropic-key',
+          'anthropic-version': expect.any(String),
+        },
+      }),
+    );
+  });
+
+  it('should pass custom headers for Anthropic endpoint', async () => {
+    const customHeaders = {
+      'X-Custom-Header': 'custom-value',
+    };
+
+    axios.get.mockResolvedValue({
+      data: {
+        data: [{ id: 'claude-3' }],
+      },
+    });
+
+    await fetchModels({
+      user: 'user123',
+      apiKey: 'test-anthropic-key',
+      baseURL: 'https://api.anthropic.com/v1',
+      name: EModelEndpoint.anthropic,
+      headers: customHeaders,
+    });
+
+    expect(axios.get).toHaveBeenCalledWith(
+      expect.any(String),
+      expect.objectContaining({
+        headers: {
+          'x-api-key': 'test-anthropic-key',
+          'anthropic-version': expect.any(String),
+        },
+      }),
+    );
+  });
 });
 
 describe('getGoogleModels', () => {

@@ -292,7 +292,7 @@ const ensurePrincipalExists = async function (principal) {
   let existingUser = await findUser({ idOnTheSource: principal.idOnTheSource });
 
   if (!existingUser) {
-    existingUser = await findUser({ email: principal.email.toLowerCase() });
+    existingUser = await findUser({ email: principal.email });
   }
 
   if (existingUser) {

@@ -1,5 +1,6 @@
 const path = require('path');
 const { v4 } = require('uuid');
+const { countTokens, escapeRegExp } = require('@librechat/api');
 const {
   Constants,
   ContentTypes,

@@ -8,7 +9,6 @@ const {
 } = require('librechat-data-provider');
 const { retrieveAndProcessFile } = require('~/server/services/Files/process');
 const { recordMessage, getMessages } = require('~/models/Message');
-const { countTokens, escapeRegExp } = require('~/server/utils');
 const { spendTokens } = require('~/models/spendTokens');
 const { saveConvo } = require('~/models/Conversation');