Mirror of https://github.com/danny-avila/LibreChat.git (synced 2026-01-26 04:06:12 +01:00)

Merge branch 'main' into feature/entra-id-azure-integration

Commit a7cf1ae27b
241 changed files with 25653 additions and 3303 deletions
@@ -1,5 +1,6 @@
 const OpenAI = require('openai');
 const { ProxyAgent } = require('undici');
+const { isUserProvided } = require('@librechat/api');
 const { ErrorTypes, EModelEndpoint } = require('librechat-data-provider');
 const {
   getUserKeyValues,
@@ -7,7 +8,6 @@ const {
   checkUserKeyExpiry,
 } = require('~/server/services/UserService');
 const OAIClient = require('~/app/clients/OpenAIClient');
-const { isUserProvided } = require('~/server/utils');
 
 const initializeClient = async ({ req, res, endpointOption, version, initAppClient = false }) => {
   const { PROXY, OPENAI_ORGANIZATION, ASSISTANTS_API_KEY, ASSISTANTS_BASE_URL } = process.env;
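The hunks above move the `isUserProvided` import from the local `~/server/utils` module to the shared `@librechat/api` package. Together with the other imports visible here (`getUserKeyValues`, `checkUserKeyExpiry`, `ASSISTANTS_API_KEY`), they point at the usual "user-provided key" gating pattern. The sketch below only illustrates that pattern under the assumption that `isUserProvided` checks for a `'user_provided'` sentinel; the surrounding helper names are hypothetical, not LibreChat's actual code.

// Illustrative sketch only: the "user_provided" gating pattern implied by the imports above.
// Assumption: isUserProvided(value) checks for the 'user_provided' sentinel; fetchUserKey is a
// hypothetical stand-in for helpers such as getUserKeyValues/checkUserKeyExpiry.
const isUserProvided = (value) => value === 'user_provided';

async function resolveAssistantsApiKey(userId, fetchUserKey) {
  const { ASSISTANTS_API_KEY } = process.env;
  if (isUserProvided(ASSISTANTS_API_KEY)) {
    // Admin configured "user_provided": the key must come from the user's stored credentials.
    const userKey = await fetchUserKey(userId);
    if (!userKey) {
      throw new Error('No user-provided Assistants API key found.');
    }
    return userKey;
  }
  // Otherwise the environment variable itself holds the key.
  return ASSISTANTS_API_KEY;
}

The hunks that follow appear to come from the custom endpoint initializer (file paths are not shown in this extract).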
@@ -1,9 +1,4 @@
-const {
-  resolveHeaders,
-  isUserProvided,
-  getOpenAIConfig,
-  getCustomEndpointConfig,
-} = require('@librechat/api');
+const { isUserProvided, getOpenAIConfig, getCustomEndpointConfig } = require('@librechat/api');
 const {
   CacheKeys,
   ErrorTypes,
@@ -34,14 +29,6 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
   const CUSTOM_API_KEY = extractEnvVariable(endpointConfig.apiKey);
   const CUSTOM_BASE_URL = extractEnvVariable(endpointConfig.baseURL);
 
-  /** Intentionally excludes passing `body`, i.e. `req.body`, as
-   * values may not be accurate until `AgentClient` is initialized
-   */
-  let resolvedHeaders = resolveHeaders({
-    headers: endpointConfig.headers,
-    user: req.user,
-  });
-
   if (CUSTOM_API_KEY.match(envVarRegex)) {
     throw new Error(`Missing API Key for ${endpoint}.`);
   }
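The removed block above resolved header templates eagerly during initialization, with only `req.user` available; its own comment notes that `req.body` values may not be accurate until `AgentClient` is initialized. As a minimal sketch of what such template resolution does, assuming simple `{{PLACEHOLDER}}` substitution against user fields (placeholders as in the test changes further down), and not the actual `resolveHeaders` from `@librechat/api`:

// Minimal sketch of {{PLACEHOLDER}}-style header resolution against user fields.
// Assumption: placeholders look like {{LIBRECHAT_USER_ID}} / {{LIBRECHAT_USER_EMAIL}};
// this is not the real resolveHeaders implementation.
function resolveUserHeaderTemplates(headers = {}, user = {}) {
  const values = {
    LIBRECHAT_USER_ID: user.id ?? '',
    LIBRECHAT_USER_EMAIL: user.email ?? '',
  };
  const resolved = {};
  for (const [name, template] of Object.entries(headers)) {
    // Substitute each {{KEY}} with its value; unknown keys become empty strings.
    resolved[name] = String(template).replace(/\{\{(\w+)\}\}/g, (_, key) => values[key] ?? '');
  }
  return resolved;
}

// resolveUserHeaderTemplates({ 'x-user': '{{LIBRECHAT_USER_ID}}' }, { id: 'user-123' })
//   -> { 'x-user': 'user-123' }

With this eager call removed, the raw templates are carried forward instead, as the next hunk makes explicit.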
@@ -108,7 +95,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
   }
 
   const customOptions = {
-    headers: resolvedHeaders,
+    headers: endpointConfig.headers,
     addParams: endpointConfig.addParams,
     dropParams: endpointConfig.dropParams,
     customParams: endpointConfig.customParams,
@@ -69,17 +69,21 @@ describe('custom/initializeClient', () => {
     });
   });
 
-  it('calls resolveHeaders with headers, user, and body for body placeholder support', async () => {
-    const { resolveHeaders } = require('@librechat/api');
-    await initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true });
-    expect(resolveHeaders).toHaveBeenCalledWith({
-      headers: { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
-      user: { id: 'user-123', email: 'test@example.com', role: 'user' },
-      body: { endpoint: 'test-endpoint' }, // body - supports {{LIBRECHAT_BODY_*}} placeholders
+  it('stores original template headers for deferred resolution', async () => {
+    /**
+     * Note: Request-based Header Resolution is deferred until right before LLM request is made
+     * in the OpenAIClient or AgentClient, not during initialization.
+     * This test verifies that the initialize function completes successfully with optionsOnly flag,
+     * and that headers are passed through to be resolved later during the actual LLM request.
+     */
+    const result = await initializeClient({
+      req: mockRequest,
+      res: mockResponse,
+      optionsOnly: true,
     });
+    // Verify that options are returned for later use
+    expect(result).toBeDefined();
+    expect(result).toHaveProperty('useLegacyContent', true);
   });
 
   it('throws if endpoint config is missing', async () => {
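The replaced test documents the new contract: initialization keeps the original template headers (and still returns `useLegacyContent: true`), while request-based resolution happens later in the OpenAIClient or AgentClient, where the request body is finally accurate. The removed test hints at why deferral matters for `{{LIBRECHAT_BODY_*}}` placeholders; the sketch below only illustrates, with hypothetical names, how such values could be derived from a body like `{ endpoint: 'test-endpoint' }`.

// Hypothetical helper: deriving {{LIBRECHAT_BODY_*}} values from the request body, per the
// placeholder named in the removed test above. Name and mapping are illustrative only.
function bodyPlaceholderValues(body = {}) {
  const values = {};
  for (const [key, value] of Object.entries(body)) {
    // e.g. { endpoint: 'test-endpoint' } -> { LIBRECHAT_BODY_ENDPOINT: 'test-endpoint' }
    values[`LIBRECHAT_BODY_${key.toUpperCase()}`] = String(value);
  }
  return values;
}

The final hunk below trims the provider initializer map.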
@@ -12,14 +12,13 @@ const initGoogle = require('~/server/services/Endpoints/google/initialize');
  * @returns {boolean} - True if the provider is a known custom provider, false otherwise
  */
 function isKnownCustomProvider(provider) {
-  return [Providers.XAI, Providers.OLLAMA, Providers.DEEPSEEK, Providers.OPENROUTER].includes(
+  return [Providers.XAI, Providers.DEEPSEEK, Providers.OPENROUTER].includes(
     provider?.toLowerCase() || '',
   );
 }
 
 const providerConfigMap = {
   [Providers.XAI]: initCustom,
-  [Providers.OLLAMA]: initCustom,
   [Providers.DEEPSEEK]: initCustom,
   [Providers.OPENROUTER]: initCustom,
   [EModelEndpoint.openAI]: initOpenAI,
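This hunk removes OLLAMA from both the known-custom-provider check and the provider-to-initializer map, so Ollama is no longer routed through `initCustom` here. A self-contained sketch of the dispatch pattern follows; the map entries mirror the hunk above, while the `Providers` constants, stub initializers, and error fallback are simplified assumptions rather than LibreChat's actual handling.

// Self-contained sketch of providerConfigMap-style dispatch. Entries mirror the hunk above;
// the constants, stub initializers, and fallback are simplified assumptions.
const Providers = { XAI: 'xai', DEEPSEEK: 'deepseek', OPENROUTER: 'openrouter' };

const initCustom = async ({ provider }) => ({ provider, kind: 'custom' });
const initOpenAI = async ({ provider }) => ({ provider, kind: 'openai' });

const providerConfigMap = {
  [Providers.XAI]: initCustom,
  [Providers.DEEPSEEK]: initCustom,
  [Providers.OPENROUTER]: initCustom,
  openai: initOpenAI,
};

function isKnownCustomProvider(provider) {
  return [Providers.XAI, Providers.DEEPSEEK, Providers.OPENROUTER].includes(
    provider?.toLowerCase() || '',
  );
}

async function initializeProvider(provider, opts = {}) {
  const init = providerConfigMap[provider?.toLowerCase()];
  if (!init) {
    // With OLLAMA removed from the map, 'ollama' now takes whatever non-custom path the caller
    // provides; this thrown error is a placeholder, not the repository's behavior.
    throw new Error(`No initializer registered for provider: ${provider}`);
  }
  return init({ ...opts, provider });
}

// Example: isKnownCustomProvider('ollama') === false; initializeProvider('xai').then(console.log);

Keeping dispatch in a plain object like this means adding or dropping a provider stays a one-line change, which is exactly what the hunk above does.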