mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-19 01:40:15 +01:00
184 lines
5.7 KiB
TypeScript
import { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } from 'librechat-data-provider';
import type {
  InitializeOpenAIOptionsParams,
  OpenAIConfigOptions,
  LLMConfigResult,
  UserKeyValues,
} from '~/types';
import { getAzureCredentials, getEntraIdAccessToken, shouldUseEntraId } from '~/utils/azure';
import { isUserProvided } from '~/utils/common';
import { resolveHeaders } from '~/utils/env';
import { getOpenAIConfig } from './config';

/**
|
|
* Initializes OpenAI options for agent usage. This function always returns configuration
|
|
* options and never creates a client instance (equivalent to optionsOnly=true behavior).
|
|
*
|
|
* @param params - Configuration parameters
|
|
* @returns Promise resolving to OpenAI configuration options
|
|
* @throws Error if API key is missing or user key has expired
|
|
*/
|
|
export const initializeOpenAI = async ({
|
|
req,
|
|
appConfig,
|
|
overrideModel,
|
|
endpointOption,
|
|
overrideEndpoint,
|
|
getUserKeyValues,
|
|
checkUserKeyExpiry,
|
|
}: InitializeOpenAIOptionsParams): Promise<LLMConfigResult> => {
|
|
const { PROXY, OPENAI_API_KEY, AZURE_API_KEY, OPENAI_REVERSE_PROXY, AZURE_OPENAI_BASEURL } =
|
|
process.env;
|
|
|
|
const { key: expiresAt } = req.body;
|
|
const modelName = overrideModel ?? req.body.model;
|
|
const endpoint = overrideEndpoint ?? req.body.endpoint;
|
|
|
|
if (!endpoint) {
|
|
throw new Error('Endpoint is required');
|
|
}
|
|
|
|
const credentials = {
|
|
[EModelEndpoint.openAI]: OPENAI_API_KEY,
|
|
[EModelEndpoint.azureOpenAI]: AZURE_API_KEY,
|
|
};
|
|
|
|
const baseURLOptions = {
|
|
[EModelEndpoint.openAI]: OPENAI_REVERSE_PROXY,
|
|
[EModelEndpoint.azureOpenAI]: AZURE_OPENAI_BASEURL,
|
|
};
|
|
|
|
const userProvidesKey = isUserProvided(credentials[endpoint as keyof typeof credentials]);
|
|
const userProvidesURL = isUserProvided(baseURLOptions[endpoint as keyof typeof baseURLOptions]);
|
|
|
|
let userValues: UserKeyValues | null = null;
|
|
if (expiresAt && (userProvidesKey || userProvidesURL)) {
|
|
checkUserKeyExpiry(expiresAt, endpoint);
|
|
userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint });
|
|
}
|
|
|
|
let apiKey = userProvidesKey
|
|
? userValues?.apiKey
|
|
: credentials[endpoint as keyof typeof credentials];
|
|
const baseURL = userProvidesURL
|
|
? userValues?.baseURL
|
|
: baseURLOptions[endpoint as keyof typeof baseURLOptions];
|
|
|
|
const clientOptions: OpenAIConfigOptions = {
|
|
proxy: PROXY ?? undefined,
|
|
reverseProxyUrl: baseURL || undefined,
|
|
streaming: true,
|
|
};
|
|
|
|
const isAzureOpenAI = endpoint === EModelEndpoint.azureOpenAI;
|
|
const azureConfig = isAzureOpenAI && appConfig.endpoints?.[EModelEndpoint.azureOpenAI];
|
|
|
|
if (isAzureOpenAI && azureConfig) {
|
|
const { modelGroupMap, groupMap } = azureConfig;
|
|
const {
|
|
azureOptions,
|
|
baseURL: configBaseURL,
|
|
headers = {},
|
|
serverless,
|
|
} = mapModelToAzureConfig({
|
|
modelName: modelName || '',
|
|
modelGroupMap,
|
|
groupMap,
|
|
});
|
|
|
|
clientOptions.reverseProxyUrl = configBaseURL ?? clientOptions.reverseProxyUrl;
|
|
clientOptions.headers = resolveHeaders({
|
|
headers: { ...headers, ...(clientOptions.headers ?? {}) },
|
|
user: req.user,
|
|
});
|
|
|
|
const groupName = modelGroupMap[modelName || '']?.group;
|
|
if (groupName && groupMap[groupName]) {
|
|
clientOptions.addParams = groupMap[groupName]?.addParams;
|
|
clientOptions.dropParams = groupMap[groupName]?.dropParams;
|
|
}
|
|
|
|
apiKey = shouldUseEntraId() ? 'entra-id-placeholder' : azureOptions.azureOpenAIApiKey;
|
|
clientOptions.azure = !serverless ? azureOptions : undefined;
|
|
|
|
if (serverless === true) {
|
|
clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion
|
|
? { 'api-version': azureOptions.azureOpenAIApiVersion }
|
|
: undefined;
|
|
|
|
if (!clientOptions.headers) {
|
|
clientOptions.headers = {};
|
|
}
|
|
if (shouldUseEntraId()) {
|
|
clientOptions.headers['Authorization'] = `Bearer ${await getEntraIdAccessToken()}`;
|
|
} else {
|
|
clientOptions.headers['api-key'] = apiKey || '';
|
|
}
|
|
} else {
|
|
apiKey = azureOptions.azureOpenAIApiKey || '';
|
|
clientOptions.azure = azureOptions;
|
|
if (shouldUseEntraId()) {
|
|
apiKey = 'entra-id-placeholder';
|
|
clientOptions.headers['Authorization'] = `Bearer ${await getEntraIdAccessToken()}`;
|
|
}
|
|
}
|
|
} else if (isAzureOpenAI) {
|
|
clientOptions.azure =
|
|
userProvidesKey && userValues?.apiKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
|
|
if (shouldUseEntraId()) {
|
|
clientOptions.headers = {
|
|
...clientOptions.headers,
|
|
Authorization: `Bearer ${await getEntraIdAccessToken()}`,
|
|
};
|
|
} else {
|
|
apiKey = clientOptions.azure ? clientOptions.azure.azureOpenAIApiKey : undefined;
|
|
}
|
|
}
|
|
|
|
if (userProvidesKey && !apiKey) {
|
|
throw new Error(
|
|
JSON.stringify({
|
|
type: ErrorTypes.NO_USER_KEY,
|
|
}),
|
|
);
|
|
}
|
|
|
|
if (!apiKey) {
|
|
throw new Error(`${endpoint} API Key not provided.`);
|
|
}
|
|
|
|
const modelOptions = {
|
|
...endpointOption.model_parameters,
|
|
model: modelName,
|
|
user: req.user.id,
|
|
};
|
|
|
|
const finalClientOptions: OpenAIConfigOptions = {
|
|
...clientOptions,
|
|
modelOptions,
|
|
};
|
|
|
|
const options = getOpenAIConfig(apiKey, finalClientOptions, endpoint);
|
|
|
|
const openAIConfig = appConfig.endpoints?.[EModelEndpoint.openAI];
|
|
const allConfig = appConfig.endpoints?.all;
|
|
const azureRate = modelName?.includes('gpt-4') ? 30 : 17;
|
|
|
|
let streamRate: number | undefined;
|
|
|
|
if (isAzureOpenAI && azureConfig) {
|
|
streamRate = azureConfig.streamRate ?? azureRate;
|
|
} else if (!isAzureOpenAI && openAIConfig) {
|
|
streamRate = openAIConfig.streamRate;
|
|
}
|
|
|
|
if (allConfig?.streamRate) {
|
|
streamRate = allConfig.streamRate;
|
|
}
|
|
|
|
if (streamRate) {
|
|
options.llmConfig._lc_stream_delay = streamRate;
|
|
}
|
|
|
|
return options;
|
|
};
|