🔗 feat: User Provided Base URL for OpenAI endpoints (#1919)

* chore: bump browserslist-db@latest

* refactor(EndpointService): simplify with `generateConfig`, utilize optional baseURL for OpenAI-based endpoints, use `isUserProvided` helper fn wherever needed

* refactor(custom/initializeClient): use standardized naming for common variables

* feat: user provided baseURL for openAI-based endpoints

* refactor(custom/initializeClient): re-order operations

* fix: KnownEndpoints enum definition and add FetchTokenConfig, bump data-provider

* refactor(custom): use tokenKey dependent on userProvided conditions for caching and fetching endpointTokenConfig, anticipate token rates from custom config

* refactor(custom): assure endpointTokenConfig is only accessed from cache if qualifies for fetching

* fix(ci): update tests for initializeClient based on userProvideURL changes

* fix(EndpointService): correct baseURL env var for assistants: `ASSISTANTS_BASE_URL`

* fix: unnecessary run cancellation on res.close() when response.run is completed

* feat(assistants): user provided URL option

* ci: update tests and add test for `assistants` endpoint

* chore: leaner condition for request closing

* chore: more descriptive error message to provide keys again
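
Taken together, these changes let every OpenAI-compatible endpoint accept a user-provided base URL alongside a user-provided key, with both values stored as a single JSON string. A minimal sketch of that stored shape, with placeholder values (the field names match the initializeClient and SetKeyDialog changes below):

    // saved by the client via saveKey(JSON.stringify(...)) and read back with JSON.parse on the server
    { "apiKey": "sk-...", "baseURL": "https://my-proxy.example.com/v1" }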
Danny Avila 2024-02-28 14:27:19 -05:00 committed by GitHub
parent 53ae2d7bfb
commit 2f92b54787
17 changed files with 762 additions and 226 deletions

View file

@@ -97,11 +97,16 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
   const cache = getLogStores(CacheKeys.ABORT_KEYS);
   const cacheKey = `${req.user.id}:${conversationId}`;
 
+  /** @type {Run | undefined} - The completed run, undefined if incomplete */
+  let completedRun;
+
   const handleError = async (error) => {
     if (error.message === 'Run cancelled') {
       return res.end();
     }
-    if (error.message === 'Request closed') {
+    if (error.message === 'Request closed' && completedRun) {
+      return;
+    } else if (error.message === 'Request closed') {
       logger.debug('[/assistants/chat/] Request aborted on close');
     }
@@ -161,7 +166,9 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
   try {
     res.on('close', async () => {
-      await handleError(new Error('Request closed'));
+      if (!completedRun) {
+        await handleError(new Error('Request closed'));
+      }
     });
 
     if (convoId && !_thread_id) {
@@ -322,6 +329,8 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
       });
     }
 
+    completedRun = response.run;
+
     /** @type {ResponseMessage} */
     const responseMessage = {
       ...openai.responseMessage,
@@ -367,7 +376,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
     if (!response.run.usage) {
       await sleep(3000);
-      const completedRun = await openai.beta.threads.runs.retrieve(thread_id, run.id);
+      completedRun = await openai.beta.threads.runs.retrieve(thread_id, run.id);
       if (completedRun.usage) {
         await recordUsage({
           ...completedRun.usage,

View file

@@ -1,4 +1,5 @@
 const { EModelEndpoint } = require('librechat-data-provider');
+const { isUserProvided, generateConfig } = require('~/server/utils');
 
 const {
   OPENAI_API_KEY: openAIApiKey,
@@ -9,17 +10,16 @@ const {
   BINGAI_TOKEN: bingToken,
   PLUGINS_USE_AZURE,
   GOOGLE_KEY: googleKey,
+  OPENAI_REVERSE_PROXY,
+  AZURE_OPENAI_BASEURL,
+  ASSISTANTS_BASE_URL,
 } = process.env ?? {};
 
 const useAzurePlugins = !!PLUGINS_USE_AZURE;
 
 const userProvidedOpenAI = useAzurePlugins
-  ? azureOpenAIApiKey === 'user_provided'
-  : openAIApiKey === 'user_provided';
-
-function isUserProvided(key) {
-  return key ? { userProvide: key === 'user_provided' } : false;
-}
+  ? isUserProvided(azureOpenAIApiKey)
+  : isUserProvided(openAIApiKey);
 
 module.exports = {
   config: {
@@ -28,11 +28,11 @@ module.exports = {
     useAzurePlugins,
     userProvidedOpenAI,
     googleKey,
-    [EModelEndpoint.openAI]: isUserProvided(openAIApiKey),
-    [EModelEndpoint.assistants]: isUserProvided(assistantsApiKey),
-    [EModelEndpoint.azureOpenAI]: isUserProvided(azureOpenAIApiKey),
-    [EModelEndpoint.chatGPTBrowser]: isUserProvided(chatGPTToken),
-    [EModelEndpoint.anthropic]: isUserProvided(anthropicApiKey),
-    [EModelEndpoint.bingAI]: isUserProvided(bingToken),
+    [EModelEndpoint.openAI]: generateConfig(openAIApiKey, OPENAI_REVERSE_PROXY),
+    [EModelEndpoint.assistants]: generateConfig(assistantsApiKey, ASSISTANTS_BASE_URL),
+    [EModelEndpoint.azureOpenAI]: generateConfig(azureOpenAIApiKey, AZURE_OPENAI_BASEURL),
+    [EModelEndpoint.chatGPTBrowser]: generateConfig(chatGPTToken),
+    [EModelEndpoint.anthropic]: generateConfig(anthropicApiKey),
+    [EModelEndpoint.bingAI]: generateConfig(bingToken),
   },
 };

View file

@@ -1,8 +1,10 @@
 const { EModelEndpoint } = require('librechat-data-provider');
 const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs');
 const { availableTools } = require('~/app/clients/tools');
-const { openAIApiKey, azureOpenAIApiKey, useAzurePlugins, userProvidedOpenAI, googleKey } =
-  require('./EndpointService').config;
+const { isUserProvided } = require('~/server/utils');
+const { config } = require('./EndpointService');
+
+const { openAIApiKey, azureOpenAIApiKey, useAzurePlugins, userProvidedOpenAI, googleKey } = config;
 
 /**
  * Load async endpoints and return a configuration object
@@ -19,7 +21,7 @@ async function loadAsyncEndpoints(req) {
     }
   }
 
-  if (googleKey === 'user_provided') {
+  if (isUserProvided(googleKey)) {
     googleUserProvides = true;
     if (i <= 1) {
       i++;
@@ -44,6 +46,10 @@ async function loadAsyncEndpoints(req) {
         plugins,
         availableAgents: ['classic', 'functions'],
         userProvide: useAzure ? false : userProvidedOpenAI,
+        userProvideURL: useAzure
+          ? false
+          : config[EModelEndpoint.openAI]?.userProvideURL ||
+            config[EModelEndpoint.azureOpenAI]?.userProvideURL,
         azure: useAzurePlugins || useAzure,
       }
     : false;
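
For illustration, the gptPlugins entry of the endpoints config now carries a userProvideURL flag next to userProvide. A sketch of the resulting object for a deployment where OPENAI_API_KEY and OPENAI_REVERSE_PROXY are both set to user_provided and Azure is disabled (these flag values describe one assumed configuration, not defaults):

    {
      plugins,
      availableAgents: ['classic', 'functions'],
      userProvide: true,     // userProvidedOpenAI
      userProvideURL: true,  // config[EModelEndpoint.openAI]?.userProvideURL
      azure: false,          // useAzurePlugins || useAzure
    }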

View file

@@ -7,12 +7,42 @@ const {
   checkUserKeyExpiry,
 } = require('~/server/services/UserService');
 const OpenAIClient = require('~/app/clients/OpenAIClient');
+const { isUserProvided } = require('~/server/utils');
 
 const initializeClient = async ({ req, res, endpointOption, initAppClient = false }) => {
   const { PROXY, OPENAI_ORGANIZATION, ASSISTANTS_API_KEY, ASSISTANTS_BASE_URL } = process.env;
+
+  const userProvidesKey = isUserProvided(ASSISTANTS_API_KEY);
+  const userProvidesURL = isUserProvided(ASSISTANTS_BASE_URL);
+
+  let userValues = null;
+  if (userProvidesKey || userProvidesURL) {
+    const expiresAt = await getUserKeyExpiry({
+      userId: req.user.id,
+      name: EModelEndpoint.assistants,
+    });
+    checkUserKeyExpiry(
+      expiresAt,
+      'Your Assistants API key has expired. Please provide your API key again.',
+    );
+    userValues = await getUserKey({ userId: req.user.id, name: EModelEndpoint.assistants });
+    try {
+      userValues = JSON.parse(userValues);
+    } catch (e) {
+      throw new Error(
+        'Invalid JSON provided for Assistants API user values. Please provide them again.',
+      );
+    }
+  }
+
+  let apiKey = userProvidesKey ? userValues.apiKey : ASSISTANTS_API_KEY;
+  let baseURL = userProvidesURL ? userValues.baseURL : ASSISTANTS_BASE_URL;
+
+  if (!apiKey) {
+    throw new Error('Assistants API key not provided. Please provide it again.');
+  }
+
   const opts = {};
-  const baseURL = ASSISTANTS_BASE_URL ?? null;
 
   if (baseURL) {
     opts.baseURL = baseURL;
@@ -26,29 +56,6 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals
     opts.organization = OPENAI_ORGANIZATION;
   }
 
-  const credentials = ASSISTANTS_API_KEY;
-  const isUserProvided = credentials === 'user_provided';
-
-  let userKey = null;
-  if (isUserProvided) {
-    const expiresAt = await getUserKeyExpiry({
-      userId: req.user.id,
-      name: EModelEndpoint.assistants,
-    });
-    checkUserKeyExpiry(
-      expiresAt,
-      'Your Assistants API key has expired. Please provide your API key again.',
-    );
-    userKey = await getUserKey({ userId: req.user.id, name: EModelEndpoint.assistants });
-  }
-
-  let apiKey = isUserProvided ? userKey : credentials;
-
-  if (!apiKey) {
-    throw new Error(`${EModelEndpoint.assistants} API key not provided.`);
-  }
-
   /** @type {OpenAIClient} */
   const openai = new OpenAI({
     apiKey,
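
A minimal sketch of the resulting flow for the assistants endpoint when both ASSISTANTS_API_KEY and ASSISTANTS_BASE_URL are set to user_provided (placeholder values; the real handler above also applies the PROXY and OPENAI_ORGANIZATION options):

    const raw = await getUserKey({ userId: req.user.id, name: EModelEndpoint.assistants });
    const { apiKey, baseURL } = JSON.parse(raw); // e.g. { "apiKey": "sk-...", "baseURL": "https://example.com/v1" }
    const openai = new OpenAI({ apiKey, ...(baseURL ? { baseURL } : {}) });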

View file

@@ -0,0 +1,99 @@
// const OpenAI = require('openai');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { getUserKey, getUserKeyExpiry } = require('~/server/services/UserService');
const initializeClient = require('./initializeClient');
// const { OpenAIClient } = require('~/app');
jest.mock('~/server/services/UserService', () => ({
getUserKey: jest.fn(),
getUserKeyExpiry: jest.fn(),
checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
}));
const today = new Date();
const tenDaysFromToday = new Date(today.setDate(today.getDate() + 10));
const isoString = tenDaysFromToday.toISOString();
describe('initializeClient', () => {
// Set up environment variables
const originalEnvironment = process.env;
const app = {
locals: {},
};
beforeEach(() => {
jest.resetModules(); // Clears the cache
process.env = { ...originalEnvironment }; // Make a copy
});
afterAll(() => {
process.env = originalEnvironment; // Restore original env vars
});
test('initializes OpenAI client with default API key and URL', async () => {
process.env.ASSISTANTS_API_KEY = 'default-api-key';
process.env.ASSISTANTS_BASE_URL = 'https://default.api.url';
// Assuming 'isUserProvided' to return false for this test case
jest.mock('~/server/utils', () => ({
isUserProvided: jest.fn().mockReturnValueOnce(false),
}));
const req = { user: { id: 'user123' }, app };
const res = {};
const { openai, openAIApiKey } = await initializeClient({ req, res });
expect(openai.apiKey).toBe('default-api-key');
expect(openAIApiKey).toBe('default-api-key');
expect(openai.baseURL).toBe('https://default.api.url');
});
test('initializes OpenAI client with user-provided API key and URL', async () => {
process.env.ASSISTANTS_API_KEY = 'user_provided';
process.env.ASSISTANTS_BASE_URL = 'user_provided';
getUserKey.mockResolvedValue(
JSON.stringify({ apiKey: 'user-api-key', baseURL: 'https://user.api.url' }),
);
getUserKeyExpiry.mockResolvedValue(isoString);
const req = { user: { id: 'user123' } };
const res = {};
const { openai, openAIApiKey } = await initializeClient({ req, res });
expect(openAIApiKey).toBe('user-api-key');
expect(openai.apiKey).toBe('user-api-key');
expect(openai.baseURL).toBe('https://user.api.url');
});
test('throws error for invalid JSON in user-provided values', async () => {
process.env.ASSISTANTS_API_KEY = 'user_provided';
getUserKey.mockResolvedValue('invalid-json');
getUserKeyExpiry.mockResolvedValue(isoString);
const req = { user: { id: 'user123' } };
const res = {};
await expect(initializeClient({ req, res })).rejects.toThrow(/Invalid JSON/);
});
test('throws error if API key is not provided', async () => {
delete process.env.ASSISTANTS_API_KEY; // Simulate missing API key
const req = { user: { id: 'user123' } };
const res = {};
await expect(initializeClient({ req, res })).rejects.toThrow(/Assistants API key not/);
});
test('initializes OpenAI client with proxy configuration', async () => {
process.env.ASSISTANTS_API_KEY = 'test-key';
process.env.PROXY = 'http://proxy.server';
const req = { user: { id: 'user123' }, app };
const res = {};
const { openai } = await initializeClient({ req, res });
expect(openai.httpAgent).toBeInstanceOf(HttpsProxyAgent);
});
});

View file

@@ -1,8 +1,9 @@
 const {
-  EModelEndpoint,
   CacheKeys,
-  extractEnvVariable,
   envVarRegex,
+  EModelEndpoint,
+  FetchTokenConfig,
+  extractEnvVariable,
 } = require('librechat-data-provider');
 const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
 const getCustomConfig = require('~/server/services/Config/getCustomConfig');
@@ -42,11 +43,53 @@ const initializeClient = async ({ req, res, endpointOption }) => {
     throw new Error(`Missing Base URL for ${endpoint}.`);
   }
 
+  const userProvidesKey = isUserProvided(CUSTOM_API_KEY);
+  const userProvidesURL = isUserProvided(CUSTOM_BASE_URL);
+
+  let userValues = null;
+  if (expiresAt && (userProvidesKey || userProvidesURL)) {
+    checkUserKeyExpiry(
+      expiresAt,
+      `Your API values for ${endpoint} have expired. Please configure them again.`,
+    );
+    userValues = await getUserKey({ userId: req.user.id, name: endpoint });
+    try {
+      userValues = JSON.parse(userValues);
+    } catch (e) {
+      throw new Error(`Invalid JSON provided for ${endpoint} user values.`);
+    }
+  }
+
+  let apiKey = userProvidesKey ? userValues.apiKey : CUSTOM_API_KEY;
+  let baseURL = userProvidesURL ? userValues.baseURL : CUSTOM_BASE_URL;
+
+  if (!apiKey) {
+    throw new Error(`${endpoint} API key not provided.`);
+  }
+
+  if (!baseURL) {
+    throw new Error(`${endpoint} Base URL not provided.`);
+  }
+
   const cache = getLogStores(CacheKeys.TOKEN_CONFIG);
-  let endpointTokenConfig = await cache.get(endpoint);
-  if (endpointConfig && endpointConfig.models.fetch && !endpointTokenConfig) {
-    await fetchModels({ apiKey: CUSTOM_API_KEY, baseURL: CUSTOM_BASE_URL, name: endpoint });
-    endpointTokenConfig = await cache.get(endpoint);
+  const tokenKey =
+    !endpointConfig.tokenConfig && (userProvidesKey || userProvidesURL)
+      ? `${endpoint}:${req.user.id}`
+      : endpoint;
+
+  let endpointTokenConfig =
+    !endpointConfig.tokenConfig &&
+    FetchTokenConfig[endpoint.toLowerCase()] &&
+    (await cache.get(tokenKey));
+
+  if (
+    FetchTokenConfig[endpoint.toLowerCase()] &&
+    endpointConfig &&
+    endpointConfig.models.fetch &&
+    !endpointTokenConfig
+  ) {
+    await fetchModels({ apiKey, baseURL, name: endpoint, user: req.user.id, tokenKey });
+    endpointTokenConfig = await cache.get(tokenKey);
   }
 
   const customOptions = {
@@ -63,34 +106,6 @@ const initializeClient = async ({ req, res, endpointOption }) => {
     endpointTokenConfig,
   };
 
-  const useUserKey = isUserProvided(CUSTOM_API_KEY);
-  const useUserURL = isUserProvided(CUSTOM_BASE_URL);
-
-  let userValues = null;
-  if (expiresAt && (useUserKey || useUserURL)) {
-    checkUserKeyExpiry(
-      expiresAt,
-      `Your API values for ${endpoint} have expired. Please configure them again.`,
-    );
-    userValues = await getUserKey({ userId: req.user.id, name: endpoint });
-    try {
-      userValues = JSON.parse(userValues);
-    } catch (e) {
-      throw new Error(`Invalid JSON provided for ${endpoint} user values.`);
-    }
-  }
-
-  let apiKey = useUserKey ? userValues.apiKey : CUSTOM_API_KEY;
-  let baseURL = useUserURL ? userValues.baseURL : CUSTOM_BASE_URL;
-
-  if (!apiKey) {
-    throw new Error(`${endpoint} API key not provided.`);
-  }
-
-  if (!baseURL) {
-    throw new Error(`${endpoint} Base URL not provided.`);
-  }
-
   const clientOptions = {
     reverseProxyUrl: baseURL ?? null,
     proxy: PROXY ?? null,
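
The cache scoping above can be read as a small helper, sketched here only to restate the ternary: when the key or base URL is user-provided and the endpoint has no static tokenConfig, fetched token rates are cached per user instead of per endpoint (the helper name and example values are illustrative):

    const getTokenKey = (endpointConfig, endpoint, userId, userProvidesKey, userProvidesURL) =>
      !endpointConfig.tokenConfig && (userProvidesKey || userProvidesURL)
        ? `${endpoint}:${userId}` // per-user cache entry
        : endpoint;               // shared cache entry

    getTokenKey({}, 'openrouter', 'user123', true, false); // => 'openrouter:user123'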

View file

@@ -4,8 +4,8 @@ const {
   resolveHeaders,
 } = require('librechat-data-provider');
 const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
+const { isEnabled, isUserProvided } = require('~/server/utils');
 const { getAzureCredentials } = require('~/utils');
-const { isEnabled } = require('~/server/utils');
 const { PluginsClient } = require('~/app');
 
 const initializeClient = async ({ req, res, endpointOption }) => {
@@ -34,43 +34,48 @@ const initializeClient = async ({ req, res, endpointOption }) => {
     endpoint = EModelEndpoint.azureOpenAI;
   }
 
+  const credentials = {
+    [EModelEndpoint.openAI]: OPENAI_API_KEY,
+    [EModelEndpoint.azureOpenAI]: AZURE_API_KEY,
+  };
+
   const baseURLOptions = {
     [EModelEndpoint.openAI]: OPENAI_REVERSE_PROXY,
     [EModelEndpoint.azureOpenAI]: AZURE_OPENAI_BASEURL,
   };
 
-  const reverseProxyUrl = baseURLOptions[endpoint] ?? null;
+  const userProvidesKey = isUserProvided(credentials[endpoint]);
+  const userProvidesURL = isUserProvided(baseURLOptions[endpoint]);
+
+  let userValues = null;
+  if (expiresAt && (userProvidesKey || userProvidesURL)) {
+    checkUserKeyExpiry(
+      expiresAt,
+      'Your OpenAI API values have expired. Please provide them again.',
+    );
+    userValues = await getUserKey({ userId: req.user.id, name: endpoint });
+    try {
+      userValues = JSON.parse(userValues);
+    } catch (e) {
+      throw new Error(
+        `Invalid JSON provided for ${endpoint} user values. Please provide them again.`,
+      );
+    }
+  }
+
+  let apiKey = userProvidesKey ? userValues.apiKey : credentials[endpoint];
+  let baseURL = userProvidesURL ? userValues.baseURL : baseURLOptions[endpoint];
 
   const clientOptions = {
     contextStrategy,
     debug: isEnabled(DEBUG_PLUGINS),
-    reverseProxyUrl,
+    reverseProxyUrl: baseURL ? baseURL : null,
     proxy: PROXY ?? null,
     req,
     res,
     ...endpointOption,
   };
 
-  const credentials = {
-    [EModelEndpoint.openAI]: OPENAI_API_KEY,
-    [EModelEndpoint.azureOpenAI]: AZURE_API_KEY,
-  };
-
-  const isUserProvided = credentials[endpoint] === 'user_provided';
-
-  let userKey = null;
-  if (expiresAt && isUserProvided) {
-    checkUserKeyExpiry(
-      expiresAt,
-      'Your OpenAI API key has expired. Please provide your API key again.',
-    );
-    userKey = await getUserKey({
-      userId: req.user.id,
-      name: endpoint,
-    });
-  }
-
-  let apiKey = isUserProvided ? userKey : credentials[endpoint];
-
   if (useAzure && azureConfig) {
     const { modelGroupMap, groupMap } = azureConfig;
     const {
@@ -99,12 +104,12 @@ const initializeClient = async ({ req, res, endpointOption }) => {
     apiKey = azureOptions.azureOpenAIApiKey;
     clientOptions.azure = !serverless && azureOptions;
   } else if (useAzure || (apiKey && apiKey.includes('{"azure') && !clientOptions.azure)) {
-    clientOptions.azure = isUserProvided ? JSON.parse(userKey) : getAzureCredentials();
+    clientOptions.azure = userProvidesKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
     apiKey = clientOptions.azure.azureOpenAIApiKey;
   }
 
   if (!apiKey) {
-    throw new Error(`${endpoint} API key not provided.`);
+    throw new Error(`${endpoint} API key not provided. Please provide it again.`);
   }
 
   const client = new PluginsClient(apiKey, clientOptions);

View file

@@ -1,5 +1,5 @@
 // gptPlugins/initializeClient.spec.js
-const { EModelEndpoint } = require('librechat-data-provider');
+const { EModelEndpoint, validateAzureGroups } = require('librechat-data-provider');
 const { getUserKey } = require('~/server/services/UserService');
 const initializeClient = require('./initializeClient');
 const { PluginsClient } = require('~/app');
@@ -17,6 +17,69 @@ describe('gptPlugins/initializeClient', () => {
     locals: {},
   };
const validAzureConfigs = [
{
group: 'librechat-westus',
apiKey: 'WESTUS_API_KEY',
instanceName: 'librechat-westus',
version: '2023-12-01-preview',
models: {
'gpt-4-vision-preview': {
deploymentName: 'gpt-4-vision-preview',
version: '2024-02-15-preview',
},
'gpt-3.5-turbo': {
deploymentName: 'gpt-35-turbo',
},
'gpt-3.5-turbo-1106': {
deploymentName: 'gpt-35-turbo-1106',
},
'gpt-4': {
deploymentName: 'gpt-4',
},
'gpt-4-1106-preview': {
deploymentName: 'gpt-4-1106-preview',
},
},
},
{
group: 'librechat-eastus',
apiKey: 'EASTUS_API_KEY',
instanceName: 'librechat-eastus',
deploymentName: 'gpt-4-turbo',
version: '2024-02-15-preview',
models: {
'gpt-4-turbo': true,
},
baseURL: 'https://eastus.example.com',
additionalHeaders: {
'x-api-key': 'x-api-key-value',
},
},
{
group: 'mistral-inference',
apiKey: 'AZURE_MISTRAL_API_KEY',
baseURL:
'https://Mistral-large-vnpet-serverless.region.inference.ai.azure.com/v1/chat/completions',
serverless: true,
models: {
'mistral-large': true,
},
},
{
group: 'llama-70b-chat',
apiKey: 'AZURE_LLAMA2_70B_API_KEY',
baseURL:
'https://Llama-2-70b-chat-qmvyb-serverless.region.inference.ai.azure.com/v1/chat/completions',
serverless: true,
models: {
'llama-70b-chat': true,
},
},
];
const { modelNames, modelGroupMap, groupMap } = validateAzureGroups(validAzureConfigs);
  beforeEach(() => {
    jest.resetModules(); // Clears the cache
    process.env = { ...originalEnvironment }; // Make a copy
@@ -142,7 +205,7 @@ describe('gptPlugins/initializeClient', () => {
     const res = {};
     const endpointOption = { modelOptions: { model: 'default-model' } };
 
-    getUserKey.mockResolvedValue('test-user-provided-openai-api-key');
+    getUserKey.mockResolvedValue(JSON.stringify({ apiKey: 'test-user-provided-openai-api-key' }));
 
     const { openAIApiKey } = await initializeClient({ req, res, endpointOption });
@@ -164,8 +227,10 @@ describe('gptPlugins/initializeClient', () => {
     getUserKey.mockResolvedValue(
       JSON.stringify({
-        azureOpenAIApiKey: 'test-user-provided-azure-api-key',
-        azureOpenAIApiDeploymentName: 'test-deployment',
+        apiKey: JSON.stringify({
+          azureOpenAIApiKey: 'test-user-provided-azure-api-key',
+          azureOpenAIApiDeploymentName: 'test-deployment',
+        }),
       }),
     );
@@ -186,9 +251,7 @@ describe('gptPlugins/initializeClient', () => {
     const res = {};
     const endpointOption = { modelOptions: { model: 'default-model' } };
 
-    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
-      /Your OpenAI API key has expired/,
-    );
+    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(/Your OpenAI API/);
   });
 
   test('should throw an error if the user-provided Azure key is invalid JSON', async () => {
@@ -207,7 +270,7 @@ describe('gptPlugins/initializeClient', () => {
     getUserKey.mockResolvedValue('invalid-json');
 
     await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
-      /Unexpected token/,
+      /Invalid JSON provided/,
     );
   });
@@ -229,4 +292,92 @@ describe('gptPlugins/initializeClient', () => {
     expect(client.options.reverseProxyUrl).toBe('http://reverse.proxy');
     expect(client.options.proxy).toBe('http://proxy');
   });
test('should throw an error when user-provided values are not valid JSON', async () => {
process.env.OPENAI_API_KEY = 'user_provided';
const req = {
body: { key: new Date(Date.now() + 10000).toISOString(), endpoint: 'openAI' },
user: { id: '123' },
app,
};
const res = {};
const endpointOption = {};
// Mock getUserKey to return a non-JSON string
getUserKey.mockResolvedValue('not-a-json');
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
/Invalid JSON provided for openAI user values/,
);
});
test('should initialize client correctly for Azure OpenAI with valid configuration', async () => {
const req = {
body: {
key: null,
endpoint: EModelEndpoint.gptPlugins,
model: modelNames[0],
},
user: { id: '123' },
app: {
locals: {
[EModelEndpoint.azureOpenAI]: {
plugins: true,
modelNames,
modelGroupMap,
groupMap,
},
},
},
};
const res = {};
const endpointOption = {};
const client = await initializeClient({ req, res, endpointOption });
expect(client.client.options.azure).toBeDefined();
});
test('should initialize client with default options when certain env vars are not set', async () => {
delete process.env.DEBUG_OPENAI;
delete process.env.OPENAI_SUMMARIZE;
const req = {
body: { key: null, endpoint: 'openAI' },
user: { id: '123' },
app,
};
const res = {};
const endpointOption = {};
const client = await initializeClient({ req, res, endpointOption });
expect(client.client.options.debug).toBe(false);
expect(client.client.options.contextStrategy).toBe(null);
});
test('should correctly use user-provided apiKey and baseURL when provided', async () => {
process.env.OPENAI_API_KEY = 'user_provided';
process.env.OPENAI_REVERSE_PROXY = 'user_provided';
const req = {
body: {
key: new Date(Date.now() + 10000).toISOString(),
endpoint: 'openAI',
},
user: {
id: '123',
},
app,
};
const res = {};
const endpointOption = {};
getUserKey.mockResolvedValue(
JSON.stringify({ apiKey: 'test', baseURL: 'https://user-provided-url.com' }),
);
const result = await initializeClient({ req, res, endpointOption });
expect(result.openAIApiKey).toBe('test');
expect(result.client.options.reverseProxyUrl).toBe('https://user-provided-url.com');
});
});

View file

@@ -4,8 +4,8 @@ const {
   resolveHeaders,
 } = require('librechat-data-provider');
 const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
+const { isEnabled, isUserProvided } = require('~/server/utils');
 const { getAzureCredentials } = require('~/utils');
-const { isEnabled } = require('~/server/utils');
 const { OpenAIClient } = require('~/app');
 
 const initializeClient = async ({ req, res, endpointOption }) => {
@@ -21,40 +21,48 @@ const initializeClient = async ({ req, res, endpointOption }) => {
   const { key: expiresAt, endpoint, model: modelName } = req.body;
   const contextStrategy = isEnabled(OPENAI_SUMMARIZE) ? 'summarize' : null;
 
+  const credentials = {
+    [EModelEndpoint.openAI]: OPENAI_API_KEY,
+    [EModelEndpoint.azureOpenAI]: AZURE_API_KEY,
+  };
+
   const baseURLOptions = {
     [EModelEndpoint.openAI]: OPENAI_REVERSE_PROXY,
     [EModelEndpoint.azureOpenAI]: AZURE_OPENAI_BASEURL,
   };
 
-  const reverseProxyUrl = baseURLOptions[endpoint] ?? null;
+  const userProvidesKey = isUserProvided(credentials[endpoint]);
+  const userProvidesURL = isUserProvided(baseURLOptions[endpoint]);
+
+  let userValues = null;
+  if (expiresAt && (userProvidesKey || userProvidesURL)) {
+    checkUserKeyExpiry(
+      expiresAt,
+      'Your OpenAI API values have expired. Please provide them again.',
+    );
+    userValues = await getUserKey({ userId: req.user.id, name: endpoint });
+    try {
+      userValues = JSON.parse(userValues);
+    } catch (e) {
+      throw new Error(
+        `Invalid JSON provided for ${endpoint} user values. Please provide them again.`,
+      );
+    }
+  }
+
+  let apiKey = userProvidesKey ? userValues.apiKey : credentials[endpoint];
+  let baseURL = userProvidesURL ? userValues.baseURL : baseURLOptions[endpoint];
 
   const clientOptions = {
     debug: isEnabled(DEBUG_OPENAI),
     contextStrategy,
-    reverseProxyUrl,
+    reverseProxyUrl: baseURL ? baseURL : null,
     proxy: PROXY ?? null,
     req,
     res,
     ...endpointOption,
   };
 
-  const credentials = {
-    [EModelEndpoint.openAI]: OPENAI_API_KEY,
-    [EModelEndpoint.azureOpenAI]: AZURE_API_KEY,
-  };
-
-  const isUserProvided = credentials[endpoint] === 'user_provided';
-
-  let userKey = null;
-  if (expiresAt && isUserProvided) {
-    checkUserKeyExpiry(
-      expiresAt,
-      'Your OpenAI API key has expired. Please provide your API key again.',
-    );
-    userKey = await getUserKey({ userId: req.user.id, name: endpoint });
-  }
-
-  let apiKey = isUserProvided ? userKey : credentials[endpoint];
-
   const isAzureOpenAI = endpoint === EModelEndpoint.azureOpenAI;
   /** @type {false | TAzureConfig} */
   const azureConfig = isAzureOpenAI && req.app.locals[EModelEndpoint.azureOpenAI];
@@ -87,12 +95,12 @@ const initializeClient = async ({ req, res, endpointOption }) => {
     apiKey = azureOptions.azureOpenAIApiKey;
     clientOptions.azure = !serverless && azureOptions;
   } else if (isAzureOpenAI) {
-    clientOptions.azure = isUserProvided ? JSON.parse(userKey) : getAzureCredentials();
+    clientOptions.azure = userProvidesKey ? JSON.parse(userValues.apiKey) : getAzureCredentials();
     apiKey = clientOptions.azure.azureOpenAIApiKey;
   }
 
   if (!apiKey) {
-    throw new Error(`${endpoint} API key not provided.`);
+    throw new Error(`${endpoint} API key not provided. Please provide it again.`);
  }
 
   const client = new OpenAIClient(apiKey, clientOptions);
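
When the azureOpenAI key is user-provided, the stored apiKey field is itself a JSON string of Azure credentials, which the handler above runs through JSON.parse(userValues.apiKey). A sketch with placeholder values (the property names come from this commit's SetKeyDialog and test changes):

    const userValues = JSON.parse(await getUserKey({ userId: req.user.id, name: endpoint }));
    // userValues.apiKey is a nested JSON string, e.g.
    // '{"azureOpenAIApiKey":"...","azureOpenAIApiInstanceName":"...","azureOpenAIApiDeploymentName":"...","azureOpenAIApiVersion":"..."}'
    clientOptions.azure = JSON.parse(userValues.apiKey);
    apiKey = clientOptions.azure.azureOpenAIApiKey;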

View file

@@ -1,4 +1,4 @@
-const { EModelEndpoint } = require('librechat-data-provider');
+const { EModelEndpoint, validateAzureGroups } = require('librechat-data-provider');
 const { getUserKey } = require('~/server/services/UserService');
 const initializeClient = require('./initializeClient');
 const { OpenAIClient } = require('~/app');
@@ -16,6 +16,69 @@ describe('initializeClient', () => {
     locals: {},
   };
const validAzureConfigs = [
{
group: 'librechat-westus',
apiKey: 'WESTUS_API_KEY',
instanceName: 'librechat-westus',
version: '2023-12-01-preview',
models: {
'gpt-4-vision-preview': {
deploymentName: 'gpt-4-vision-preview',
version: '2024-02-15-preview',
},
'gpt-3.5-turbo': {
deploymentName: 'gpt-35-turbo',
},
'gpt-3.5-turbo-1106': {
deploymentName: 'gpt-35-turbo-1106',
},
'gpt-4': {
deploymentName: 'gpt-4',
},
'gpt-4-1106-preview': {
deploymentName: 'gpt-4-1106-preview',
},
},
},
{
group: 'librechat-eastus',
apiKey: 'EASTUS_API_KEY',
instanceName: 'librechat-eastus',
deploymentName: 'gpt-4-turbo',
version: '2024-02-15-preview',
models: {
'gpt-4-turbo': true,
},
baseURL: 'https://eastus.example.com',
additionalHeaders: {
'x-api-key': 'x-api-key-value',
},
},
{
group: 'mistral-inference',
apiKey: 'AZURE_MISTRAL_API_KEY',
baseURL:
'https://Mistral-large-vnpet-serverless.region.inference.ai.azure.com/v1/chat/completions',
serverless: true,
models: {
'mistral-large': true,
},
},
{
group: 'llama-70b-chat',
apiKey: 'AZURE_LLAMA2_70B_API_KEY',
baseURL:
'https://Llama-2-70b-chat-qmvyb-serverless.region.inference.ai.azure.com/v1/chat/completions',
serverless: true,
models: {
'llama-70b-chat': true,
},
},
];
const { modelNames, modelGroupMap, groupMap } = validateAzureGroups(validAzureConfigs);
  beforeEach(() => {
    jest.resetModules(); // Clears the cache
    process.env = { ...originalEnvironment }; // Make a copy
@@ -38,10 +101,10 @@ describe('initializeClient', () => {
     const res = {};
     const endpointOption = {};
 
-    const client = await initializeClient({ req, res, endpointOption });
+    const result = await initializeClient({ req, res, endpointOption });
 
-    expect(client.openAIApiKey).toBe('test-openai-api-key');
-    expect(client.client).toBeInstanceOf(OpenAIClient);
+    expect(result.openAIApiKey).toBe('test-openai-api-key');
+    expect(result.client).toBeInstanceOf(OpenAIClient);
   });
 
   test('should initialize client with Azure credentials when endpoint is azureOpenAI', async () => {
@@ -137,9 +200,7 @@ describe('initializeClient', () => {
     const res = {};
     const endpointOption = {};
 
-    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
-      'Your OpenAI API key has expired. Please provide your API key again.',
-    );
+    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(/Your OpenAI API/);
   });
 
   test('should throw an error if no API keys are provided in the environment', async () => {
@@ -180,7 +241,7 @@ describe('initializeClient', () => {
     process.env.OPENAI_API_KEY = 'user_provided';
 
     // Mock getUserKey to return the expected key
-    getUserKey.mockResolvedValue('test-user-provided-openai-api-key');
+    getUserKey.mockResolvedValue(JSON.stringify({ apiKey: 'test-user-provided-openai-api-key' }));
 
     // Call the initializeClient function
     const result = await initializeClient({ req, res, endpointOption });
@@ -205,8 +266,93 @@ describe('initializeClient', () => {
     // Mock getUserKey to return an invalid key
     getUserKey.mockResolvedValue(invalidKey);
 
+    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(/Your OpenAI API/);
+  });
+
+  test('should throw an error when user-provided values are not valid JSON', async () => {
+    process.env.OPENAI_API_KEY = 'user_provided';
+    const req = {
+      body: { key: new Date(Date.now() + 10000).toISOString(), endpoint: 'openAI' },
+      user: { id: '123' },
+      app,
+    };
+    const res = {};
+    const endpointOption = {};
+
+    // Mock getUserKey to return a non-JSON string
+    getUserKey.mockResolvedValue('not-a-json');
+
     await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
-      /Your OpenAI API key has expired/,
+      /Invalid JSON provided for openAI user values/,
     );
   });
test('should initialize client correctly for Azure OpenAI with valid configuration', async () => {
const req = {
body: {
key: null,
endpoint: EModelEndpoint.azureOpenAI,
model: modelNames[0],
},
user: { id: '123' },
app: {
locals: {
[EModelEndpoint.azureOpenAI]: {
modelNames,
modelGroupMap,
groupMap,
},
},
},
};
const res = {};
const endpointOption = {};
const client = await initializeClient({ req, res, endpointOption });
expect(client.client.options.azure).toBeDefined();
});
test('should initialize client with default options when certain env vars are not set', async () => {
delete process.env.DEBUG_OPENAI;
delete process.env.OPENAI_SUMMARIZE;
const req = {
body: { key: null, endpoint: 'openAI' },
user: { id: '123' },
app,
};
const res = {};
const endpointOption = {};
const client = await initializeClient({ req, res, endpointOption });
expect(client.client.options.debug).toBe(false);
expect(client.client.options.contextStrategy).toBe(null);
});
test('should correctly use user-provided apiKey and baseURL when provided', async () => {
process.env.OPENAI_API_KEY = 'user_provided';
process.env.OPENAI_REVERSE_PROXY = 'user_provided';
const req = {
body: {
key: new Date(Date.now() + 10000).toISOString(),
endpoint: 'openAI',
},
user: {
id: '123',
},
app,
};
const res = {};
const endpointOption = {};
getUserKey.mockResolvedValue(
JSON.stringify({ apiKey: 'test', baseURL: 'https://user-provided-url.com' }),
);
const result = await initializeClient({ req, res, endpointOption });
expect(result.openAIApiKey).toBe('test');
expect(result.client.options.reverseProxyUrl).toBe('https://user-provided-url.com');
});
});

View file

@@ -20,6 +20,7 @@ const { openAIApiKey, userProvidedOpenAI } = require('./Config/EndpointService')
  * @param {boolean} [params.azure=false] - Whether to fetch models from Azure.
  * @param {boolean} [params.userIdQuery=false] - Whether to send the user ID as a query parameter.
  * @param {boolean} [params.createTokenConfig=true] - Whether to create a token configuration from the API response.
+ * @param {string} [params.tokenKey] - The cache key to save the token configuration. Uses `name` if omitted.
  * @returns {Promise<string[]>} A promise that resolves to an array of model identifiers.
  * @async
  */
@@ -31,6 +32,7 @@ const fetchModels = async ({
   azure = false,
   userIdQuery = false,
   createTokenConfig = true,
+  tokenKey,
 }) => {
   let models = [];
@@ -70,7 +72,7 @@ const fetchModels = async ({
     if (validationResult.success && createTokenConfig) {
       const endpointTokenConfig = processModelData(input);
       const cache = getLogStores(CacheKeys.TOKEN_CONFIG);
-      await cache.set(name, endpointTokenConfig);
+      await cache.set(tokenKey ?? name, endpointTokenConfig);
     }
     models = input.data.map((item) => item.id);
   } catch (error) {
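
For reference, the custom-endpoint initializer earlier in this commit calls the updated signature like this; the values shown in comments are illustrative:

    const models = await fetchModels({
      apiKey,
      baseURL,
      name: endpoint,   // e.g. 'openrouter'
      user: req.user.id,
      tokenKey,         // e.g. 'openrouter:user123' when the key or URL is user-provided
    });
    // the token rates parsed from the response are cached under `tokenKey ?? name`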

View file

@@ -172,6 +172,27 @@ function isEnabled(value) {
  */
 const isUserProvided = (value) => value === 'user_provided';
 
+/**
+ * Generate the configuration for a given key and base URL.
+ * @param {string} key
+ * @param {string} baseURL
+ * @returns {boolean | { userProvide: boolean, userProvideURL?: boolean }}
+ */
+function generateConfig(key, baseURL) {
+  if (!key) {
+    return false;
+  }
+
+  /** @type {{ userProvide: boolean, userProvideURL?: boolean }} */
+  const config = { userProvide: isUserProvided(key) };
+
+  if (baseURL) {
+    config.userProvideURL = isUserProvided(baseURL);
+  }
+
+  return config;
+}
+
 module.exports = {
   createOnProgress,
   isEnabled,
@@ -180,4 +201,5 @@ module.exports = {
   formatAction,
   addSpaceIfNeeded,
   isUserProvided,
+  generateConfig,
 };
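
Illustrative inputs and outputs for the generateConfig helper above (the env values are placeholders):

    generateConfig(undefined);                        // => false  (endpoint not configured)
    generateConfig('sk-live-key');                    // => { userProvide: false }
    generateConfig('user_provided');                  // => { userProvide: true }
    generateConfig('user_provided', 'user_provided'); // => { userProvide: true, userProvideURL: true }
    generateConfig('sk-live-key', 'https://proxy.example.com/v1'); // => { userProvide: false, userProvideURL: false }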

View file

@@ -1,80 +1,101 @@
-import { useEffect, useState } from 'react';
 import { EModelEndpoint } from 'librechat-data-provider';
-import { useMultipleKeys } from '~/hooks/Input';
+import { useFormContext, Controller } from 'react-hook-form';
 import InputWithLabel from './InputWithLabel';
-import type { TConfigProps } from '~/common';
-import { isJson } from '~/utils/json';
 
-const OpenAIConfig = ({ userKey, setUserKey, endpoint }: TConfigProps) => {
-  const [showPanel, setShowPanel] = useState(endpoint === EModelEndpoint.azureOpenAI);
-  const { getMultiKey: getAzure, setMultiKey: setAzure } = useMultipleKeys(setUserKey);
-
-  useEffect(() => {
-    if (isJson(userKey)) {
-      setShowPanel(true);
-    }
-    setUserKey('');
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, []);
-
-  useEffect(() => {
-    if (!showPanel && isJson(userKey)) {
-      setUserKey('');
-    }
-    // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [showPanel]);
-
-  return (
-    <>
-      {!showPanel ? (
-        <>
-          <InputWithLabel
-            id={endpoint}
-            value={userKey ?? ''}
-            onChange={(e: { target: { value: string } }) => setUserKey(e.target.value ?? '')}
-            label={'OpenAI API Key'}
-          />
-        </>
-      ) : (
-        <>
-          <InputWithLabel
-            id={'instanceNameLabel'}
-            value={getAzure('azureOpenAIApiInstanceName', userKey) ?? ''}
-            onChange={(e: { target: { value: string } }) =>
-              setAzure('azureOpenAIApiInstanceName', e.target.value ?? '', userKey)
-            }
-            label={'Azure OpenAI Instance Name'}
-          />
-
-          <InputWithLabel
-            id={'deploymentNameLabel'}
-            value={getAzure('azureOpenAIApiDeploymentName', userKey) ?? ''}
-            onChange={(e: { target: { value: string } }) =>
-              setAzure('azureOpenAIApiDeploymentName', e.target.value ?? '', userKey)
-            }
-            label={'Azure OpenAI Deployment Name'}
-          />
-
-          <InputWithLabel
-            id={'versionLabel'}
-            value={getAzure('azureOpenAIApiVersion', userKey) ?? ''}
-            onChange={(e: { target: { value: string } }) =>
-              setAzure('azureOpenAIApiVersion', e.target.value ?? '', userKey)
-            }
-            label={'Azure OpenAI API Version'}
-          />
-
-          <InputWithLabel
-            id={'apiKeyLabel'}
-            value={getAzure('azureOpenAIApiKey', userKey) ?? ''}
-            onChange={(e: { target: { value: string } }) =>
-              setAzure('azureOpenAIApiKey', e.target.value ?? '', userKey)
-            }
-            label={'Azure OpenAI API Key'}
-          />
-        </>
-      )}
-    </>
+const OpenAIConfig = ({
+  endpoint,
+  userProvideURL,
+}: {
+  endpoint: EModelEndpoint | string;
+  userProvideURL?: boolean | null;
+}) => {
+  const { control } = useFormContext();
+  const isAzure = endpoint === EModelEndpoint.azureOpenAI;
+  return (
+    <form className="flex-wrap">
+      {!isAzure && (
+        <Controller
+          name="apiKey"
+          control={control}
+          render={({ field }) => (
+            <InputWithLabel
+              id="apiKey"
+              {...field}
+              label={`${isAzure ? 'Azure ' : ''}OpenAI API Key`}
+              labelClassName="mb-1"
+              inputClassName="mb-2"
+            />
+          )}
+        />
+      )}
+      {isAzure && (
+        <>
+          <Controller
+            name="azureOpenAIApiKey"
+            control={control}
+            render={({ field }) => (
+              <InputWithLabel
+                id="azureOpenAIApiKey"
+                {...field}
+                label={'Azure OpenAI API Key'}
+                labelClassName="mb-1"
+              />
+            )}
+          />
+          <Controller
+            name="azureOpenAIApiInstanceName"
+            control={control}
+            render={({ field }) => (
+              <InputWithLabel
+                id="azureOpenAIApiInstanceName"
+                {...field}
+                label={'Azure OpenAI Instance Name'}
+                labelClassName="mb-1"
+              />
+            )}
+          />
+          <Controller
+            name="azureOpenAIApiDeploymentName"
+            control={control}
+            render={({ field }) => (
+              <InputWithLabel
+                id="azureOpenAIApiDeploymentName"
+                {...field}
+                label={'Azure OpenAI Deployment Name'}
+                labelClassName="mb-1"
+              />
+            )}
+          />
+          <Controller
+            name="azureOpenAIApiVersion"
+            control={control}
+            render={({ field }) => (
+              <InputWithLabel
+                id="azureOpenAIApiVersion"
+                {...field}
+                label={'Azure OpenAI API Version'}
+                labelClassName="mb-1"
+              />
+            )}
+          />
+        </>
+      )}
+      {userProvideURL && (
+        <Controller
+          name="baseURL"
+          control={control}
+          render={({ field }) => (
+            <InputWithLabel
+              id="baseURL"
+              {...field}
+              label={'API Base URL'}
+              subLabel={'(Optional)'}
+              labelClassName="mb-1"
+            />
+          )}
+        />
+      )}
+    </form>
   );
 };

View file

@@ -20,9 +20,18 @@ const endpointComponents = {
   [EModelEndpoint.custom]: CustomConfig,
   [EModelEndpoint.azureOpenAI]: OpenAIConfig,
   [EModelEndpoint.gptPlugins]: OpenAIConfig,
+  [EModelEndpoint.assistants]: OpenAIConfig,
   default: OtherConfig,
 };
 
+const formSet: Set<string> = new Set([
+  EModelEndpoint.openAI,
+  EModelEndpoint.custom,
+  EModelEndpoint.azureOpenAI,
+  EModelEndpoint.gptPlugins,
+  EModelEndpoint.assistants,
+]);
+
 const EXPIRY = {
   THIRTY_MINUTES: { display: 'in 30 minutes', value: 30 * 60 * 1000 },
   TWO_HOURS: { display: 'in 2 hours', value: 2 * 60 * 60 * 1000 },
@@ -47,6 +56,10 @@ const SetKeyDialog = ({
     defaultValues: {
       apiKey: '',
       baseURL: '',
+      azureOpenAIApiKey: '',
+      azureOpenAIApiInstanceName: '',
+      azureOpenAIApiDeploymentName: '',
+      azureOpenAIApiVersion: '',
       // TODO: allow endpoint definitions from user
       // name: '',
       // TODO: add custom endpoint models defined by user
@@ -76,10 +89,26 @@ const SetKeyDialog = ({
     onOpenChange(false);
   };
 
-  if (endpoint === EModelEndpoint.custom || endpointType === EModelEndpoint.custom) {
+  if (formSet.has(endpoint) || formSet.has(endpointType ?? '')) {
     // TODO: handle other user provided options besides baseURL and apiKey
     methods.handleSubmit((data) => {
+      const isAzure = endpoint === EModelEndpoint.azureOpenAI;
+      const isOpenAIBase =
+        isAzure ||
+        endpoint === EModelEndpoint.openAI ||
+        endpoint === EModelEndpoint.gptPlugins ||
+        endpoint === EModelEndpoint.assistants;
+      if (isAzure) {
+        data.apiKey = 'n/a';
+      }
       const emptyValues = Object.keys(data).filter((key) => {
+        if (!isAzure && key.startsWith('azure')) {
+          return false;
+        }
+        if (isOpenAIBase && key === 'baseURL') {
+          return false;
+        }
         if (key === 'baseURL' && !userProvideURL) {
           return false;
         }
@@ -92,10 +121,22 @@ const SetKeyDialog = ({
           status: 'error',
         });
         onOpenChange(true);
-      } else {
-        saveKey(JSON.stringify(data));
-        methods.reset();
+        return;
       }
+
+      const { apiKey, baseURL, ...azureOptions } = data;
+      const userProvidedData = { apiKey, baseURL };
+      if (isAzure) {
+        userProvidedData.apiKey = JSON.stringify({
+          azureOpenAIApiKey: azureOptions.azureOpenAIApiKey,
+          azureOpenAIApiInstanceName: azureOptions.azureOpenAIApiInstanceName,
+          azureOpenAIApiDeploymentName: azureOptions.azureOpenAIApiDeploymentName,
+          azureOpenAIApiVersion: azureOptions.azureOpenAIApiVersion,
+        });
+      }
+      saveKey(JSON.stringify(userProvidedData));
+      methods.reset();
     })();
 
     return;
   }

package-lock.json (generated, 8 changes)
View file

@@ -11288,9 +11288,9 @@
       }
     },
     "node_modules/caniuse-lite": {
-      "version": "1.0.30001584",
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001584.tgz",
-      "integrity": "sha512-LOz7CCQ9M1G7OjJOF9/mzmqmj3jE/7VOmrfw6Mgs0E8cjOsbRXQJHsPBfmBOXDskXKrHLyyW3n7kpDW/4BsfpQ==",
+      "version": "1.0.30001591",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001591.tgz",
+      "integrity": "sha512-PCzRMei/vXjJyL5mJtzNiUCKP59dm8Apqc3PH8gJkMnMXZGox93RbE76jHsmLwmIo6/3nsYIpJtx0O7u5PqFuQ==",
       "dev": true,
       "funding": [
         {
@@ -27993,7 +27993,7 @@
     },
     "packages/data-provider": {
       "name": "librechat-data-provider",
-      "version": "0.4.4",
+      "version": "0.4.5",
       "license": "ISC",
       "dependencies": {
         "@types/js-yaml": "^4.0.9",

View file

@@ -1,6 +1,6 @@
 {
   "name": "librechat-data-provider",
-  "version": "0.4.5",
+  "version": "0.4.6",
   "description": "data services for librechat apps",
   "main": "dist/index.js",
   "module": "dist/index.es.js",

View file

@@ -171,16 +171,20 @@ export const configSchema = z.object({
 export type TCustomConfig = z.infer<typeof configSchema>;
 
-export const KnownEndpoints = {
-  mistral: 'mistral',
-  openrouter: 'openrouter',
-  groq: 'groq',
-  anyscale: 'anyscale',
-  fireworks: 'fireworks',
-  ollama: 'ollama',
-  perplexity: 'perplexity',
-  'together.ai': 'together.ai',
-} as const;
+export enum KnownEndpoints {
+  mistral = 'mistral',
+  openrouter = 'openrouter',
+  groq = 'groq',
+  anyscale = 'anyscale',
+  fireworks = 'fireworks',
+  ollama = 'ollama',
+  perplexity = 'perplexity',
+  'together.ai' = 'together.ai',
+}
+
+export enum FetchTokenConfig {
+  openrouter = KnownEndpoints.openrouter,
+}
 
 export const defaultEndpoints: EModelEndpoint[] = [
   EModelEndpoint.openAI,