mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-17 00:40:14 +01:00)
* wip: first pass for azure endpoint schema
* refactor: azure config to return groupMap and modelConfigMap
* wip: naming and schema changes
* refactor(errorsToString): move to data-provider
* feat: rename to azureGroups, add additional tests, tests all expected outcomes, return errors
* feat(AppService): load Azure groups
* refactor(azure): use imported types, write `mapModelToAzureConfig`
* refactor: move `extractEnvVariable` to data-provider
* refactor(validateAzureGroups): throw on duplicate groups or models; feat(mapModelToAzureConfig): throw if env vars not present, add tests
* refactor(AppService): ensure each model is properly configured on startup
* refactor: deprecate azureOpenAI environment variables in favor of librechat.yaml config
* feat: use helper functions to handle and order enabled/default endpoints; initialize azureOpenAI from config file
* refactor: redefine types as well as load azureOpenAI models from config file
* chore(ci): fix test description naming
* feat(azureOpenAI): use validated model grouping for request authentication
* chore: bump data-provider following rebase
* chore: bump config file version noting significant changes
* feat: add title options and switch azure configs for titling and vision requests
* feat: enable azure plugins from config file
* fix(ci): pass tests
* chore(.env.example): mark `PLUGINS_USE_AZURE` as deprecated
* fix(fetchModels): early return if apiKey not passed
* chore: fix azure config typing
* refactor(mapModelToAzureConfig): return baseURL and headers as well as azureOptions
* feat(createLLM): use `azureOpenAIBasePath`
* feat(parsers): resolveHeaders
* refactor(extractBaseURL): handle invalid input
* feat(OpenAIClient): handle headers and baseURL for azureConfig
* fix(ci): pass `OpenAIClient` tests
* chore: extract env var for azureOpenAI group config, baseURL
* docs: azureOpenAI config setup docs
* feat: safe check of potential conflicting env vars that map to unique placeholders
* fix: reset apiKey when model switches from originally requested model (vision or title)
* chore: linting
* docs: CONFIG_PATH notes in custom_config.md
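The changes above center on one idea: each entry in `azureGroups` bundles a set of Azure credentials (API key, instance name, API version) with the models it serves, and an incoming request's model name is mapped to that group's options, with `${ENV_VAR}` placeholders resolved and missing variables rejected. The sketch below illustrates that lookup only; the group shape and the `resolveModelAzureOptions` helper are hypothetical stand-ins for illustration, not the actual `mapModelToAzureConfig` implementation in `librechat-data-provider`.

```js
// Hypothetical sketch: resolve a requested model to its Azure group's options.
// The group/field shapes below are illustrative, not the actual LibreChat schema.
const azureGroups = [
  {
    group: 'us-east',
    apiKey: '${EASTUS_API_KEY}', // env-var placeholder, resolved at load time
    instanceName: 'my-instance',
    version: '2023-12-01-preview',
    models: {
      'gpt-4-vision-preview': { deploymentName: 'gpt-4-vision-preview' },
      'gpt-3.5-turbo': { deploymentName: 'gpt-35-turbo' },
    },
  },
];

function resolveEnvPlaceholder(value) {
  // Replace "${VAR_NAME}" with process.env.VAR_NAME; throw if the variable is unset,
  // mirroring the "throw if env vars not present" behavior described above.
  const match = /^\$\{([A-Z0-9_]+)\}$/.exec(value);
  if (!match) {
    return value;
  }
  const resolved = process.env[match[1]];
  if (!resolved) {
    throw new Error(`Missing environment variable: ${match[1]}`);
  }
  return resolved;
}

function resolveModelAzureOptions(modelName, groups) {
  // Find the first group that serves the requested model and return its options.
  for (const group of groups) {
    const model = group.models[modelName];
    if (!model) {
      continue;
    }
    return {
      azureOpenAIApiKey: resolveEnvPlaceholder(group.apiKey),
      azureOpenAIApiInstanceName: group.instanceName,
      azureOpenAIApiDeploymentName: model.deploymentName,
      azureOpenAIApiVersion: model.version ?? group.version,
    };
  }
  throw new Error(`Model "${modelName}" is not configured in any Azure group`);
}
```

A caller would then pass the returned options, the same `azureOpenAIApi*` fields asserted in the spec below, when constructing the client.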
232 lines
7.5 KiB
JavaScript
// gptPlugins/initializeClient.spec.js
const { EModelEndpoint } = require('librechat-data-provider');
const { getUserKey } = require('~/server/services/UserService');
const initializeClient = require('./initializeClient');
const { PluginsClient } = require('~/app');

// Mock getUserKey since it's the only function we want to mock
jest.mock('~/server/services/UserService', () => ({
  getUserKey: jest.fn(),
  checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
}));

describe('gptPlugins/initializeClient', () => {
  // Set up environment variables
  const originalEnvironment = process.env;
  const app = {
    locals: {},
  };

  beforeEach(() => {
    jest.resetModules(); // Clears the cache
    process.env = { ...originalEnvironment }; // Make a copy
  });

  afterAll(() => {
    process.env = originalEnvironment; // Restore original env vars
  });

  test('should initialize PluginsClient with OpenAI API key and default options', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.PLUGINS_USE_AZURE = 'false';
    process.env.DEBUG_PLUGINS = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client, openAIApiKey } = await initializeClient({ req, res, endpointOption });

    expect(openAIApiKey).toBe('test-openai-api-key');
    expect(client).toBeInstanceOf(PluginsClient);
  });

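  // The legacy Azure path builds its credentials from AZURE_* environment variables,
  // so the next test stubs each required variable with a placeholder value.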
  test('should initialize PluginsClient with Azure credentials when PLUGINS_USE_AZURE is true', async () => {
    process.env.AZURE_API_KEY = 'test-azure-api-key';
    process.env.AZURE_OPENAI_API_INSTANCE_NAME = 'some-value';
    process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME = 'some-value';
    process.env.AZURE_OPENAI_API_VERSION = 'some-value';
    process.env.AZURE_OPENAI_API_COMPLETIONS_DEPLOYMENT_NAME = 'some-value';
    process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME = 'some-value';
    process.env.PLUGINS_USE_AZURE = 'true';
    process.env.DEBUG_PLUGINS = 'false';
    process.env.OPENAI_SUMMARIZE = 'false';

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'test-model' } };

    const { client, azure } = await initializeClient({ req, res, endpointOption });

    expect(azure.azureOpenAIApiKey).toBe('test-azure-api-key');
    expect(client).toBeInstanceOf(PluginsClient);
  });

  test('should use the debug option when DEBUG_PLUGINS is enabled', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.DEBUG_PLUGINS = 'true';

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client } = await initializeClient({ req, res, endpointOption });

    expect(client.options.debug).toBe(true);
  });

  test('should set contextStrategy to summarize when OPENAI_SUMMARIZE is enabled', async () => {
    process.env.OPENAI_API_KEY = 'test-openai-api-key';
    process.env.OPENAI_SUMMARIZE = 'true';

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client } = await initializeClient({ req, res, endpointOption });

    expect(client.options.contextStrategy).toBe('summarize');
  });

  // ... additional tests for reverseProxyUrl, proxy, user-provided keys, etc.

  test('should throw an error if no API keys are provided in the environment', async () => {
    // Clear the environment variables for API keys
    delete process.env.OPENAI_API_KEY;
    delete process.env.AZURE_API_KEY;

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      `${EModelEndpoint.openAI} API key not provided.`,
    );
  });

  // Additional tests for gptPlugins/initializeClient.spec.js

  // ... (previous test setup code)

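  // For user-provided keys, `req.body.key` carries the key's expiry timestamp;
  // the credential itself is resolved through getUserKey (mocked above).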
  test('should handle user-provided OpenAI keys and check expiry', async () => {
    process.env.OPENAI_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'false';

    const futureDate = new Date(Date.now() + 10000).toISOString();
    const req = {
      body: { key: futureDate },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    getUserKey.mockResolvedValue('test-user-provided-openai-api-key');

    const { openAIApiKey } = await initializeClient({ req, res, endpointOption });

    expect(openAIApiKey).toBe('test-user-provided-openai-api-key');
  });

  test('should handle user-provided Azure keys and check expiry', async () => {
    process.env.AZURE_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'true';

    const futureDate = new Date(Date.now() + 10000).toISOString();
    const req = {
      body: { key: futureDate },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'test-model' } };

    getUserKey.mockResolvedValue(
      JSON.stringify({
        azureOpenAIApiKey: 'test-user-provided-azure-api-key',
        azureOpenAIApiDeploymentName: 'test-deployment',
      }),
    );

    const { azure } = await initializeClient({ req, res, endpointOption });

    expect(azure.azureOpenAIApiKey).toBe('test-user-provided-azure-api-key');
  });

  test('should throw an error if the user-provided key has expired', async () => {
    process.env.OPENAI_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'FALSE';
    const expiresAt = new Date(Date.now() - 10000).toISOString(); // Expired
    const req = {
      body: { key: expiresAt },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /Your OpenAI API key has expired/,
    );
  });

  test('should throw an error if the user-provided Azure key is invalid JSON', async () => {
    process.env.AZURE_API_KEY = 'user_provided';
    process.env.PLUGINS_USE_AZURE = 'true';

    const req = {
      body: { key: new Date(Date.now() + 10000).toISOString() },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    // Simulate an invalid JSON string returned from getUserKey
    getUserKey.mockResolvedValue('invalid-json');

    await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
      /Unexpected token/,
    );
  });

  test('should correctly handle the presence of a reverse proxy', async () => {
    process.env.OPENAI_REVERSE_PROXY = 'http://reverse.proxy';
    process.env.PROXY = 'http://proxy';
    process.env.OPENAI_API_KEY = 'test-openai-api-key';

    const req = {
      body: { key: null },
      user: { id: '123' },
      app,
    };
    const res = {};
    const endpointOption = { modelOptions: { model: 'default-model' } };

    const { client } = await initializeClient({ req, res, endpointOption });

    expect(client.options.reverseProxyUrl).toBe('http://reverse.proxy');
    expect(client.options.proxy).toBe('http://proxy');
  });
});