feat(azureOpenAI): Allow Switching Deployment Name by Model Name (#1137)

* feat(azureOpenAI): allow switching deployment name by model name

* ci: add unit tests and throw error on no api key provided to avoid API call

* fix(gptPlugins/initializeClient): check if azure is enabled; ci: add unit tests for gptPlugins/initializeClient

* fix(ci): fix expected error message for partial regex match: 'Unexpected token'
Danny Avila 2023-11-04 15:03:31 -04:00 committed by GitHub
parent a7b5639da1
commit 0886441461
8 changed files with 562 additions and 38 deletions
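
The gist of the change: when Azure OpenAI is in use, the deployment segment of the request URL now follows the selected model name instead of a single statically configured deployment. Model names are sanitized first, since periods are disallowed in Azure deployment names. A minimal sketch of the mapping, using the sanitizeModelName helper added in this commit (the instance and version in the URL are placeholders):

const sanitizeModelName = (modelName) => modelName.replace(/\./g, '');

sanitizeModelName('gpt-3.5-turbo'); // => 'gpt-35-turbo'
// resulting chat completion endpoint (placeholders for instance/version):
// https://<instance>.openai.azure.com/openai/deployments/gpt-35-turbo/chat/completions?api-version=<version>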


@@ -517,6 +517,9 @@ If your reverse proxy is compatible to OpenAI specs in every other way, it may s
console.log('There was an issue generating title with LangChain, trying the old method...');
this.options.debug && console.error(e.message, e);
modelOptions.model = OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
if (this.azure) {
this.azureEndpoint = genAzureChatCompletion(this.azure, modelOptions.model);
}
const instructionsPayload = [
{
role: 'system',


@@ -1,4 +1,5 @@
const { ChatOpenAI } = require('langchain/chat_models/openai');
const { sanitizeModelName } = require('../../../utils');
function createLLM({
modelOptions,
@@ -13,9 +14,12 @@ function createLLM({
apiKey: openAIApiKey,
};
let azureOptions = {};
if (azure) {
credentials = {};
configuration = {};
azureOptions = azure;
azureOptions.azureOpenAIApiDeploymentName = sanitizeModelName(modelOptions.modelName);
}
// console.debug('createLLM: configOptions');
@@ -27,7 +31,7 @@
verbose: true,
credentials,
configuration,
...azure,
...azureOptions,
...modelOptions,
callbacks,
},


@@ -1,6 +1,6 @@
const { PluginsClient } = require('../../../../app');
const { isEnabled } = require('../../../utils');
const { getAzureCredentials } = require('../../../../utils');
const { getAzureCredentials, sanitizeModelName } = require('../../../../utils');
const { getUserKey, checkUserKeyExpiry } = require('../../../services/UserService');
const initializeClient = async ({ req, res, endpointOption }) => {
@@ -25,38 +25,43 @@ const initializeClient = async ({ req, res, endpointOption }) => {
...endpointOption,
};
const isUserProvided = PLUGINS_USE_AZURE
const useAzure = isEnabled(PLUGINS_USE_AZURE);
const isUserProvided = useAzure
? AZURE_API_KEY === 'user_provided'
: OPENAI_API_KEY === 'user_provided';
let key = null;
let userKey = null;
if (expiresAt && isUserProvided) {
checkUserKeyExpiry(
expiresAt,
'Your OpenAI API key has expired. Please provide your API key again.',
);
key = await getUserKey({
userKey = await getUserKey({
userId: req.user.id,
name: PLUGINS_USE_AZURE ? 'azureOpenAI' : 'openAI',
name: useAzure ? 'azureOpenAI' : 'openAI',
});
}
let openAIApiKey = isUserProvided ? key : OPENAI_API_KEY;
let apiKey = isUserProvided ? userKey : OPENAI_API_KEY;
if (PLUGINS_USE_AZURE) {
clientOptions.azure = isUserProvided ? JSON.parse(key) : getAzureCredentials();
openAIApiKey = clientOptions.azure.azureOpenAIApiKey;
if (useAzure || (apiKey && apiKey.includes('azure') && !clientOptions.azure)) {
clientOptions.azure = isUserProvided ? JSON.parse(userKey) : getAzureCredentials();
clientOptions.azure.azureOpenAIApiDeploymentName = sanitizeModelName(
clientOptions.modelOptions.model,
);
apiKey = clientOptions.azure.azureOpenAIApiKey;
}
if (openAIApiKey && openAIApiKey.includes('azure') && !clientOptions.azure) {
clientOptions.azure = isUserProvided ? JSON.parse(key) : getAzureCredentials();
openAIApiKey = clientOptions.azure.azureOpenAIApiKey;
if (!apiKey) {
throw new Error('API key not provided.');
}
const client = new PluginsClient(openAIApiKey, clientOptions);
const client = new PluginsClient(apiKey, clientOptions);
return {
client,
azure: clientOptions.azure,
openAIApiKey,
openAIApiKey: apiKey,
};
};
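
Note on user-provided Azure keys: the stored key is parsed as JSON and used as the Azure credential object, after which the deployment name is overwritten with the sanitized model name and the API key is read from it. A sketch of the shape JSON.parse(userKey) is expected to yield (field values are placeholders; the full field list is an assumption based on the AzureCredentials typedef later in this commit):

const userKey = JSON.stringify({
  azureOpenAIApiKey: '<azure-api-key>',
  azureOpenAIApiInstanceName: '<instance-name>',
  azureOpenAIApiDeploymentName: '<deployment-name>', // replaced by sanitizeModelName(model)
  azureOpenAIApiVersion: '<api-version>',
});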


@@ -0,0 +1,238 @@
// gptPlugins/initializeClient.spec.js
const initializeClient = require('./initializeClient');
const { PluginsClient } = require('../../../../app');
const { getUserKey } = require('../../../services/UserService');
// Mock getUserKey since it's the only function we want to mock
jest.mock('../../../services/UserService', () => ({
getUserKey: jest.fn(),
checkUserKeyExpiry: jest.requireActual('../../../services/UserService').checkUserKeyExpiry,
}));
describe('gptPlugins/initializeClient', () => {
// Set up environment variables
const originalEnvironment = process.env;
beforeEach(() => {
jest.resetModules(); // Clears the cache
process.env = { ...originalEnvironment }; // Make a copy
});
afterAll(() => {
process.env = originalEnvironment; // Restore original env vars
});
test('should initialize PluginsClient with OpenAI API key and default options', async () => {
process.env.OPENAI_API_KEY = 'test-openai-api-key';
process.env.PLUGINS_USE_AZURE = 'false';
process.env.DEBUG_PLUGINS = 'false';
process.env.OPENAI_SUMMARIZE = 'false';
const req = {
body: { key: null },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'default-model' } };
const { client, openAIApiKey } = await initializeClient({ req, res, endpointOption });
expect(openAIApiKey).toBe('test-openai-api-key');
expect(client).toBeInstanceOf(PluginsClient);
});
test('should initialize PluginsClient with Azure credentials when PLUGINS_USE_AZURE is true', async () => {
process.env.AZURE_API_KEY = 'test-azure-api-key';
process.env.PLUGINS_USE_AZURE = 'true';
process.env.DEBUG_PLUGINS = 'false';
process.env.OPENAI_SUMMARIZE = 'false';
const req = {
body: { key: null },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'test-model' } };
const { client, azure } = await initializeClient({ req, res, endpointOption });
expect(azure.azureOpenAIApiKey).toBe('test-azure-api-key');
expect(client).toBeInstanceOf(PluginsClient);
});
test('should use the debug option when DEBUG_PLUGINS is enabled', async () => {
process.env.OPENAI_API_KEY = 'test-openai-api-key';
process.env.DEBUG_PLUGINS = 'true';
const req = {
body: { key: null },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'default-model' } };
const { client } = await initializeClient({ req, res, endpointOption });
expect(client.options.debug).toBe(true);
});
test('should set contextStrategy to summarize when OPENAI_SUMMARIZE is enabled', async () => {
process.env.OPENAI_API_KEY = 'test-openai-api-key';
process.env.OPENAI_SUMMARIZE = 'true';
const req = {
body: { key: null },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'default-model' } };
const { client } = await initializeClient({ req, res, endpointOption });
expect(client.options.contextStrategy).toBe('summarize');
});
// ... additional tests for reverseProxyUrl, proxy, user-provided keys, etc.
test('should throw an error if no API keys are provided in the environment', async () => {
// Clear the environment variables for API keys
delete process.env.OPENAI_API_KEY;
delete process.env.AZURE_API_KEY;
const req = {
body: { key: null },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'default-model' } };
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
'API key not provided.',
);
});
// Additional tests for gptPlugins/initializeClient.spec.js
// ... (previous test setup code)
test('should handle user-provided OpenAI keys and check expiry', async () => {
process.env.OPENAI_API_KEY = 'user_provided';
process.env.PLUGINS_USE_AZURE = 'false';
const futureDate = new Date(Date.now() + 10000).toISOString();
const req = {
body: { key: futureDate },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'default-model' } };
getUserKey.mockResolvedValue('test-user-provided-openai-api-key');
const { openAIApiKey } = await initializeClient({ req, res, endpointOption });
expect(openAIApiKey).toBe('test-user-provided-openai-api-key');
});
test('should handle user-provided Azure keys and check expiry', async () => {
process.env.AZURE_API_KEY = 'user_provided';
process.env.PLUGINS_USE_AZURE = 'true';
const futureDate = new Date(Date.now() + 10000).toISOString();
const req = {
body: { key: futureDate },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'test-model' } };
getUserKey.mockResolvedValue(
JSON.stringify({
azureOpenAIApiKey: 'test-user-provided-azure-api-key',
azureOpenAIApiDeploymentName: 'test-deployment',
}),
);
const { azure } = await initializeClient({ req, res, endpointOption });
expect(azure.azureOpenAIApiKey).toBe('test-user-provided-azure-api-key');
});
test('should throw an error if the user-provided key has expired', async () => {
process.env.OPENAI_API_KEY = 'user_provided';
process.env.PLUGINS_USE_AZURE = 'FALSE';
const expiresAt = new Date(Date.now() - 10000).toISOString(); // Expired
const req = {
body: { key: expiresAt },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'default-model' } };
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
/Your OpenAI API key has expired/,
);
});
test('should sanitize model name for Azure when modelOptions is provided', async () => {
process.env.AZURE_API_KEY = 'azure-provided-api-key';
process.env.PLUGINS_USE_AZURE = 'true';
const modelName = 'test-3.5-model';
const sanitizedModelName = 'test-35-model';
const req = {
body: { key: new Date(Date.now() + 10000).toISOString() },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: modelName } };
getUserKey.mockResolvedValue(
JSON.stringify({
azureOpenAIApiKey: 'test-user-provided-azure-api-key',
azureOpenAIApiDeploymentName: modelName,
}),
);
const { azure } = await initializeClient({ req, res, endpointOption });
expect(azure.azureOpenAIApiDeploymentName).toBe(sanitizedModelName);
});
test('should throw an error if the user-provided Azure key is invalid JSON', async () => {
process.env.AZURE_API_KEY = 'user_provided';
process.env.PLUGINS_USE_AZURE = 'true';
const req = {
body: { key: new Date(Date.now() + 10000).toISOString() },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'default-model' } };
// Simulate an invalid JSON string returned from getUserKey
getUserKey.mockResolvedValue('invalid-json');
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
/Unexpected token/,
);
});
test('should correctly handle the presence of a reverse proxy', async () => {
process.env.OPENAI_REVERSE_PROXY = 'http://reverse.proxy';
process.env.PROXY = 'http://proxy';
process.env.OPENAI_API_KEY = 'test-openai-api-key';
const req = {
body: { key: null },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'default-model' } };
const { client } = await initializeClient({ req, res, endpointOption });
expect(client.options.reverseProxyUrl).toBe('http://reverse.proxy');
expect(client.options.proxy).toBe('http://proxy');
});
});


@@ -1,6 +1,6 @@
const { OpenAIClient } = require('../../../../app');
const { isEnabled } = require('../../../utils');
const { getAzureCredentials } = require('../../../../utils');
const { getAzureCredentials, sanitizeModelName } = require('../../../../utils');
const { getUserKey, checkUserKeyExpiry } = require('../../../services/UserService');
const initializeClient = async ({ req, res, endpointOption }) => {
@@ -24,29 +24,40 @@ const initializeClient = async ({ req, res, endpointOption }) => {
...endpointOption,
};
const isUserProvided =
endpoint === 'openAI' ? OPENAI_API_KEY === 'user_provided' : AZURE_API_KEY === 'user_provided';
const credentials = {
openAI: OPENAI_API_KEY,
azureOpenAI: AZURE_API_KEY,
};
let key = null;
const isUserProvided = credentials[endpoint] === 'user_provided';
let userKey = null;
if (expiresAt && isUserProvided) {
checkUserKeyExpiry(
expiresAt,
'Your OpenAI API key has expired. Please provide your API key again.',
);
key = await getUserKey({ userId: req.user.id, name: endpoint });
userKey = await getUserKey({ userId: req.user.id, name: endpoint });
}
let openAIApiKey = isUserProvided ? key : OPENAI_API_KEY;
let apiKey = isUserProvided ? userKey : credentials[endpoint];
if (process.env.AZURE_API_KEY && endpoint === 'azureOpenAI') {
clientOptions.azure = isUserProvided ? JSON.parse(key) : getAzureCredentials();
openAIApiKey = clientOptions.azure.azureOpenAIApiKey;
if (endpoint === 'azureOpenAI') {
clientOptions.azure = isUserProvided ? JSON.parse(userKey) : getAzureCredentials();
clientOptions.azure.azureOpenAIApiDeploymentName = sanitizeModelName(
clientOptions.modelOptions.model,
);
apiKey = clientOptions.azure.azureOpenAIApiKey;
}
const client = new OpenAIClient(openAIApiKey, clientOptions);
if (!apiKey) {
throw new Error('API key not provided.');
}
const client = new OpenAIClient(apiKey, clientOptions);
return {
client,
openAIApiKey,
openAIApiKey: apiKey,
};
};


@@ -0,0 +1,211 @@
const initializeClient = require('./initializeClient');
const { OpenAIClient } = require('../../../../app');
const { getUserKey } = require('../../../services/UserService');
// Mock getUserKey since it's the only function we want to mock
jest.mock('../../../services/UserService', () => ({
getUserKey: jest.fn(),
checkUserKeyExpiry: jest.requireActual('../../../services/UserService').checkUserKeyExpiry,
}));
describe('initializeClient', () => {
// Set up environment variables
const originalEnvironment = process.env;
beforeEach(() => {
jest.resetModules(); // Clears the cache
process.env = { ...originalEnvironment }; // Make a copy
});
afterAll(() => {
process.env = originalEnvironment; // Restore original env vars
});
test('should initialize client with OpenAI API key and default options', async () => {
process.env.OPENAI_API_KEY = 'test-openai-api-key';
process.env.DEBUG_OPENAI = 'false';
process.env.OPENAI_SUMMARIZE = 'false';
const req = {
body: { key: null, endpoint: 'openAI' },
user: { id: '123' },
};
const res = {};
const endpointOption = {};
const client = await initializeClient({ req, res, endpointOption });
expect(client.openAIApiKey).toBe('test-openai-api-key');
expect(client.client).toBeInstanceOf(OpenAIClient);
});
test('should initialize client with Azure credentials when endpoint is azureOpenAI', async () => {
process.env.AZURE_API_KEY = 'test-azure-api-key';
process.env.OPENAI_API_KEY = 'test-openai-api-key';
process.env.DEBUG_OPENAI = 'false';
process.env.OPENAI_SUMMARIZE = 'false';
const req = {
body: { key: null, endpoint: 'azureOpenAI' },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: 'test-model' } };
const client = await initializeClient({ req, res, endpointOption });
expect(client.openAIApiKey).toBe('test-azure-api-key');
expect(client.client).toBeInstanceOf(OpenAIClient);
});
test('should use the debug option when DEBUG_OPENAI is enabled', async () => {
process.env.OPENAI_API_KEY = 'test-openai-api-key';
process.env.DEBUG_OPENAI = 'true';
const req = {
body: { key: null, endpoint: 'openAI' },
user: { id: '123' },
};
const res = {};
const endpointOption = {};
const client = await initializeClient({ req, res, endpointOption });
expect(client.client.options.debug).toBe(true);
});
test('should set contextStrategy to summarize when OPENAI_SUMMARIZE is enabled', async () => {
process.env.OPENAI_API_KEY = 'test-openai-api-key';
process.env.OPENAI_SUMMARIZE = 'true';
const req = {
body: { key: null, endpoint: 'openAI' },
user: { id: '123' },
};
const res = {};
const endpointOption = {};
const client = await initializeClient({ req, res, endpointOption });
expect(client.client.options.contextStrategy).toBe('summarize');
});
test('should set reverseProxyUrl and proxy when they are provided in the environment', async () => {
process.env.OPENAI_API_KEY = 'test-openai-api-key';
process.env.OPENAI_REVERSE_PROXY = 'http://reverse.proxy';
process.env.PROXY = 'http://proxy';
const req = {
body: { key: null, endpoint: 'openAI' },
user: { id: '123' },
};
const res = {};
const endpointOption = {};
const client = await initializeClient({ req, res, endpointOption });
expect(client.client.options.reverseProxyUrl).toBe('http://reverse.proxy');
expect(client.client.options.proxy).toBe('http://proxy');
});
test('should throw an error if the user-provided key has expired', async () => {
process.env.OPENAI_API_KEY = 'user_provided';
process.env.AZURE_API_KEY = 'user_provided';
process.env.DEBUG_OPENAI = 'false';
process.env.OPENAI_SUMMARIZE = 'false';
const expiresAt = new Date(Date.now() - 10000).toISOString(); // Expired
const req = {
body: { key: expiresAt, endpoint: 'openAI' },
user: { id: '123' },
};
const res = {};
const endpointOption = {};
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
'Your OpenAI API key has expired. Please provide your API key again.',
);
});
test('should throw an error if no API keys are provided in the environment', async () => {
// Clear the environment variables for API keys
delete process.env.OPENAI_API_KEY;
delete process.env.AZURE_API_KEY;
const req = {
body: { key: null, endpoint: 'openAI' },
user: { id: '123' },
};
const res = {};
const endpointOption = {};
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
'API key not provided.',
);
});
it('should handle user-provided keys and check expiry', async () => {
// Set up the req.body to simulate user-provided key scenario
const req = {
body: {
key: new Date(Date.now() + 10000).toISOString(),
endpoint: 'openAI',
},
user: {
id: '123',
},
};
const res = {};
const endpointOption = {};
// Ensure the environment variable is set to 'user_provided' to match the isUserProvided condition
process.env.OPENAI_API_KEY = 'user_provided';
// Mock getUserKey to return the expected key
getUserKey.mockResolvedValue('test-user-provided-openai-api-key');
// Call the initializeClient function
const result = await initializeClient({ req, res, endpointOption });
// Assertions
expect(result.openAIApiKey).toBe('test-user-provided-openai-api-key');
});
test('should throw an error if the user-provided key is invalid', async () => {
const invalidKey = new Date(Date.now() - 100000).toISOString();
const req = {
body: { key: invalidKey, endpoint: 'openAI' },
user: { id: '123' },
};
const res = {};
const endpointOption = {};
// Ensure the environment variable is set to 'user_provided' to match the isUserProvided condition
process.env.OPENAI_API_KEY = 'user_provided';
// Mock getUserKey to return an invalid key
getUserKey.mockResolvedValue(invalidKey);
await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow(
/Your OpenAI API key has expired/,
);
});
test('should sanitize model name for Azure when modelOptions is provided', async () => {
const modelName = 'test-3.5-model';
const sanitizedModelName = 'test-35-model';
const req = {
body: { key: new Date(Date.now() + 10000).toISOString(), endpoint: 'azureOpenAI' },
user: { id: '123' },
};
const res = {};
const endpointOption = { modelOptions: { model: modelName } };
process.env.AZURE_API_KEY = 'azure-provided-api-key';
getUserKey.mockResolvedValue('test-user-provided-openai-api-key');
const result = await initializeClient({ req, res, endpointOption });
expect(result.client.options.azure.azureOpenAIApiDeploymentName).toBe(sanitizedModelName);
});
});


@@ -16,6 +16,7 @@ const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _model
let models = _models.slice() ?? [];
let apiKey = openAIApiKey;
let basePath = 'https://api.openai.com/v1';
let reverseProxyUrl = OPENAI_REVERSE_PROXY;
if (opts.azure) {
return models;
// const azure = getAzureCredentials();
@@ -23,11 +24,7 @@
// .split('/deployments')[0]
// .concat(`/models?api-version=${azure.azureOpenAIApiVersion}`);
// apiKey = azureOpenAIApiKey;
}
let reverseProxyUrl = OPENAI_REVERSE_PROXY;
if (OPENROUTER_API_KEY) {
} else if (OPENROUTER_API_KEY) {
reverseProxyUrl = 'https://openrouter.ai/api/v1';
apiKey = OPENROUTER_API_KEY;
}


@@ -1,15 +1,65 @@
/**
* @typedef {Object} AzureCredentials
* @property {string} azureOpenAIApiKey - The Azure OpenAI API key.
* @property {string} azureOpenAIApiInstanceName - The Azure OpenAI API instance name.
* @property {string} azureOpenAIApiDeploymentName - The Azure OpenAI API deployment name.
* @property {string} azureOpenAIApiVersion - The Azure OpenAI API version.
*/
/**
* Sanitizes the model name to be used in the URL by removing or replacing disallowed characters.
* @param {string} modelName - The model name to be sanitized.
* @returns {string} The sanitized model name.
*/
const sanitizeModelName = (modelName) => {
// Replace periods with empty strings and other disallowed characters as needed
return modelName.replace(/\./g, '');
};
/**
* Generates the Azure OpenAI API endpoint URL.
* @param {Object} params - The parameters object.
* @param {string} params.azureOpenAIApiInstanceName - The Azure OpenAI API instance name.
* @param {string} params.azureOpenAIApiDeploymentName - The Azure OpenAI API deployment name.
* @returns {string} The complete endpoint URL for the Azure OpenAI API.
*/
const genAzureEndpoint = ({ azureOpenAIApiInstanceName, azureOpenAIApiDeploymentName }) => {
return `https://${azureOpenAIApiInstanceName}.openai.azure.com/openai/deployments/${azureOpenAIApiDeploymentName}`;
};
const genAzureChatCompletion = ({
azureOpenAIApiInstanceName,
azureOpenAIApiDeploymentName,
azureOpenAIApiVersion,
}) => {
return `https://${azureOpenAIApiInstanceName}.openai.azure.com/openai/deployments/${azureOpenAIApiDeploymentName}/chat/completions?api-version=${azureOpenAIApiVersion}`;
/**
* Generates the Azure OpenAI API chat completion endpoint URL with the API version.
* If both deploymentName and modelName are provided, modelName takes precedence.
* @param {Object} AzureConfig - The Azure configuration object.
* @param {string} AzureConfig.azureOpenAIApiInstanceName - The Azure OpenAI API instance name.
* @param {string} [AzureConfig.azureOpenAIApiDeploymentName] - The Azure OpenAI API deployment name (optional).
* @param {string} AzureConfig.azureOpenAIApiVersion - The Azure OpenAI API version.
* @param {string} [modelName] - The model name to be included in the deployment name (optional).
* @returns {string} The complete chat completion endpoint URL for the Azure OpenAI API.
* @throws {Error} If neither azureOpenAIApiDeploymentName nor modelName is provided.
*/
const genAzureChatCompletion = (
{ azureOpenAIApiInstanceName, azureOpenAIApiDeploymentName, azureOpenAIApiVersion },
modelName,
) => {
// Determine the deployment segment of the URL based on provided modelName or azureOpenAIApiDeploymentName
let deploymentSegment;
if (modelName) {
const sanitizedModelName = sanitizeModelName(modelName);
deploymentSegment = `${sanitizedModelName}`;
} else if (azureOpenAIApiDeploymentName) {
deploymentSegment = azureOpenAIApiDeploymentName;
} else {
throw new Error('Either a model name or a deployment name must be provided.');
}
return `https://${azureOpenAIApiInstanceName}.openai.azure.com/openai/deployments/${deploymentSegment}/chat/completions?api-version=${azureOpenAIApiVersion}`;
};
/**
* Retrieves the Azure OpenAI API credentials from environment variables.
* @returns {AzureCredentials} An object containing the Azure OpenAI API credentials.
*/
const getAzureCredentials = () => {
return {
azureOpenAIApiKey: process.env.AZURE_API_KEY ?? process.env.AZURE_OPENAI_API_KEY,
@@ -19,4 +69,9 @@ const getAzureCredentials = () => {
};
};
module.exports = { genAzureEndpoint, genAzureChatCompletion, getAzureCredentials };
module.exports = {
sanitizeModelName,
genAzureEndpoint,
genAzureChatCompletion,
getAzureCredentials,
};
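
Illustrative usage of the updated helper (instance, deployment, and version values below are placeholders): when a model name is passed it takes precedence over the configured deployment name, and with neither provided the function throws.

genAzureChatCompletion(
  {
    azureOpenAIApiInstanceName: 'my-instance',
    azureOpenAIApiDeploymentName: 'static-deployment',
    azureOpenAIApiVersion: '2023-05-15',
  },
  'gpt-3.5-turbo', // sanitized to 'gpt-35-turbo'
);
// => 'https://my-instance.openai.azure.com/openai/deployments/gpt-35-turbo/chat/completions?api-version=2023-05-15'

genAzureChatCompletion({ azureOpenAIApiInstanceName: 'my-instance', azureOpenAIApiVersion: '2023-05-15' });
// => throws: 'Either a model name or a deployment name must be provided.'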