feat(OpenAIClient): AZURE_USE_MODEL_AS_DEPLOYMENT_NAME, AZURE_OPENAI_DEFAULT_MODEL (#1165)

* feat(OpenAIClient): AZURE_USE_MODEL_AS_DEPLOYMENT_NAME, AZURE_OPENAI_DEFAULT_MODEL

* ci: fix initializeClient test
This commit is contained in:
Danny Avila 2023-11-10 09:58:17 -05:00 committed by GitHub
parent 9d100ec0fc
commit d5259e1525
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
10 changed files with 242 additions and 60 deletions

View file

@@ -1,6 +1,60 @@
const { ChatOpenAI } = require('langchain/chat_models/openai');
const { sanitizeModelName } = require('../../../utils');
const { isEnabled } = require('../../../server/utils');
/**
* @typedef {Object} ModelOptions
* @property {string} modelName - The name of the model.
* @property {number} [temperature] - The temperature setting for the model.
* @property {number} [presence_penalty] - The presence penalty setting.
* @property {number} [frequency_penalty] - The frequency penalty setting.
* @property {number} [max_tokens] - The maximum number of tokens to generate.
*/
/**
* @typedef {Object} ConfigOptions
* @property {string} [basePath] - The base path for the API requests.
* @property {Object} [baseOptions] - Base options for the API requests, including headers.
* @property {Object} [httpAgent] - The HTTP agent for the request.
* @property {Object} [httpsAgent] - The HTTPS agent for the request.
*/
/**
* @typedef {Object} Callbacks
* @property {Function} [handleChatModelStart] - A callback function for handleChatModelStart
* @property {Function} [handleLLMEnd] - A callback function for handleLLMEnd
* @property {Function} [handleLLMError] - A callback function for handleLLMError
*/
/**
* @typedef {Object} AzureOptions
* @property {string} [azureOpenAIApiKey] - The Azure OpenAI API key.
* @property {string} [azureOpenAIApiInstanceName] - The Azure OpenAI API instance name.
* @property {string} [azureOpenAIApiDeploymentName] - The Azure OpenAI API deployment name.
* @property {string} [azureOpenAIApiVersion] - The Azure OpenAI API version.
*/
/**
* Creates a new instance of a language model (LLM) for chat interactions.
*
* @param {Object} options - The options for creating the LLM.
* @param {ModelOptions} options.modelOptions - The options specific to the model, including modelName, temperature, presence_penalty, frequency_penalty, and other model-related settings.
* @param {ConfigOptions} options.configOptions - Configuration options for the API requests, including proxy settings and custom headers.
* @param {Callbacks} options.callbacks - Callback functions for managing the lifecycle of the LLM, including token buffers, context, and initial message count.
* @param {boolean} [options.streaming=false] - Determines if the LLM should operate in streaming mode.
* @param {string} options.openAIApiKey - The API key for OpenAI, used for authentication.
* @param {AzureOptions} [options.azure={}] - Optional Azure-specific configurations. If provided, Azure configurations take precedence over OpenAI configurations.
*
* @returns {ChatOpenAI} An instance of the ChatOpenAI class, configured with the provided options.
*
* @example
* const llm = createLLM({
* modelOptions: { modelName: 'gpt-3.5-turbo', temperature: 0.2 },
* configOptions: { basePath: 'https://example.api/path' },
 * callbacks: { handleLLMEnd: handleLLMEnd },
* openAIApiKey: 'your-api-key'
* });
*/
function createLLM({
modelOptions,
configOptions,
@@ -16,10 +70,19 @@ function createLLM({
let azureOptions = {};
if (azure) {
const useModelName = isEnabled(process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME);
credentials = {};
configuration = {};
azureOptions = azure;
azureOptions.azureOpenAIApiDeploymentName = sanitizeModelName(modelOptions.modelName);
azureOptions.azureOpenAIApiDeploymentName = useModelName
? sanitizeModelName(modelOptions.modelName)
: azureOptions.azureOpenAIApiDeploymentName;
}
if (azure && process.env.AZURE_OPENAI_DEFAULT_MODEL) {
modelOptions.modelName = process.env.AZURE_OPENAI_DEFAULT_MODEL;
}
// console.debug('createLLM: configOptions');