feat(azureOpenAI): Allow Switching Deployment Name by Model Name (#1137)

* feat(azureOpenAI): allow switching deployment name by model name

* ci: add unit tests and throw error on no api key provided to avoid API call

* fix(gptPlugins/initializeClient): check if azure is enabled; ci: add unit tests for gptPlugins/initializeClient

* fix(ci): fix expected error message for partial regex match: unexpected token
This commit is contained in:
Danny Avila 2023-11-04 15:03:31 -04:00 committed by GitHub
parent a7b5639da1
commit 0886441461
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
8 changed files with 562 additions and 38 deletions

View file

@@ -517,6 +517,9 @@ If your reverse proxy is compatible to OpenAI specs in every other way, it may s
console.log('There was an issue generating title with LangChain, trying the old method...');
this.options.debug && console.error(e.message, e);
modelOptions.model = OPENAI_TITLE_MODEL ?? 'gpt-3.5-turbo';
if (this.azure) {
this.azureEndpoint = genAzureChatCompletion(this.azure, modelOptions.model);
}
const instructionsPayload = [
{
role: 'system',

View file

@@ -1,4 +1,5 @@
const { ChatOpenAI } = require('langchain/chat_models/openai');
const { sanitizeModelName } = require('../../../utils');
function createLLM({
modelOptions,
@@ -13,9 +14,12 @@ function createLLM({
apiKey: openAIApiKey,
};
let azureOptions = {};
if (azure) {
credentials = {};
configuration = {};
azureOptions = azure;
azureOptions.azureOpenAIApiDeploymentName = sanitizeModelName(modelOptions.modelName);
}
// console.debug('createLLM: configOptions');
@@ -27,7 +31,7 @@ function createLLM({
verbose: true,
credentials,
configuration,
...azure,
...azureOptions,
...modelOptions,
callbacks,
},