Feat: Add Azure support (#219)

feat(api): add support for Azure OpenAI API

- Add Azure OpenAI API environment variables to .env.example
- Modify chatgpt-client.js to use the Azure OpenAI API when its environment variables are present
- Modify askOpenAI.js to use arrow function syntax
- Modify handlers.js to add a console.log statement for the partial variable
Danny Avila 2023-05-09 17:42:55 -04:00 committed by GitHub
parent e2dc994b63
commit 5dd9c11326
7 changed files with 144 additions and 25 deletions


@@ -36,6 +36,23 @@ OPENAI_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-0301,text-davinci-003,gpt-4
# https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy
# OPENAI_REVERSE_PROXY=
##########################
# AZURE Endpoint:
##########################
# To use Azure with this project, set the following variables. These will be used to build the API URL.
# Chat completion:
# `https://{AZURE_OPENAI_API_INSTANCE_NAME}.openai.azure.com/openai/deployments/{AZURE_OPENAI_API_DEPLOYMENT_NAME}/chat/completions?api-version={AZURE_OPENAI_API_VERSION}`;
# You should also consider changing the `OPENAI_MODELS` variable above to the models available in your instance/deployment.
# Note: I've noticed that the Azure API is much faster than the OpenAI API, so the streaming looks almost instantaneous.
# AZURE_OPENAI_API_KEY=
# AZURE_OPENAI_API_INSTANCE_NAME=
# AZURE_OPENAI_API_DEPLOYMENT_NAME=
# AZURE_OPENAI_API_VERSION=
# AZURE_OPENAI_API_COMPLETIONS_DEPLOYMENT_NAME= # Optional, but may be used in future updates
# AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME= # Optional, but may be used in future updates
##########################
# BingAI Endpoint:
##########################
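
For illustration, here is a minimal sketch of how these variables combine into the Azure chat completions URL. The helper name is hypothetical; the real construction happens inline in chatgpt-client.js further down in this commit.

require('dotenv').config();

// Hypothetical helper mirroring how the Azure variables above are combined
// into the chat completions URL used by the client.
const buildAzureChatCompletionsUrl = () => {
  const instance = process.env.AZURE_OPENAI_API_INSTANCE_NAME;
  const deployment = process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME;
  const version = process.env.AZURE_OPENAI_API_VERSION;
  return `https://${instance}.openai.azure.com/openai/deployments/${deployment}/chat/completions?api-version=${version}`;
};

// Example values (not from the commit): instance "my-instance", deployment "gpt-35-turbo",
// and version "2023-03-15-preview" yield:
// https://my-instance.openai.azure.com/openai/deployments/gpt-35-turbo/chat/completions?api-version=2023-03-15-preview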


@@ -16,7 +16,7 @@ const askBing = async ({
token,
onProgress
}) => {
const { BingAIClient } = await import('@waylaidwanderer/chatgpt-api');
const { BingAIClient } = await import('og-chatgpt-api');
const store = {
store: new KeyvFile({ filename: './data/cache.json' })
};


@@ -11,7 +11,7 @@ const browserClient = async ({
abortController,
userId
}) => {
const { ChatGPTBrowserClient } = await import('@waylaidwanderer/chatgpt-api');
const { ChatGPTBrowserClient } = await import('og-chatgpt-api');
const store = {
store: new KeyvFile({ filename: './data/cache.json' })
};


@@ -1,6 +1,5 @@
require('dotenv').config();
const { KeyvFile } = require('keyv-file');
// const set = new Set(['gpt-4', 'text-davinci-003', 'gpt-3.5-turbo', 'gpt-3.5-turbo-0301']);
const askClient = async ({
text,
@@ -17,15 +16,16 @@ const askClient = async ({
abortController,
userId
}) => {
const ChatGPTClient = (await import('@waylaidwanderer/chatgpt-api')).default;
const { ChatGPTClient } = await import('@waylaidwanderer/chatgpt-api');
const store = {
store: new KeyvFile({ filename: './data/cache.json' })
};
const clientOptions = {
// Warning: This will expose your access token to a third party. Consider the risks before using this.
reverseProxyUrl: process.env.OPENAI_REVERSE_PROXY || null,
const azure = process.env.AZURE_OPENAI_API_KEY ? true : false;
const clientOptions = {
reverseProxyUrl: process.env.OPENAI_REVERSE_PROXY || null,
azure,
modelOptions: {
model: model,
temperature,
@@ -33,22 +33,28 @@ const askClient = async ({
presence_penalty,
frequency_penalty
},
chatGptLabel,
promptPrefix,
proxy: process.env.PROXY || null,
debug: false,
user: userId
debug: false
};
const client = new ChatGPTClient(process.env.OPENAI_KEY, clientOptions, store);
let options = { onProgress, abortController };
let apiKey = process.env.OPENAI_KEY;
if (!!parentMessageId && !!conversationId) {
options = { ...options, parentMessageId, conversationId };
if (azure) {
apiKey = process.env.AZURE_OPENAI_API_KEY;
clientOptions.reverseProxyUrl = `https://${process.env.AZURE_OPENAI_API_INSTANCE_NAME}.openai.azure.com/openai/deployments/${process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME}/chat/completions?api-version=${process.env.AZURE_OPENAI_API_VERSION}`;
}
const res = await client.sendMessage(text, options);
const client = new ChatGPTClient(apiKey, clientOptions, store);
const options = {
onProgress,
abortController,
...(parentMessageId && conversationId ? { parentMessageId, conversationId } : {})
};
const res = await client.sendMessage(text, { ...options, userId });
return res;
};
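
A rough usage sketch of the updated client follows; the require path and export shape are assumptions for illustration (the commit message only names the file chatgpt-client.js). When AZURE_OPENAI_API_KEY is set, askClient uses the Azure key and points reverseProxyUrl at the Azure deployment; otherwise it falls back to OPENAI_KEY.

// Require path and export shape assumed for illustration.
const askClient = require('./chatgpt-client');

(async () => {
  const res = await askClient({
    text: 'Hello from Azure!',                              // example prompt
    model: 'gpt-3.5-turbo',                                 // should match a model deployed in your instance
    temperature: 0.8,
    presence_penalty: 0,
    frequency_penalty: 0,
    onProgress: (partial) => process.stdout.write(partial), // streamed token callback
    abortController: new AbortController(),
    userId: 'example-user'                                  // hypothetical id
  });
  console.log(res);
})();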

api/package-lock.json generated

@@ -11,7 +11,7 @@
"dependencies": {
"@dqbd/tiktoken": "^1.0.2",
"@keyv/mongo": "^2.1.8",
"@waylaidwanderer/chatgpt-api": "^1.35.0",
"@waylaidwanderer/chatgpt-api": "github:danny-avila/node-chatgpt-api",
"axios": "^1.3.4",
"bcrypt": "^5.1.0",
"bcryptjs": "^2.4.3",
@@ -32,6 +32,7 @@
"meilisearch": "^0.31.1",
"mongoose": "^6.9.0",
"nodemailer": "^6.9.1",
"og-chatgpt-api": "npm:@waylaidwanderer/chatgpt-api@^1.35.0",
"openai": "^3.1.0",
"passport": "^0.6.0",
"passport-facebook": "^3.0.0",
@@ -1665,8 +1666,8 @@
},
"node_modules/@waylaidwanderer/chatgpt-api": {
"version": "1.35.0",
"resolved": "https://registry.npmjs.org/@waylaidwanderer/chatgpt-api/-/chatgpt-api-1.35.0.tgz",
"integrity": "sha512-JnFH67m+bnjM8pqhLe69FTT27yk3LYsdbn67VLyy6iE6ei7NO43/MYnJ6QGlfjj5esTa96DwLqf6B0jG3V/miA==",
"resolved": "git+ssh://git@github.com/danny-avila/node-chatgpt-api.git#ea4f4e392a908f86f5c00407c142799b48f8349f",
"license": "MIT",
"dependencies": {
"@dqbd/tiktoken": "^1.0.2",
"@fastify/cors": "^8.2.0",
@@ -1677,7 +1678,7 @@
"dotenv": "^16.0.3",
"fastify": "^4.11.0",
"fetch-undici": "^3.0.1",
"https-proxy-agent": "^5.0.1",
"https-proxy-agent": "^6.0.0",
"inquirer": "^9.1.4",
"inquirer-autocomplete-prompt": "^3.0.0",
"keyv": "^4.5.2",
@@ -1691,6 +1692,29 @@
"chatgpt-cli": "bin/cli.js"
}
},
"node_modules/@waylaidwanderer/chatgpt-api/node_modules/agent-base": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.0.1.tgz",
"integrity": "sha512-V9to8gr2GK7eA+xskWGAFUX/TLSQKuH2TI06c/jGLL6yLp3oEjtnqM7a5tPV9fC1rabLeAgThZeBwsYX+WWHpw==",
"dependencies": {
"debug": "^4.3.4"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/@waylaidwanderer/chatgpt-api/node_modules/https-proxy-agent": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-6.1.0.tgz",
"integrity": "sha512-rvGRAlc3y+iS7AC9Os2joN91mX8wHpJ4TEklmHHxr7Gz2Juqa7fJmJ8wWxXNpTaRt56MQTwojxV5d82UW/+jwg==",
"dependencies": {
"agent-base": "^7.0.1",
"debug": "4"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/@waylaidwanderer/fastify-sse-v2": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@waylaidwanderer/fastify-sse-v2/-/fastify-sse-v2-3.1.0.tgz",
@@ -4587,6 +4611,35 @@
"resolved": "https://registry.npmjs.org/obliterator/-/obliterator-2.0.4.tgz",
"integrity": "sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ=="
},
"node_modules/og-chatgpt-api": {
"name": "@waylaidwanderer/chatgpt-api",
"version": "1.35.0",
"resolved": "https://registry.npmjs.org/@waylaidwanderer/chatgpt-api/-/chatgpt-api-1.35.0.tgz",
"integrity": "sha512-JnFH67m+bnjM8pqhLe69FTT27yk3LYsdbn67VLyy6iE6ei7NO43/MYnJ6QGlfjj5esTa96DwLqf6B0jG3V/miA==",
"dependencies": {
"@dqbd/tiktoken": "^1.0.2",
"@fastify/cors": "^8.2.0",
"@waylaidwanderer/fastify-sse-v2": "^3.1.0",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"boxen": "^7.0.1",
"clipboardy": "^3.0.0",
"dotenv": "^16.0.3",
"fastify": "^4.11.0",
"fetch-undici": "^3.0.1",
"https-proxy-agent": "^5.0.1",
"inquirer": "^9.1.4",
"inquirer-autocomplete-prompt": "^3.0.0",
"keyv": "^4.5.2",
"keyv-file": "^0.2.0",
"ora": "^6.1.2",
"undici": "^5.20.0",
"ws": "^8.12.0"
},
"bin": {
"chatgpt-api": "bin/server.js",
"chatgpt-cli": "bin/cli.js"
}
},
"node_modules/on-exit-leak-free": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.0.tgz",
@@ -7602,9 +7655,8 @@
}
},
"@waylaidwanderer/chatgpt-api": {
"version": "1.35.0",
"resolved": "https://registry.npmjs.org/@waylaidwanderer/chatgpt-api/-/chatgpt-api-1.35.0.tgz",
"integrity": "sha512-JnFH67m+bnjM8pqhLe69FTT27yk3LYsdbn67VLyy6iE6ei7NO43/MYnJ6QGlfjj5esTa96DwLqf6B0jG3V/miA==",
"version": "git+ssh://git@github.com/danny-avila/node-chatgpt-api.git#ea4f4e392a908f86f5c00407c142799b48f8349f",
"from": "@waylaidwanderer/chatgpt-api@github:danny-avila/node-chatgpt-api",
"requires": {
"@dqbd/tiktoken": "^1.0.2",
"@fastify/cors": "^8.2.0",
@@ -7615,7 +7667,7 @@
"dotenv": "^16.0.3",
"fastify": "^4.11.0",
"fetch-undici": "^3.0.1",
"https-proxy-agent": "^5.0.1",
"https-proxy-agent": "^6.0.0",
"inquirer": "^9.1.4",
"inquirer-autocomplete-prompt": "^3.0.0",
"keyv": "^4.5.2",
@@ -7623,6 +7675,25 @@
"ora": "^6.1.2",
"undici": "^5.20.0",
"ws": "^8.12.0"
},
"dependencies": {
"agent-base": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.0.1.tgz",
"integrity": "sha512-V9to8gr2GK7eA+xskWGAFUX/TLSQKuH2TI06c/jGLL6yLp3oEjtnqM7a5tPV9fC1rabLeAgThZeBwsYX+WWHpw==",
"requires": {
"debug": "^4.3.4"
}
},
"https-proxy-agent": {
"version": "6.1.0",
"resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-6.1.0.tgz",
"integrity": "sha512-rvGRAlc3y+iS7AC9Os2joN91mX8wHpJ4TEklmHHxr7Gz2Juqa7fJmJ8wWxXNpTaRt56MQTwojxV5d82UW/+jwg==",
"requires": {
"agent-base": "^7.0.1",
"debug": "4"
}
}
}
},
"@waylaidwanderer/fastify-sse-v2": {
@@ -9769,6 +9840,30 @@
"resolved": "https://registry.npmjs.org/obliterator/-/obliterator-2.0.4.tgz",
"integrity": "sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ=="
},
"og-chatgpt-api": {
"version": "npm:@waylaidwanderer/chatgpt-api@1.35.0",
"resolved": "https://registry.npmjs.org/@waylaidwanderer/chatgpt-api/-/chatgpt-api-1.35.0.tgz",
"integrity": "sha512-JnFH67m+bnjM8pqhLe69FTT27yk3LYsdbn67VLyy6iE6ei7NO43/MYnJ6QGlfjj5esTa96DwLqf6B0jG3V/miA==",
"requires": {
"@dqbd/tiktoken": "^1.0.2",
"@fastify/cors": "^8.2.0",
"@waylaidwanderer/fastify-sse-v2": "^3.1.0",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"boxen": "^7.0.1",
"clipboardy": "^3.0.0",
"dotenv": "^16.0.3",
"fastify": "^4.11.0",
"fetch-undici": "^3.0.1",
"https-proxy-agent": "^5.0.1",
"inquirer": "^9.1.4",
"inquirer-autocomplete-prompt": "^3.0.0",
"keyv": "^4.5.2",
"keyv-file": "^0.2.0",
"ora": "^6.1.2",
"undici": "^5.20.0",
"ws": "^8.12.0"
}
},
"on-exit-leak-free": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.0.tgz",


@@ -21,7 +21,7 @@
"dependencies": {
"@dqbd/tiktoken": "^1.0.2",
"@keyv/mongo": "^2.1.8",
"@waylaidwanderer/chatgpt-api": "^1.35.0",
"@waylaidwanderer/chatgpt-api": "github:danny-avila/node-chatgpt-api",
"axios": "^1.3.4",
"bcrypt": "^5.1.0",
"bcryptjs": "^2.4.3",
@@ -42,6 +42,7 @@
"meilisearch": "^0.31.1",
"mongoose": "^6.9.0",
"nodemailer": "^6.9.1",
"og-chatgpt-api": "npm:@waylaidwanderer/chatgpt-api@^1.35.0",
"openai": "^3.1.0",
"passport": "^0.6.0",
"passport-facebook": "^3.0.0",


@@ -64,7 +64,7 @@ router.post('/', requireJwtAuth, async (req, res) => {
};
const availableModels = getOpenAIModels();
if (availableModels.find((model) => model === endpointOption.model) === undefined)
if (availableModels.find(model => model === endpointOption.model) === undefined)
return handleError(res, { text: 'Illegal request: model' });
console.log('ask log', {