From 5c1e44eff722e438cca48a2b03b1657d76ed8ae3 Mon Sep 17 00:00:00 2001
From: Danny Avila <110412045+danny-avila@users.noreply.github.com>
Date: Sun, 29 Oct 2023 13:20:30 -0400
Subject: [PATCH] feat(OpenAIClient): Add HttpsProxyAgent to initializeLLM (#1119)

* feat(OpenAIClient): Add HttpsProxyAgent to initializeLLM

* chore: fix linting error in ModelService
---
 api/app/clients/OpenAIClient.js     | 6 ++++++
 api/server/services/ModelService.js | 2 +-
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/api/app/clients/OpenAIClient.js b/api/app/clients/OpenAIClient.js
index 1450a08d7..7d078bdcf 100644
--- a/api/app/clients/OpenAIClient.js
+++ b/api/app/clients/OpenAIClient.js
@@ -1,4 +1,5 @@
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
+const { HttpsProxyAgent } = require('https-proxy-agent');
 const ChatGPTClient = require('./ChatGPTClient');
 const BaseClient = require('./BaseClient');
 const { getModelMaxTokens, genAzureChatCompletion } = require('../../utils');
@@ -461,6 +462,11 @@ If your reverse proxy is compatible to OpenAI specs in every other way, it may s
       };
     }
 
+    if (this.options.proxy) {
+      configOptions.httpAgent = new HttpsProxyAgent(this.options.proxy);
+      configOptions.httpsAgent = new HttpsProxyAgent(this.options.proxy);
+    }
+
     const { req, res, debug } = this.options;
     const runManager = new RunManager({ req, res, debug, abortController: this.abortController });
     this.runManager = runManager;
diff --git a/api/server/services/ModelService.js b/api/server/services/ModelService.js
index f8857bb5c..cb8bb5b1f 100644
--- a/api/server/services/ModelService.js
+++ b/api/server/services/ModelService.js
@@ -29,7 +29,7 @@ const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _model
 
   if (OPENROUTER_API_KEY) {
     reverseProxyUrl = 'https://openrouter.ai/api/v1';
-    apiKey = OPENROUTER_API_KEY
+    apiKey = OPENROUTER_API_KEY;
   }
 
   if (reverseProxyUrl) {
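
Not part of the patch, but for context: the new httpAgent/httpsAgent entries on
configOptions follow the axios-style agent options, which is how the proxy ends
up applying to requests made by the initialized LLM. Below is a minimal
standalone sketch of the same pattern using plain axios, assuming a PROXY
environment variable and a placeholder proxy URL; neither the helper name nor
the fallback address is defined by this diff, and this is not LibreChat's
actual call path.

const axios = require('axios');
const { HttpsProxyAgent } = require('https-proxy-agent');

// Assumed: the proxy URL comes from a PROXY env var; the fallback is a placeholder.
const proxyUrl = process.env.PROXY || 'http://127.0.0.1:8080';
const agent = new HttpsProxyAgent(proxyUrl);

// Hypothetical helper: lists model IDs, with all traffic to the OpenAI API
// tunneled through the configured proxy via axios's httpAgent/httpsAgent options.
async function listModels(apiKey) {
  const { data } = await axios.get('https://api.openai.com/v1/models', {
    headers: { Authorization: `Bearer ${apiKey}` },
    httpAgent: agent,
    httpsAgent: agent,
  });
  return data.data.map((model) => model.id);
}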