refactor: Speed up Config fetching and Setup Config Groundwork 👷🚧 (#1297)

* refactor: move endpoint services to their own directory

* refactor: make endpoint config handling more concise, separate logic, and cache the result for subsequent serving

* refactor: ModelController gets the same treatment as EndpointController, draft OverrideController

* wip: flesh out override controller more to return a real value

* refactor: client/api changes in anticipation of override
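
The "cache the result for subsequent serving" step above is central to the speed-up: the endpoints config depends only on environment variables and the available plugin tools, so it can be computed once and replayed for every later request. A minimal sketch of that idea, assuming an Express-style handler and that the new directory is exposed as ~/server/services/Config; the actual controller is not shown in this excerpt and the names below are illustrative:

const { loadDefaultEndpointsConfig } = require('~/server/services/Config');

// Computed once per process; subsequent requests are served from memory.
let cachedEndpointsConfig = null;

async function endpointController(req, res) {
  if (!cachedEndpointsConfig) {
    cachedEndpointsConfig = await loadDefaultEndpointsConfig();
  }
  res.send(JSON.stringify(cachedEndpointsConfig));
}

module.exports = endpointController;
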
Author: Danny Avila, 2023-12-06 19:36:57 -05:00 (committed by GitHub)
Parent: 9b2359fc27
Commit: 0bae503a0a
27 changed files with 405 additions and 138 deletions

~/server/services/Config/EndpointService.js
@@ -0,0 +1,37 @@
const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');
const {
OPENAI_API_KEY: openAIApiKey,
AZURE_API_KEY: azureOpenAIApiKey,
ANTHROPIC_API_KEY: anthropicApiKey,
CHATGPT_TOKEN: chatGPTToken,
BINGAI_TOKEN: bingToken,
PLUGINS_USE_AZURE,
PALM_KEY: palmKey,
} = process.env ?? {};
const useAzurePlugins = !!PLUGINS_USE_AZURE;
const userProvidedOpenAI = useAzurePlugins
? azureOpenAIApiKey === 'user_provided'
: openAIApiKey === 'user_provided';
function isUserProvided(key) {
return key ? { userProvide: key === 'user_provided' } : false;
}
module.exports = {
config: {
openAIApiKey,
azureOpenAIApiKey,
useAzurePlugins,
userProvidedOpenAI,
palmKey,
[EModelEndpoint.openAI]: isUserProvided(openAIApiKey),
[EModelEndpoint.assistant]: isUserProvided(openAIApiKey),
[EModelEndpoint.azureOpenAI]: isUserProvided(azureOpenAIApiKey),
[EModelEndpoint.chatGPTBrowser]: isUserProvided(chatGPTToken),
[EModelEndpoint.anthropic]: isUserProvided(anthropicApiKey),
[EModelEndpoint.bingAI]: isUserProvided(bingToken),
},
};
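
For orientation (not part of the diff): each per-endpoint value above is either false, meaning no key is configured at all, or { userProvide: boolean }, which tells the client whether it must collect the key from the user. A small example of reading it, using only modules that appear in this commit:

const { config } = require('~/server/services/Config/EndpointService');
const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');

console.log(config[EModelEndpoint.openAI]);
// OPENAI_API_KEY=user_provided -> { userProvide: true }   (client asks the user for a key)
// OPENAI_API_KEY=sk-...        -> { userProvide: false }  (server-side key is used)
// OPENAI_API_KEY unset         -> false                   (endpoint not configured)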

~/server/services/Config/index.js
@@ -0,0 +1,13 @@
const { config } = require('./EndpointService');
const loadDefaultModels = require('./loadDefaultModels');
const loadOverrideConfig = require('./loadOverrideConfig');
const loadAsyncEndpoints = require('./loadAsyncEndpoints');
const loadDefaultEndpointsConfig = require('./loadDefaultEConfig');
module.exports = {
config,
loadDefaultModels,
loadOverrideConfig,
loadAsyncEndpoints,
loadDefaultEndpointsConfig,
};

~/server/services/Config/loadAsyncEndpoints.js
@@ -0,0 +1,51 @@
const { availableTools } = require('~/app/clients/tools');
const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs');
const { openAIApiKey, azureOpenAIApiKey, useAzurePlugins, userProvidedOpenAI, palmKey } =
require('./EndpointService').config;
/**
* Load async endpoints and return a configuration object
*/
async function loadAsyncEndpoints() {
  let key, palmUser;
  try {
    // A Google/PaLM credentials file mounted at ~/data/auth.json lets the google
    // endpoint run with server-side credentials.
    key = require('~/data/auth.json');
  } catch (e) {
    // No auth.json present; google stays disabled unless the key is user-provided below.
  }

  if (palmKey === 'user_provided') {
    palmUser = true;
  }
const tools = await addOpenAPISpecs(availableTools);
function transformToolsToMap(tools) {
return tools.reduce((map, obj) => {
map[obj.pluginKey] = obj.name;
return map;
}, {});
}
const plugins = transformToolsToMap(tools);
const google = key || palmUser ? { userProvide: palmUser } : false;
const gptPlugins =
openAIApiKey || azureOpenAIApiKey
? {
plugins,
availableAgents: ['classic', 'functions'],
userProvide: userProvidedOpenAI,
azure: useAzurePlugins,
}
: false;
return { google, gptPlugins };
}
module.exports = loadAsyncEndpoints;
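
Roughly what this resolves to at runtime (a sketch; the actual plugin keys and flags depend on the environment):

const loadAsyncEndpoints = require('~/server/services/Config/loadAsyncEndpoints');

(async () => {
  const { google, gptPlugins } = await loadAsyncEndpoints();
  console.log(google);     // false, or { userProvide: true } when PALM_KEY=user_provided,
                           // or { userProvide: undefined } when only ~/data/auth.json exists
  console.log(gptPlugins); // false without an OpenAI/Azure key; otherwise:
  // {
  //   plugins: { <pluginKey>: '<plugin name>', ... },   // from the OpenAPI-augmented tool list
  //   availableAgents: ['classic', 'functions'],
  //   userProvide: <true when the key is 'user_provided'>,
  //   azure: <true when PLUGINS_USE_AZURE is set>,
  // }
})();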

~/server/services/Config/loadDefaultEConfig.js
@@ -0,0 +1,52 @@
const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');
const loadAsyncEndpoints = require('./loadAsyncEndpoints');
const { config } = require('./EndpointService');
/**
* Load async endpoints and return a configuration object
* @function loadDefaultEndpointsConfig
* @returns {Promise<Object.<string, EndpointWithOrder>>} An object whose keys are endpoint names and values are objects that contain the endpoint configuration and an order.
*/
async function loadDefaultEndpointsConfig() {
const { google, gptPlugins } = await loadAsyncEndpoints();
const { openAI, bingAI, anthropic, azureOpenAI, chatGPTBrowser } = config;
let enabledEndpoints = [
EModelEndpoint.openAI,
EModelEndpoint.azureOpenAI,
EModelEndpoint.google,
EModelEndpoint.bingAI,
EModelEndpoint.chatGPTBrowser,
EModelEndpoint.gptPlugins,
EModelEndpoint.anthropic,
];
const endpointsEnv = process.env.ENDPOINTS || '';
if (endpointsEnv) {
enabledEndpoints = endpointsEnv
.split(',')
.filter((endpoint) => endpoint?.trim())
.map((endpoint) => endpoint.trim());
}
const endpointConfig = {
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.azureOpenAI]: azureOpenAI,
[EModelEndpoint.google]: google,
[EModelEndpoint.bingAI]: bingAI,
[EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
[EModelEndpoint.gptPlugins]: gptPlugins,
[EModelEndpoint.anthropic]: anthropic,
};
const orderedAndFilteredEndpoints = enabledEndpoints.reduce((config, key, index) => {
if (endpointConfig[key]) {
config[key] = { ...(endpointConfig[key] ?? {}), order: index };
}
return config;
}, {});
return orderedAndFilteredEndpoints;
}
module.exports = loadDefaultEndpointsConfig;
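
A concrete example of the ENDPOINTS filter and ordering, assuming the names in .env match the EModelEndpoint values used above and that the listed endpoints have keys configured:

// .env: ENDPOINTS=openAI,gptPlugins,bingAI
const loadDefaultEndpointsConfig = require('~/server/services/Config/loadDefaultEConfig');

(async () => {
  console.log(await loadDefaultEndpointsConfig());
  // Only the three listed endpoints survive, in the order given:
  // {
  //   openAI:     { userProvide: false, order: 0 },
  //   gptPlugins: { plugins: { ... }, availableAgents: ['classic', 'functions'],
  //                 userProvide: false, azure: false, order: 1 },
  //   bingAI:     { userProvide: false, order: 2 },
  // }
  // Anything listed without a configured key resolves to false and is dropped by the reduce.
})();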

~/server/services/Config/loadDefaultModels.js
@@ -0,0 +1,32 @@
const {
getOpenAIModels,
getChatGPTBrowserModels,
getAnthropicModels,
} = require('~/server/services/ModelService');
const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');
const { useAzurePlugins } = require('~/server/services/Config/EndpointService').config;
const filterAssistantModels = (str) => {
  return /gpt-4|gpt-3\.5/i.test(str) && !/vision|instruct/i.test(str);
};
async function loadDefaultModels() {
const openAI = await getOpenAIModels();
const anthropic = getAnthropicModels();
const chatGPTBrowser = getChatGPTBrowserModels();
const azureOpenAI = await getOpenAIModels({ azure: true });
const gptPlugins = await getOpenAIModels({ azure: useAzurePlugins, plugins: true });
return {
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.azureOpenAI]: azureOpenAI,
[EModelEndpoint.assistant]: openAI.filter(filterAssistantModels),
[EModelEndpoint.google]: ['chat-bison', 'text-bison', 'codechat-bison'],
[EModelEndpoint.bingAI]: ['BingAI', 'Sydney'],
[EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
[EModelEndpoint.gptPlugins]: gptPlugins,
[EModelEndpoint.anthropic]: anthropic,
};
}
module.exports = loadDefaultModels;
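
The assistant list is just the OpenAI list narrowed by the filter above; since the filter is module-private, here is its logic restated with a few sample inputs:

const filterAssistantModels = (str) =>
  /gpt-4|gpt-3\.5/i.test(str) && !/vision|instruct/i.test(str);

console.log(filterAssistantModels('gpt-4-1106-preview'));     // true
console.log(filterAssistantModels('gpt-4-vision-preview'));   // false: vision models excluded
console.log(filterAssistantModels('gpt-3.5-turbo-instruct')); // false: instruct models excluded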

~/server/services/Config/loadOverrideConfig.js
@@ -0,0 +1,6 @@
// Placeholder for fetching a remote override config; returns false (no override) until
// that work lands (see the "wip: flesh out override controller" note in the commit message).
async function loadOverrideConfig() {
  return false;
}
module.exports = loadOverrideConfig;
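
The stub above lines up with the "return a real value" WIP bullet in the commit message. Purely as a sketch of where this could go; the environment variable name and the use of global fetch are assumptions, not part of this commit:

async function loadOverrideConfigSketch() {
  const url = process.env.OVERRIDE_CONFIG_URL; // hypothetical variable, not read by this commit
  if (!url) {
    return false; // keep the current behavior when nothing is configured
  }
  try {
    const response = await fetch(url); // assumes a fetch-capable Node runtime (v18+)
    return await response.json();
  } catch (error) {
    console.error('Failed to load override config:', error);
    return false;
  }
}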