LibreChat/api/server/routes/endpoints/gptPlugins/initializeClient.js
Danny Avila fd70e21732
feat: OpenRouter Support & Improve Model Fetching ⇆ (#936)
* chore(ChatGPTClient.js): add support for OpenRouter API
chore(OpenAIClient.js): add support for OpenRouter API

* chore: comment out token debugging

* chore: add back streamResult assignment

* chore: remove double condition/assignment from merging

* refactor(routes/endpoints): move route logic into controller/services

* feat: add openrouter model fetching

* chore: remove unused endpointsConfig in cleanupPreset function

* refactor: separate models concern from endpointsConfig

* refactor(data-provider): add TModels type and make TEndpointsConfig adaptable to new endpoint keys

* refactor: complete models endpoint service in data-provider

* refactor: onMutate for refreshToken and login, invalidate models query

* feat: complete models endpoint logic for frontend

* chore: remove requireJwtAuth from /api/endpoints and /api/models as not implemented yet

* fix: endpoint will not be overwritten and will instead use the active value

* feat: openrouter support for plugins

* chore(EndpointOptionsDialog): remove unused recoil value

* refactor(schemas/parseConvo): add handling of secondaryModels to use the first of the defined secondary models (the last selected model is placed first in that list), or default to the convo's secondary model value (see the sketch after this commit message)

* refactor: remove hooks from store and move to hooks
refactor(switchToConversation): make switchToConversation use the latest recoil state, which is necessary to get the most up-to-date models list; replace the wrapper function
refactor(getDefaultConversation): factor out logic into 3 pieces to reduce complexity.

* fix: backend tests

* feat: optimistic update by calling newConvo when models are fetched

* feat: openrouter support for titling convos

* feat: cache models fetch

* chore: add missing dep to AuthContext useEffect

* chore: fix useTimeout types

* chore: delete old getDefaultConvo file

* chore: remove newConvo logic from Root, remove console log from api models caching

* chore: ensure bun is used for building in b:client script

* fix: default endpoint will not default to null on a completely fresh login (no localStorage/cookies)

* chore: add openrouter docs to free_ai_apis.md and .env.example

* chore: remove openrouter console logs

* feat: add debugging env variable for Plugins
2023-09-18 12:55:51 -04:00
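
As an illustration of the secondaryModels handling described in the parseConvo bullet above, a minimal hypothetical sketch (the helper name and argument shapes are illustrative, not the actual parseConvo code):

// Prefer the first of the defined secondary models (the last selected model is
// placed first in that list); otherwise fall back to the conversation's value.
const pickSecondaryModel = (secondaryModels, convoSecondaryModel) =>
  Array.isArray(secondaryModels) && secondaryModels.length > 0
    ? secondaryModels[0]
    : convoSecondaryModel;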

58 lines
1.7 KiB
JavaScript

const { PluginsClient } = require('../../../../app');
const { isEnabled } = require('../../../utils');
const { getAzureCredentials } = require('../../../../utils');
const { getUserKey, checkUserKeyExpiry } = require('../../../services/UserService');

const initializeClient = async (req, endpointOption) => {
  const {
    PROXY,
    OPENAI_API_KEY,
    AZURE_API_KEY,
    PLUGINS_USE_AZURE,
    OPENAI_REVERSE_PROXY,
    DEBUG_PLUGINS,
  } = process.env;
  const { key: expiresAt } = req.body;

  const clientOptions = {
    debug: isEnabled(DEBUG_PLUGINS),
    reverseProxyUrl: OPENAI_REVERSE_PROXY ?? null,
    proxy: PROXY ?? null,
    ...endpointOption,
  };

  // When the configured API key is 'user_provided', the key is supplied
  // per-user (stored via UserService) rather than taken from the environment.
  const isUserProvided = PLUGINS_USE_AZURE
    ? AZURE_API_KEY === 'user_provided'
    : OPENAI_API_KEY === 'user_provided';

  let key = null;
  if (expiresAt && isUserProvided) {
    checkUserKeyExpiry(
      expiresAt,
      'Your OpenAI API key has expired. Please provide your API key again.',
    );
    key = await getUserKey({
      userId: req.user.id,
      name: PLUGINS_USE_AZURE ? 'azureOpenAI' : 'openAI',
    });
  }

  let openAIApiKey = isUserProvided ? key : OPENAI_API_KEY;

  if (PLUGINS_USE_AZURE) {
    clientOptions.azure = isUserProvided ? JSON.parse(key) : getAzureCredentials();
    openAIApiKey = clientOptions.azure.azureOpenAIApiKey;
  }

  // If the resolved key string indicates an Azure key but Azure options were
  // not set above, load Azure credentials and use the Azure API key instead.
  if (openAIApiKey && openAIApiKey.includes('azure') && !clientOptions.azure) {
    clientOptions.azure = isUserProvided ? JSON.parse(key) : getAzureCredentials();
    openAIApiKey = clientOptions.azure.azureOpenAIApiKey;
  }

  const client = new PluginsClient(openAIApiKey, clientOptions);

  return {
    client,
    azure: clientOptions.azure,
    openAIApiKey,
  };
};

module.exports = initializeClient;
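
For reference, a minimal usage sketch of this module, assuming an Express-style route handler; the handler name, the sendMessage options, and the response shape are illustrative and not LibreChat's actual gptPlugins controller:

const initializeClient = require('./initializeClient');

// Hypothetical handler showing how the initializer's return value might be used.
const handleChatRequest = async (req, res) => {
  const endpointOption = req.body.endpointOption ?? {};
  const { client, azure, openAIApiKey } = await initializeClient(req, endpointOption);

  // `client` is a configured PluginsClient; `azure` is only set when Azure
  // OpenAI is in use; `openAIApiKey` is the resolved key (env or user-provided).
  const response = await client.sendMessage(req.body.text, { user: req.user.id });
  res.json({ response, usedAzure: Boolean(azure), hasKey: Boolean(openAIApiKey) });
};

module.exports = handleChatRequest;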