Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-18 01:10:14 +01:00)
feat: OpenRouter Support & Improve Model Fetching (#936)
* chore(ChatGPTClient.js): add support for OpenRouter API
* chore(OpenAIClient.js): add support for OpenRouter API
* chore: comment out token debugging
* chore: add back streamResult assignment
* chore: remove double condition/assignment from merging
* refactor(routes/endpoints): move logic to controller/services
* feat: add openrouter model fetching
* chore: remove unused endpointsConfig in cleanupPreset function
* refactor: separate models concern from endpointsConfig
* refactor(data-provider): add TModels type and make TEndpointsConfig adaptable to new endpoint keys
* refactor: complete models endpoint service in data-provider
* refactor: onMutate for refreshToken and login, invalidate models query
* feat: complete models endpoint logic for frontend
* chore: remove requireJwtAuth from /api/endpoints and /api/models as not implemented yet
* fix: endpoint will not be overwritten and instead use active value
* feat: openrouter support for plugins
* chore(EndpointOptionsDialog): remove unused recoil value
* refactor(schemas/parseConvo): add handling of secondaryModels to use the first of the defined secondary models (the last selected one comes first), or default to the convo's secondary model value
* refactor: remove hooks from store and move to hooks
* refactor(switchToConversation): make switchToConversation use the latest recoil state, which is necessary to get the most up-to-date models list; replace wrapper function
* refactor(getDefaultConversation): factor out logic into 3 pieces to reduce complexity
* fix: backend tests
* feat: optimistic update by calling newConvo when models are fetched
* feat: openrouter support for titling convos
* feat: cache models fetch
* chore: add missing dep to AuthContext useEffect
* chore: fix useTimeout types
* chore: delete old getDefaultConvo file
* chore: remove newConvo logic from Root, remove console log from api models caching
* chore: ensure bun is used for building in b:client script
* fix: default endpoint will not default to null on a completely fresh login (no localStorage/cookies)
* chore: add openrouter docs to free_ai_apis.md and .env.example
* chore: remove openrouter console logs
* feat: add debugging env variable for Plugins
parent ccb46164c0
commit fd70e21732
58 changed files with 809 additions and 523 deletions
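For orientation: OpenRouter exposes an OpenAI-compatible API, so supporting it mostly means pointing the existing OpenAI-style clients at its base URL with an OpenRouter key. The sketch below is illustrative only; the env var name OPENROUTER_API_KEY and the URL are assumptions based on the commit message ("add openrouter docs to free_ai_apis.md and .env.example"), not quotes from this diff.

```js
// Illustrative sketch only (not the PR's code): OpenRouter speaks the OpenAI API,
// so an OpenAI-style client can be redirected to it via a base URL and its own key.
// OPENROUTER_API_KEY and the URL below are assumptions, not taken from this diff.
const OPENROUTER_BASE_URL = 'https://openrouter.ai/api/v1';

const openRouterOptions = {
  reverseProxyUrl: OPENROUTER_BASE_URL, // same option the clients already use for proxying
  apiKey: process.env.OPENROUTER_API_KEY,
};
```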
@@ -41,10 +41,6 @@ router.post('/', setHeaders, async (req, res) => {
    key: req.body?.key ?? null,
  };

  // const availableModels = getChatGPTBrowserModels();
  // if (availableModels.find((model) => model === endpointOption.model) === undefined)
  //   return handleError(res, { text: 'Illegal request: model' });

  console.log('ask log', {
    userMessage,
    endpointOption,
@@ -1,188 +1,7 @@
const axios = require('axios');
const express = require('express');
const router = express.Router();
const { availableTools } = require('../../app/clients/tools');
const { addOpenAPISpecs } = require('../../app/clients/tools/util/addOpenAPISpecs');
// const { getAzureCredentials, genAzureChatCompletion } = require('../../utils/');
const endpointController = require('../controllers/EndpointController');

const openAIApiKey = process.env.OPENAI_API_KEY;
const azureOpenAIApiKey = process.env.AZURE_API_KEY;
const useAzurePlugins = !!process.env.PLUGINS_USE_AZURE;
const userProvidedOpenAI = useAzurePlugins
  ? azureOpenAIApiKey === 'user_provided'
  : openAIApiKey === 'user_provided';
router.get('/', endpointController);

const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _models = []) => {
  let models = _models.slice() ?? [];
  let apiKey = openAIApiKey;
  let basePath = 'https://api.openai.com/v1';
  if (opts.azure) {
    return models;
    // const azure = getAzureCredentials();
    // basePath = (genAzureChatCompletion(azure))
    //   .split('/deployments')[0]
    //   .concat(`/models?api-version=${azure.azureOpenAIApiVersion}`);
    // apiKey = azureOpenAIApiKey;
  }

  const reverseProxyUrl = process.env.OPENAI_REVERSE_PROXY;
  if (reverseProxyUrl) {
    basePath = reverseProxyUrl.match(/.*v1/)[0];
  }

  if (basePath.includes('v1') || opts.azure) {
    try {
      const res = await axios.get(`${basePath}${opts.azure ? '' : '/models'}`, {
        headers: {
          Authorization: `Bearer ${apiKey}`,
        },
      });

      models = res.data.data.map((item) => item.id);
      // console.log(`Fetched ${models.length} models from ${opts.azure ? 'Azure ' : ''}OpenAI API`);
    } catch (err) {
      console.log(`Failed to fetch models from ${opts.azure ? 'Azure ' : ''}OpenAI API`);
    }
  }

  if (!reverseProxyUrl) {
    const regex = /(text-davinci-003|gpt-)/;
    models = models.filter((model) => regex.test(model));
  }
  return models;
};

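The commit message adds OpenRouter model fetching. The removed fetchOpenAIModels above shows the general pattern (GET {basePath}/models, then map each item to its id); below is a hedged sketch of the same pattern pointed at OpenRouter's public, OpenAI-compatible models endpoint. The function name, key handling, and error handling are illustrative, not the PR's actual ModelService code.

```js
const axios = require('axios');

// Illustrative sketch (not the PR's code): fetch OpenRouter's model list using the
// same GET /models + map((item) => item.id) pattern as fetchOpenAIModels above.
const fetchOpenRouterModels = async () => {
  try {
    const res = await axios.get('https://openrouter.ai/api/v1/models', {
      headers: { Authorization: `Bearer ${process.env.OPENROUTER_API_KEY}` },
    });
    return res.data.data.map((item) => item.id);
  } catch (err) {
    console.log('Failed to fetch models from OpenRouter API');
    return [];
  }
};
```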
const getOpenAIModels = async (opts = { azure: false, plugins: false }) => {
  let models = [
    'gpt-4',
    'gpt-4-0613',
    'gpt-3.5-turbo',
    'gpt-3.5-turbo-16k',
    'gpt-3.5-turbo-0613',
    'gpt-3.5-turbo-0301',
  ];

  if (!opts.plugins) {
    models.push('text-davinci-003');
  }

  let key;
  if (opts.azure) {
    key = 'AZURE_OPENAI_MODELS';
  } else if (opts.plugins) {
    key = 'PLUGIN_MODELS';
  } else {
    key = 'OPENAI_MODELS';
  }

  if (process.env[key]) {
    models = String(process.env[key]).split(',');
    return models;
  }

  if (userProvidedOpenAI) {
    return models;
  }

  models = await fetchOpenAIModels(opts, models);
  return models;
};

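Usage note for the function above: setting the matching env var (OPENAI_MODELS, PLUGIN_MODELS, or AZURE_OPENAI_MODELS) short-circuits the remote fetch entirely, and the comma-separated list is returned as-is. A quick sketch:

```js
// Usage sketch for getOpenAIModels above: an env override bypasses the API fetch.
process.env.PLUGIN_MODELS = 'gpt-4,gpt-3.5-turbo-16k';

getOpenAIModels({ plugins: true }).then((models) => {
  console.log(models); // ['gpt-4', 'gpt-3.5-turbo-16k']
});
```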
const getChatGPTBrowserModels = () => {
  let models = ['text-davinci-002-render-sha', 'gpt-4'];
  if (process.env.CHATGPT_MODELS) {
    models = String(process.env.CHATGPT_MODELS).split(',');
  }

  return models;
};
const getAnthropicModels = () => {
  let models = [
    'claude-1',
    'claude-1-100k',
    'claude-instant-1',
    'claude-instant-1-100k',
    'claude-2',
  ];
  if (process.env.ANTHROPIC_MODELS) {
    models = String(process.env.ANTHROPIC_MODELS).split(',');
  }

  return models;
};

let i = 0;
router.get('/', async function (req, res) {
  let key, palmUser;
  try {
    key = require('../../data/auth.json');
  } catch (e) {
    if (i === 0) {
      i++;
    }
  }

  if (process.env.PALM_KEY === 'user_provided') {
    palmUser = true;
    if (i <= 1) {
      i++;
    }
  }

  const tools = await addOpenAPISpecs(availableTools);
  function transformToolsToMap(tools) {
    return tools.reduce((map, obj) => {
      map[obj.pluginKey] = obj.name;
      return map;
    }, {});
  }
  const plugins = transformToolsToMap(tools);

  const google =
    key || palmUser
      ? { userProvide: palmUser, availableModels: ['chat-bison', 'text-bison', 'codechat-bison'] }
      : false;
  const openAI = openAIApiKey
    ? { availableModels: await getOpenAIModels(), userProvide: openAIApiKey === 'user_provided' }
    : false;
  const azureOpenAI = azureOpenAIApiKey
    ? {
        availableModels: await getOpenAIModels({ azure: true }),
        userProvide: azureOpenAIApiKey === 'user_provided',
      }
    : false;
  const gptPlugins =
    openAIApiKey || azureOpenAIApiKey
      ? {
          availableModels: await getOpenAIModels({ azure: useAzurePlugins, plugins: true }),
          plugins,
          availableAgents: ['classic', 'functions'],
          userProvide: userProvidedOpenAI,
          azure: useAzurePlugins,
        }
      : false;
  const bingAI = process.env.BINGAI_TOKEN
    ? {
        availableModels: ['BingAI', 'Sydney'],
        userProvide: process.env.BINGAI_TOKEN == 'user_provided',
      }
    : false;
  const chatGPTBrowser = process.env.CHATGPT_TOKEN
    ? {
        userProvide: process.env.CHATGPT_TOKEN == 'user_provided',
        availableModels: getChatGPTBrowserModels(),
      }
    : false;
  const anthropic = process.env.ANTHROPIC_API_KEY
    ? {
        userProvide: process.env.ANTHROPIC_API_KEY == 'user_provided',
        availableModels: getAnthropicModels(),
      }
    : false;

  res.send(
    JSON.stringify({ azureOpenAI, openAI, google, bingAI, chatGPTBrowser, gptPlugins, anthropic }),
  );
});

module.exports = { router, getOpenAIModels, getChatGPTBrowserModels };
module.exports = router;
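For reference, the removed handler above serializes a per-endpoint config object where each key is either a settings object or false. A minimal consumer-side sketch follows; the /api/endpoints mount path is taken from the commit message (which mentions /api/endpoints and /api/models), while the surrounding code is illustrative.

```js
// Minimal consumer sketch, assuming the router is mounted at /api/endpoints.
(async () => {
  const res = await fetch('/api/endpoints');
  const endpointsConfig = await res.json();
  // Each key is either a config object or false, per the handler above.
  console.log(endpointsConfig.openAI); // e.g. { availableModels: [...], userProvide: false }
})();
```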
@@ -1,13 +1,20 @@
const { PluginsClient } = require('../../../../app');
const { isEnabled } = require('../../../utils');
const { getAzureCredentials } = require('../../../../utils');
const { getUserKey, checkUserKeyExpiry } = require('../../../services/UserService');

const initializeClient = async (req, endpointOption) => {
  const { PROXY, OPENAI_API_KEY, AZURE_API_KEY, PLUGINS_USE_AZURE, OPENAI_REVERSE_PROXY } =
    process.env;
  const {
    PROXY,
    OPENAI_API_KEY,
    AZURE_API_KEY,
    PLUGINS_USE_AZURE,
    OPENAI_REVERSE_PROXY,
    DEBUG_PLUGINS,
  } = process.env;
  const { key: expiresAt } = req.body;
  const clientOptions = {
    // debug: true,
    debug: isEnabled(DEBUG_PLUGINS),
    reverseProxyUrl: OPENAI_REVERSE_PROXY ?? null,
    proxy: PROXY ?? null,
    ...endpointOption,
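The new debug flag ("feat: add debugging env variable for Plugins") goes through an isEnabled helper imported from ../../../utils, whose implementation is not part of this excerpt. A plausible shape, assumed purely for illustration, is a case-insensitive check for the string 'true':

```js
// Assumed shape of the isEnabled helper (not shown in this diff): treat the env
// value as enabled only when it is the string 'true', ignoring case and whitespace.
const isEnabled = (value) => {
  if (typeof value !== 'string') {
    return false;
  }
  return value.trim().toLowerCase() === 'true';
};

console.log(isEnabled('TRUE')); // true
console.log(isEnabled(undefined)); // false
```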
@@ -9,7 +9,8 @@ const tokenizer = require('./tokenizer');
const auth = require('./auth');
const keys = require('./keys');
const oauth = require('./oauth');
const { router: endpoints } = require('./endpoints');
const endpoints = require('./endpoints');
const models = require('./models');
const plugins = require('./plugins');
const user = require('./user');
const config = require('./config');

@@ -28,6 +29,7 @@ module.exports = {
  user,
  tokenizer,
  endpoints,
  models,
  plugins,
  config,
};
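With models exported alongside endpoints, the server can mount the new route next to the existing ones. The actual server wiring is not part of this diff; the sketch below assumes the conventional /api prefix referenced in the commit message.

```js
// Sketch only; the real server wiring is not shown in this diff.
const express = require('express');
const routes = require('./routes'); // the routes index shown above

const app = express();
app.use('/api/endpoints', routes.endpoints);
app.use('/api/models', routes.models); // new models route added by this commit
```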
api/server/routes/models.js (new file, +7)
@@ -0,0 +1,7 @@
const express = require('express');
const router = express.Router();
const modelController = require('../controllers/ModelController');

router.get('/', modelController);

module.exports = router;
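ModelController itself is not included in this excerpt. Given the commit items "feat: add openrouter model fetching" and "feat: cache models fetch", a hedged sketch of what such a controller could look like follows; the names and the in-memory cache strategy are assumptions, not the PR's actual implementation.

```js
// Hedged sketch of a model controller with a simple in-memory cache;
// the real ModelController in this commit may differ.
let cachedModelsConfig = null;

// Hypothetical gatherer: in the real service this would call the per-endpoint
// model fetchers (OpenAI, OpenRouter, Anthropic, ...) moved out of endpoints.js.
async function loadModelsConfig() {
  return {
    openAI: ['gpt-4', 'gpt-3.5-turbo'],
    anthropic: ['claude-2'],
  };
}

async function modelController(req, res) {
  if (!cachedModelsConfig) {
    cachedModelsConfig = await loadModelsConfig(); // fetch once, reuse afterwards
  }
  res.send(cachedModelsConfig);
}

module.exports = modelController;
```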