feat: OpenRouter Support & Improve Model Fetching ⇆ (#936)

* chore(ChatGPTClient.js): add support for OpenRouter API
chore(OpenAIClient.js): add support for OpenRouter API

* chore: comment out token debugging

* chore: add back streamResult assignment

* chore: remove double condition/assignment from merging

* refactor(routes/endpoints): move endpoint route logic into controllers/services

* feat: add openrouter model fetching

* chore: remove unused endpointsConfig in cleanupPreset function

* refactor: separate models concern from endpointsConfig

* refactor(data-provider): add TModels type and make TEndpointsConfig adaptable to new endpoint keys

* refactor: complete models endpoint service in data-provider

* refactor: onMutate for refreshToken and login, invalidate models query

* feat: complete models endpoint logic for frontend

* chore: remove requireJwtAuth from /api/endpoints and /api/models, as it's not implemented for them yet

* fix: endpoint is no longer overwritten; the active value is used instead

* feat: openrouter support for plugins

* chore(EndpointOptionsDialog): remove unused recoil value

* refactor(schemas/parseConvo): handle secondaryModels by using the first of the defined secondary models (the last selected model is placed first), falling back to the convo's secondary model value

* refactor: remove hooks from store and move to hooks
refactor(switchToConversation): use the latest recoil state (necessary to get the most up-to-date models list) and replace the wrapper function
refactor(getDefaultConversation): factor out logic into 3 pieces to reduce complexity.

* fix: backend tests

* feat: optimistic update by calling newConvo when models are fetched

* feat: openrouter support for titling convos

* feat: cache models fetch

* chore: add missing dep to AuthContext useEffect

* chore: fix useTimeout types

* chore: delete old getDefaultConvo file

* chore: remove newConvo logic from Root, remove console log from api models caching

* chore: ensure bun is used for building in b:client script

* fix: default endpoint will not default to null on a completely fresh login (no localStorage/cookies)

* chore: add openrouter docs to free_ai_apis.md and .env.example

* chore: remove openrouter console logs

* feat: add debugging env variable for Plugins
Danny Avila 2023-09-18 12:55:51 -04:00 committed by GitHub
parent ccb46164c0
commit fd70e21732
58 changed files with 809 additions and 523 deletions

View file

@ -77,6 +77,19 @@ OPENAI_API_KEY=user_provided
# https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy
# OPENAI_REVERSE_PROXY=
##########################
# OpenRouter (overrides OpenAI and Plugins Endpoints):
##########################
# OpenRouter is a legitimate proxy service to a multitude of LLMs, both closed and open source, including:
# OpenAI models, Anthropic models, Meta's Llama models, pygmalionai/mythalion-13b
# and many more open source models. Newer integrations are usually discounted, too!
# Note: this overrides the OpenAI and Plugins Endpoints.
# See ./docs/install/free_ai_apis.md for more info.
# OPENROUTER_API_KEY=
##########################
# AZURE Endpoint:
##########################
@ -156,6 +169,8 @@ BINGAI_TOKEN=user_provided
# Leave it blank to use internal settings.
# PLUGIN_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-16k,gpt-3.5-turbo-0301,gpt-4,gpt-4-0314,gpt-4-0613
DEBUG_PLUGINS=true # Set to false or comment out to disable debug mode for plugins
# For securely storing credentials, you need a fixed key and IV. You can set them here for prod and dev environments
# If you don't set them, the app will crash on startup.
# You need a 32-byte key (64 characters in hex) and 16-byte IV (32 characters in hex)

View file

@ -179,6 +179,11 @@ class ChatGPTClient extends BaseClient {
opts.headers.Authorization = `Bearer ${this.apiKey}`;
}
if (this.useOpenRouter) {
opts.headers['HTTP-Referer'] = 'https://librechat.ai';
opts.headers['X-Title'] = 'LibreChat';
}
if (this.options.headers) {
opts.headers = { ...opts.headers, ...this.options.headers };
}
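
For reference, the request these options produce boils down to the following minimal sketch (illustration only, not part of the commit; the URL and header values are taken from this commit's diffs):

// Minimal sketch of the OpenRouter request the client ends up sending.
// The model id is a hypothetical example; OpenRouter uses vendor-prefixed ids.
const response = await fetch('https://openrouter.ai/api/v1/chat/completions', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Authorization: `Bearer ${process.env.OPENROUTER_API_KEY}`,
    'HTTP-Referer': 'https://librechat.ai', // app attribution for OpenRouter
    'X-Title': 'LibreChat',
  },
  body: JSON.stringify({
    model: 'openai/gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Hello!' }],
  }),
});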

View file

@ -61,7 +61,13 @@ class OpenAIClient extends BaseClient {
};
}
if (process.env.OPENROUTER_API_KEY) {
this.apiKey = process.env.OPENROUTER_API_KEY;
this.useOpenRouter = true;
}
this.isChatCompletion =
this.useOpenRouter ||
this.options.reverseProxyUrl ||
this.options.localAI ||
this.modelOptions.model.startsWith('gpt-');
@ -119,6 +125,10 @@ class OpenAIClient extends BaseClient {
console.debug('Using Azure endpoint');
}
if (this.useOpenRouter) {
this.completionsUrl = 'https://openrouter.ai/api/v1/chat/completions';
}
return this;
}
@ -324,12 +334,24 @@ class OpenAIClient extends BaseClient {
return;
}
if (this.options.debug) {
// console.debug('progressMessage');
// console.dir(progressMessage, { depth: null });
}
if (progressMessage.choices) {
streamResult = progressMessage;
}
const token = this.isChatCompletion
? progressMessage.choices?.[0]?.delta?.content
: progressMessage.choices?.[0]?.text;
let token = null;
if (this.isChatCompletion) {
token =
progressMessage.choices?.[0]?.delta?.content ?? progressMessage.choices?.[0]?.text;
}
if (!token && this.useOpenRouter) {
token = progressMessage.choices?.[0]?.message?.content;
}
// first event's delta content is always undefined
if (!token) {
return;
@ -396,6 +418,16 @@ class OpenAIClient extends BaseClient {
configOptions.basePath = this.langchainProxy;
}
if (this.useOpenRouter) {
configOptions.basePath = 'https://openrouter.ai/api/v1';
configOptions.baseOptions = {
headers: {
'HTTP-Referer': 'https://librechat.ai',
'X-Title': 'LibreChat',
},
};
}
try {
const llm = createLLM({
modelOptions,

View file

@ -13,25 +13,27 @@ class PluginsClient extends OpenAIClient {
this.sender = options.sender ?? 'Assistant';
this.tools = [];
this.actions = [];
this.openAIApiKey = apiKey;
this.setOptions(options);
this.openAIApiKey = this.apiKey;
this.executor = null;
}
setOptions(options) {
this.agentOptions = options.agentOptions;
this.agentOptions = { ...options.agentOptions };
this.functionsAgent = this.agentOptions?.agent === 'functions';
this.agentIsGpt3 = this.agentOptions?.model.startsWith('gpt-3');
if (this.functionsAgent && this.agentOptions.model) {
this.agentIsGpt3 = this.agentOptions?.model?.includes('gpt-3');
super.setOptions(options);
if (this.functionsAgent && this.agentOptions.model && !this.useOpenRouter) {
this.agentOptions.model = this.getFunctionModelName(this.agentOptions.model);
}
super.setOptions(options);
this.isGpt3 = this.modelOptions.model.startsWith('gpt-3');
this.isGpt3 = this.modelOptions?.model?.includes('gpt-3');
// if (this.options.reverseProxyUrl) {
// this.langchainProxy = this.options.reverseProxyUrl.match(/.*v1/)[0];
// }
if (this.options.reverseProxyUrl) {
this.langchainProxy = this.options.reverseProxyUrl.match(/.*v1/)[0];
}
}
getSaveOptions() {
@ -77,6 +79,16 @@ class PluginsClient extends OpenAIClient {
configOptions.basePath = this.langchainProxy;
}
if (this.useOpenRouter) {
configOptions.basePath = 'https://openrouter.ai/api/v1';
configOptions.baseOptions = {
headers: {
'HTTP-Referer': 'https://librechat.ai',
'X-Title': 'LibreChat',
},
};
}
const model = createLLM({
modelOptions,
configOptions,
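
The configOptions shape here (basePath plus baseOptions.headers) mirrors the openai v3 Configuration object, which is presumably what createLLM hands to LangChain. A hedged sketch of that assumption:

// Hypothetical sketch of what createLLM likely does with configOptions
// (assumes LangChain JS's ChatOpenAI, which took an openai v3
// Configuration-style object as its second constructor argument):
const { ChatOpenAI } = require('langchain/chat_models/openai');
const llm = new ChatOpenAI(
  { openAIApiKey: apiKey, ...modelOptions },
  {
    basePath: 'https://openrouter.ai/api/v1',
    baseOptions: {
      headers: { 'HTTP-Referer': 'https://librechat.ai', 'X-Title': 'LibreChat' },
    },
  },
);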

View file

@ -0,0 +1,60 @@
const { availableTools } = require('../../app/clients/tools');
const { addOpenAPISpecs } = require('../../app/clients/tools/util/addOpenAPISpecs');
const {
openAIApiKey,
azureOpenAIApiKey,
useAzurePlugins,
userProvidedOpenAI,
palmKey,
openAI,
azureOpenAI,
bingAI,
chatGPTBrowser,
anthropic,
} = require('../services/EndpointService').config;
let i = 0;
async function endpointController(req, res) {
let key, palmUser;
try {
key = require('../../data/auth.json');
} catch (e) {
if (i === 0) {
i++;
}
}
if (palmKey === 'user_provided') {
palmUser = true;
if (i <= 1) {
i++;
}
}
const tools = await addOpenAPISpecs(availableTools);
function transformToolsToMap(tools) {
return tools.reduce((map, obj) => {
map[obj.pluginKey] = obj.name;
return map;
}, {});
}
const plugins = transformToolsToMap(tools);
const google = key || palmUser ? { userProvide: palmUser } : false;
const gptPlugins =
openAIApiKey || azureOpenAIApiKey
? {
plugins,
availableAgents: ['classic', 'functions'],
userProvide: userProvidedOpenAI,
azure: useAzurePlugins,
}
: false;
res.send(
JSON.stringify({ azureOpenAI, openAI, google, bingAI, chatGPTBrowser, gptPlugins, anthropic }),
);
}
module.exports = endpointController;

View file

@ -0,0 +1,23 @@
const {
getOpenAIModels,
getChatGPTBrowserModels,
getAnthropicModels,
} = require('../services/ModelService');
const { useAzurePlugins } = require('../services/EndpointService').config;
async function modelController(req, res) {
const google = ['chat-bison', 'text-bison', 'codechat-bison'];
const openAI = await getOpenAIModels();
const azureOpenAI = await getOpenAIModels({ azure: true });
const gptPlugins = await getOpenAIModels({ azure: useAzurePlugins, plugins: true });
const bingAI = ['BingAI', 'Sydney'];
const chatGPTBrowser = getChatGPTBrowserModels();
const anthropic = getAnthropicModels();
res.send(
JSON.stringify({ azureOpenAI, openAI, google, bingAI, chatGPTBrowser, gptPlugins, anthropic }),
);
}
module.exports = modelController;
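
The route returns one array of model names per endpoint key. A hypothetical consumer (not part of this commit):

// Hypothetical fetch of the new route, showing the response shape:
const res = await fetch('/api/models');
const modelsConfig = await res.json();
// => { openAI: [...], azureOpenAI: [...], google: [...], bingAI: [...],
//      chatGPTBrowser: [...], gptPlugins: [...], anthropic: [...] }
console.log(modelsConfig.openAI); // e.g. ['gpt-4', 'gpt-3.5-turbo', ...]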

View file

@ -60,6 +60,7 @@ const startServer = async () => {
app.use('/api/prompts', routes.prompts);
app.use('/api/tokenizer', routes.tokenizer);
app.use('/api/endpoints', routes.endpoints);
app.use('/api/models', routes.models);
app.use('/api/plugins', routes.plugins);
app.use('/api/config', routes.config);

View file

@ -41,10 +41,6 @@ router.post('/', setHeaders, async (req, res) => {
key: req.body?.key ?? null,
};
// const availableModels = getChatGPTBrowserModels();
// if (availableModels.find((model) => model === endpointOption.model) === undefined)
// return handleError(res, { text: 'Illegal request: model' });
console.log('ask log', {
userMessage,
endpointOption,

View file

@ -1,188 +1,7 @@
const axios = require('axios');
const express = require('express');
const router = express.Router();
const { availableTools } = require('../../app/clients/tools');
const { addOpenAPISpecs } = require('../../app/clients/tools/util/addOpenAPISpecs');
// const { getAzureCredentials, genAzureChatCompletion } = require('../../utils/');
const endpointController = require('../controllers/EndpointController');
const openAIApiKey = process.env.OPENAI_API_KEY;
const azureOpenAIApiKey = process.env.AZURE_API_KEY;
const useAzurePlugins = !!process.env.PLUGINS_USE_AZURE;
const userProvidedOpenAI = useAzurePlugins
? azureOpenAIApiKey === 'user_provided'
: openAIApiKey === 'user_provided';
router.get('/', endpointController);
const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _models = []) => {
let models = _models.slice() ?? [];
let apiKey = openAIApiKey;
let basePath = 'https://api.openai.com/v1';
if (opts.azure) {
return models;
// const azure = getAzureCredentials();
// basePath = (genAzureChatCompletion(azure))
// .split('/deployments')[0]
// .concat(`/models?api-version=${azure.azureOpenAIApiVersion}`);
// apiKey = azureOpenAIApiKey;
}
const reverseProxyUrl = process.env.OPENAI_REVERSE_PROXY;
if (reverseProxyUrl) {
basePath = reverseProxyUrl.match(/.*v1/)[0];
}
if (basePath.includes('v1') || opts.azure) {
try {
const res = await axios.get(`${basePath}${opts.azure ? '' : '/models'}`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
});
models = res.data.data.map((item) => item.id);
// console.log(`Fetched ${models.length} models from ${opts.azure ? 'Azure ' : ''}OpenAI API`);
} catch (err) {
console.log(`Failed to fetch models from ${opts.azure ? 'Azure ' : ''}OpenAI API`);
}
}
if (!reverseProxyUrl) {
const regex = /(text-davinci-003|gpt-)/;
models = models.filter((model) => regex.test(model));
}
return models;
};
const getOpenAIModels = async (opts = { azure: false, plugins: false }) => {
let models = [
'gpt-4',
'gpt-4-0613',
'gpt-3.5-turbo',
'gpt-3.5-turbo-16k',
'gpt-3.5-turbo-0613',
'gpt-3.5-turbo-0301',
];
if (!opts.plugins) {
models.push('text-davinci-003');
}
let key;
if (opts.azure) {
key = 'AZURE_OPENAI_MODELS';
} else if (opts.plugins) {
key = 'PLUGIN_MODELS';
} else {
key = 'OPENAI_MODELS';
}
if (process.env[key]) {
models = String(process.env[key]).split(',');
return models;
}
if (userProvidedOpenAI) {
return models;
}
models = await fetchOpenAIModels(opts, models);
return models;
};
const getChatGPTBrowserModels = () => {
let models = ['text-davinci-002-render-sha', 'gpt-4'];
if (process.env.CHATGPT_MODELS) {
models = String(process.env.CHATGPT_MODELS).split(',');
}
return models;
};
const getAnthropicModels = () => {
let models = [
'claude-1',
'claude-1-100k',
'claude-instant-1',
'claude-instant-1-100k',
'claude-2',
];
if (process.env.ANTHROPIC_MODELS) {
models = String(process.env.ANTHROPIC_MODELS).split(',');
}
return models;
};
let i = 0;
router.get('/', async function (req, res) {
let key, palmUser;
try {
key = require('../../data/auth.json');
} catch (e) {
if (i === 0) {
i++;
}
}
if (process.env.PALM_KEY === 'user_provided') {
palmUser = true;
if (i <= 1) {
i++;
}
}
const tools = await addOpenAPISpecs(availableTools);
function transformToolsToMap(tools) {
return tools.reduce((map, obj) => {
map[obj.pluginKey] = obj.name;
return map;
}, {});
}
const plugins = transformToolsToMap(tools);
const google =
key || palmUser
? { userProvide: palmUser, availableModels: ['chat-bison', 'text-bison', 'codechat-bison'] }
: false;
const openAI = openAIApiKey
? { availableModels: await getOpenAIModels(), userProvide: openAIApiKey === 'user_provided' }
: false;
const azureOpenAI = azureOpenAIApiKey
? {
availableModels: await getOpenAIModels({ azure: true }),
userProvide: azureOpenAIApiKey === 'user_provided',
}
: false;
const gptPlugins =
openAIApiKey || azureOpenAIApiKey
? {
availableModels: await getOpenAIModels({ azure: useAzurePlugins, plugins: true }),
plugins,
availableAgents: ['classic', 'functions'],
userProvide: userProvidedOpenAI,
azure: useAzurePlugins,
}
: false;
const bingAI = process.env.BINGAI_TOKEN
? {
availableModels: ['BingAI', 'Sydney'],
userProvide: process.env.BINGAI_TOKEN == 'user_provided',
}
: false;
const chatGPTBrowser = process.env.CHATGPT_TOKEN
? {
userProvide: process.env.CHATGPT_TOKEN == 'user_provided',
availableModels: getChatGPTBrowserModels(),
}
: false;
const anthropic = process.env.ANTHROPIC_API_KEY
? {
userProvide: process.env.ANTHROPIC_API_KEY == 'user_provided',
availableModels: getAnthropicModels(),
}
: false;
res.send(
JSON.stringify({ azureOpenAI, openAI, google, bingAI, chatGPTBrowser, gptPlugins, anthropic }),
);
});
module.exports = { router, getOpenAIModels, getChatGPTBrowserModels };
module.exports = router;

View file

@ -1,13 +1,20 @@
const { PluginsClient } = require('../../../../app');
const { isEnabled } = require('../../../utils');
const { getAzureCredentials } = require('../../../../utils');
const { getUserKey, checkUserKeyExpiry } = require('../../../services/UserService');
const initializeClient = async (req, endpointOption) => {
const { PROXY, OPENAI_API_KEY, AZURE_API_KEY, PLUGINS_USE_AZURE, OPENAI_REVERSE_PROXY } =
process.env;
const {
PROXY,
OPENAI_API_KEY,
AZURE_API_KEY,
PLUGINS_USE_AZURE,
OPENAI_REVERSE_PROXY,
DEBUG_PLUGINS,
} = process.env;
const { key: expiresAt } = req.body;
const clientOptions = {
// debug: true,
debug: isEnabled(DEBUG_PLUGINS),
reverseProxyUrl: OPENAI_REVERSE_PROXY ?? null,
proxy: PROXY ?? null,
...endpointOption,

View file

@ -9,7 +9,8 @@ const tokenizer = require('./tokenizer');
const auth = require('./auth');
const keys = require('./keys');
const oauth = require('./oauth');
const { router: endpoints } = require('./endpoints');
const endpoints = require('./endpoints');
const models = require('./models');
const plugins = require('./plugins');
const user = require('./user');
const config = require('./config');
@ -28,6 +29,7 @@ module.exports = {
user,
tokenizer,
endpoints,
models,
plugins,
config,
};

View file

@ -0,0 +1,7 @@
const express = require('express');
const router = express.Router();
const modelController = require('../controllers/ModelController');
router.get('/', modelController);
module.exports = router;

View file

@ -0,0 +1,40 @@
const {
OPENAI_API_KEY: openAIApiKey,
AZURE_API_KEY: azureOpenAIApiKey,
ANTHROPIC_API_KEY: anthropicApiKey,
CHATGPT_TOKEN: chatGPTToken,
BINGAI_TOKEN: bingToken,
PLUGINS_USE_AZURE,
PALM_KEY: palmKey,
} = process.env ?? {};
const useAzurePlugins = !!PLUGINS_USE_AZURE;
const userProvidedOpenAI = useAzurePlugins
? azureOpenAIApiKey === 'user_provided'
: openAIApiKey === 'user_provided';
function isUserProvided(key) {
return key ? { userProvide: key === 'user_provided' } : false;
}
const openAI = isUserProvided(openAIApiKey);
const azureOpenAI = isUserProvided(azureOpenAIApiKey);
const bingAI = isUserProvided(bingToken);
const chatGPTBrowser = isUserProvided(chatGPTToken);
const anthropic = isUserProvided(anthropicApiKey);
module.exports = {
config: {
openAIApiKey,
azureOpenAIApiKey,
useAzurePlugins,
userProvidedOpenAI,
palmKey,
openAI,
azureOpenAI,
chatGPTBrowser,
anthropic,
bingAI,
},
};

View file

@ -0,0 +1,127 @@
const Keyv = require('keyv');
const axios = require('axios');
// const { getAzureCredentials, genAzureChatCompletion } = require('../../utils/');
const { openAIApiKey, userProvidedOpenAI } = require('./EndpointService').config;
const modelsCache = new Keyv({ namespace: 'models' });
const { OPENROUTER_API_KEY, OPENAI_REVERSE_PROXY, CHATGPT_MODELS, ANTHROPIC_MODELS } =
process.env ?? {};
const fetchOpenAIModels = async (opts = { azure: false, plugins: false }, _models = []) => {
let models = _models.slice() ?? [];
let apiKey = openAIApiKey;
let basePath = 'https://api.openai.com/v1';
if (opts.azure) {
return models;
// const azure = getAzureCredentials();
// basePath = (genAzureChatCompletion(azure))
// .split('/deployments')[0]
// .concat(`/models?api-version=${azure.azureOpenAIApiVersion}`);
// apiKey = azureOpenAIApiKey;
}
let reverseProxyUrl = OPENAI_REVERSE_PROXY;
if (OPENROUTER_API_KEY) {
reverseProxyUrl = 'https://openrouter.ai/api/v1';
}
if (reverseProxyUrl) {
basePath = reverseProxyUrl.match(/.*v1/)[0];
}
const cachedModels = await modelsCache.get(basePath);
if (cachedModels) {
return cachedModels;
}
if (basePath.includes('v1') || opts.azure) {
try {
const res = await axios.get(`${basePath}${opts.azure ? '' : '/models'}`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
});
models = res.data.data.map((item) => item.id);
// console.log(`Fetched ${models.length} models from ${opts.azure ? 'Azure ' : ''}OpenAI API`);
} catch (err) {
console.log(`Failed to fetch models from ${opts.azure ? 'Azure ' : ''}OpenAI API`);
}
}
if (!reverseProxyUrl) {
const regex = /(text-davinci-003|gpt-)/;
models = models.filter((model) => regex.test(model));
}
await modelsCache.set(basePath, models);
return models;
};
const getOpenAIModels = async (opts = { azure: false, plugins: false }) => {
let models = [
'gpt-4',
'gpt-4-0613',
'gpt-3.5-turbo',
'gpt-3.5-turbo-16k',
'gpt-3.5-turbo-0613',
'gpt-3.5-turbo-0301',
];
if (!opts.plugins) {
models.push('text-davinci-003');
}
let key;
if (opts.azure) {
key = 'AZURE_OPENAI_MODELS';
} else if (opts.plugins) {
key = 'PLUGIN_MODELS';
} else {
key = 'OPENAI_MODELS';
}
if (process.env[key]) {
models = String(process.env[key]).split(',');
return models;
}
if (userProvidedOpenAI) {
return models;
}
models = await fetchOpenAIModels(opts, models);
return models;
};
const getChatGPTBrowserModels = () => {
let models = ['text-davinci-002-render-sha', 'gpt-4'];
if (CHATGPT_MODELS) {
models = String(CHATGPT_MODELS).split(',');
}
return models;
};
const getAnthropicModels = () => {
let models = [
'claude-1',
'claude-1-100k',
'claude-instant-1',
'claude-instant-1-100k',
'claude-2',
];
if (ANTHROPIC_MODELS) {
models = String(ANTHROPIC_MODELS).split(',');
}
return models;
};
module.exports = {
getOpenAIModels,
getChatGPTBrowserModels,
getAnthropicModels,
};
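
Note that the cache never expires: modelsCache.set is called without a TTL, so a fetched model list persists until the process restarts. If expiry were wanted, Keyv accepts a TTL; a variant sketch (an assumption, not what this commit does):

// Variant sketch: expire the cached model list after an hour.
// Keyv's set() takes a TTL in milliseconds as an optional third argument.
await modelsCache.set(basePath, models, 1000 * 60 * 60);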

View file

@ -4,15 +4,15 @@ import { useUpdateConversationMutation } from 'librechat-data-provider';
import RenameButton from './RenameButton';
import DeleteButton from './DeleteButton';
import ConvoIcon from '../svg/ConvoIcon';
import { useConversations, useConversation } from '~/hooks';
import store from '~/store';
export default function Conversation({ conversation, retainView }) {
const [currentConversation, setCurrentConversation] = useRecoilState(store.conversation);
const setSubmission = useSetRecoilState(store.submission);
const { refreshConversations } = store.useConversations();
const { switchToConversation } = store.useConversation();
const { refreshConversations } = useConversations();
const { switchToConversation } = useConversation();
const updateConvoMutation = useUpdateConversationMutation(currentConversation?.conversationId);

View file

@ -5,14 +5,14 @@ import { useRecoilValue } from 'recoil';
import { useDeleteConversationMutation } from 'librechat-data-provider';
import { Dialog, DialogTrigger, Label } from '~/components/ui/';
import DialogTemplate from '~/components/ui/DialogTemplate';
import { useLocalize, useConversations, useConversation } from '~/hooks';
import store from '~/store';
import { useLocalize } from '~/hooks';
export default function DeleteButton({ conversationId, renaming, retainView, title }) {
const localize = useLocalize();
const currentConversation = useRecoilValue(store.conversation) || {};
const { newConversation } = store.useConversation();
const { refreshConversations } = store.useConversations();
const { newConversation } = useConversation();
const { refreshConversations } = useConversations();
const confirmDelete = () => {
deleteConvoMutation.mutate({ conversationId, source: 'button' });

View file

@ -16,7 +16,6 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }: TEditP
const [preset, setPreset] = useRecoilState(store.preset);
const setPresets = useSetRecoilState(store.presets);
const availableEndpoints = useRecoilValue(store.availableEndpoints);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const { setOption } = useSetOptions(_preset);
const localize = useLocalize();
@ -27,7 +26,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }: TEditP
axios({
method: 'post',
url: '/api/presets',
data: cleanupPreset({ preset, endpointsConfig }),
data: cleanupPreset({ preset }),
withCredentials: true,
}).then((res) => {
setPresets(res?.data);
@ -40,7 +39,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }: TEditP
}
const fileName = filenamify(preset?.title || 'preset');
exportFromJSON({
data: cleanupPreset({ preset, endpointsConfig }),
data: cleanupPreset({ preset }),
fileName,
exportType: exportFromJSON.types.json,
});

View file

@ -1,6 +1,6 @@
import exportFromJSON from 'export-from-json';
import { useEffect, useState } from 'react';
import { useRecoilValue, useRecoilState } from 'recoil';
import { useRecoilState } from 'recoil';
import { tPresetSchema } from 'librechat-data-provider';
import type { TSetOption, TEditPresetProps } from '~/common';
import { Dialog, DialogButton } from '~/components/ui';
@ -21,7 +21,6 @@ const EndpointOptionsDialog = ({
}: TEditPresetProps) => {
const [preset, setPreset] = useRecoilState(store.preset);
const [saveAsDialogShow, setSaveAsDialogShow] = useState(false);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const localize = useLocalize();
const setOption: TSetOption = (param) => (newValue) => {
@ -44,7 +43,7 @@ const EndpointOptionsDialog = ({
return;
}
exportFromJSON({
data: cleanupPreset({ preset, endpointsConfig }),
data: cleanupPreset({ preset }),
fileName: `${preset?.title}.json`,
exportType: exportFromJSON.types.json,
});

View file

@ -23,13 +23,13 @@ export default function Settings({
isPreset = false,
className = '',
}: TSettingsProps) {
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const modelsConfig = useRecoilValue(store.modelsConfig);
if (!conversation?.endpoint) {
return null;
}
const { endpoint } = conversation;
const models = endpointsConfig?.[endpoint]?.['availableModels'] || [];
const models = modelsConfig?.[endpoint] ?? [];
const OptionComponent = optionComponents[endpoint];
if (OptionComponent) {

View file

@ -1,16 +1,13 @@
import React, { useEffect, useState } from 'react';
import { useRecoilValue } from 'recoil';
import { useCreatePresetMutation } from 'librechat-data-provider';
import type { TEditPresetProps } from '~/common';
import { Dialog, Input, Label } from '~/components/ui/';
import DialogTemplate from '~/components/ui/DialogTemplate';
import { cn, defaultTextPropsLabel, removeFocusOutlines, cleanupPreset } from '~/utils/';
import { useLocalize } from '~/hooks';
import store from '~/store';
const SaveAsPresetDialog = ({ open, onOpenChange, preset }: TEditPresetProps) => {
const [title, setTitle] = useState<string>(preset?.title || 'My Preset');
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const createPresetMutation = useCreatePresetMutation();
const localize = useLocalize();
@ -20,7 +17,6 @@ const SaveAsPresetDialog = ({ open, onOpenChange, preset }: TEditPresetProps) =>
...preset,
title,
},
endpointsConfig,
});
createPresetMutation.mutate(_preset);
};

View file

@ -23,12 +23,13 @@ import {
TooltipContent,
} from '~/components/ui/';
import DialogTemplate from '~/components/ui/DialogTemplate';
import { cn, cleanupPreset, getDefaultConversation } from '~/utils';
import { useLocalize, useLocalStorage } from '~/hooks';
import { cn, cleanupPreset } from '~/utils';
import { useLocalize, useLocalStorage, useConversation, useDefaultConvo } from '~/hooks';
import store from '~/store';
export default function NewConversationMenu() {
const localize = useLocalize();
const getDefaultConversation = useDefaultConvo();
const [menuOpen, setMenuOpen] = useState(false);
const [showPresets, setShowPresets] = useState(true);
const [showEndpoints, setShowEndpoints] = useState(true);
@ -37,12 +38,12 @@ export default function NewConversationMenu() {
const [conversation, setConversation] = useRecoilState(store.conversation) ?? {};
const [messages, setMessages] = useRecoilState(store.messages);
const availableEndpoints = useRecoilValue(store.availableEndpoints);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const [presets, setPresets] = useRecoilState(store.presets);
const modularEndpoints = new Set(['gptPlugins', 'anthropic', 'google', 'openAI']);
const { endpoint, conversationId } = conversation;
const { newConversation } = store.useConversation();
const { endpoint } = conversation;
const { newConversation } = useConversation();
const deletePresetsMutation = useDeletePresetMutation();
const createPresetMutation = useCreatePresetMutation();
@ -62,19 +63,10 @@ export default function NewConversationMenu() {
};
const onFileSelected = (jsonData) => {
const jsonPreset = { ...cleanupPreset({ preset: jsonData, endpointsConfig }), presetId: null };
const jsonPreset = { ...cleanupPreset({ preset: jsonData }), presetId: null };
importPreset(jsonPreset);
};
// update the default model when availableModels changes
// typically, availableModels changes => modelsFilter or customGPTModels changes
useEffect(() => {
const isInvalidConversation = !availableEndpoints.find((e) => e === endpoint);
if (conversationId == 'new' && isInvalidConversation) {
newConversation();
}
}, [availableEndpoints]);
// save states to localStorage
const [newUser, setNewUser] = useLocalStorage('newUser', true);
const [lastModel, setLastModel] = useLocalStorage('lastSelectedModel', {});
@ -82,7 +74,12 @@ export default function NewConversationMenu() {
const [lastBingSettings, setLastBingSettings] = useLocalStorage('lastBingSettings', {});
useEffect(() => {
if (endpoint && endpoint !== 'bingAI') {
setLastModel({ ...lastModel, [endpoint]: conversation?.model }), setLastConvo(conversation);
const lastModelUpdate = { ...lastModel, [endpoint]: conversation?.model };
if (endpoint === 'gptPlugins') {
lastModelUpdate.secondaryModel = conversation.agentOptions.model;
}
setLastModel(lastModelUpdate);
setLastConvo(conversation);
} else if (endpoint === 'bingAI') {
const { jailbreak, toneStyle } = conversation;
setLastBingSettings({ ...lastBingSettings, jailbreak, toneStyle });
@ -114,7 +111,6 @@ export default function NewConversationMenu() {
) {
const currentConvo = getDefaultConversation({
conversation,
endpointsConfig,
preset: newPreset,
});

View file

@ -32,14 +32,14 @@ const optionComponents: { [key: string]: React.FC<TModelSelectProps> } = {
};
export default function ModelSelect({ conversation, setOption }: TSelectProps) {
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const modelsConfig = useRecoilValue(store.modelsConfig);
if (!conversation?.endpoint) {
return null;
}
const { endpoint } = conversation;
const OptionComponent = optionComponents[endpoint];
const models = endpointsConfig?.[endpoint]?.['availableModels'] ?? [];
const models = modelsConfig?.[endpoint] ?? [];
if (!OptionComponent) {
return null;

View file

@ -9,7 +9,7 @@ import MultiMessage from './MultiMessage';
import HoverButtons from './HoverButtons';
import SiblingSwitch from './SiblingSwitch';
import { getIcon } from '~/components/Endpoints';
import { useMessageHandler } from '~/hooks';
import { useMessageHandler, useConversation } from '~/hooks';
import type { TMessageProps } from '~/common';
import { cn } from '~/utils';
import store from '~/store';
@ -27,7 +27,7 @@ export default function Message({
const setLatestMessage = useSetRecoilState(store.latestMessage);
const [abortScroll, setAbort] = useState(false);
const { isSubmitting, ask, regenerate, handleContinue } = useMessageHandler();
const { switchToConversation } = store.useConversation();
const { switchToConversation } = useConversation();
const {
text,
children,

View file

@ -3,12 +3,11 @@ import { Dialog } from '~/components/ui/';
import DialogTemplate from '~/components/ui/DialogTemplate';
import { ClearChatsButton } from './SettingsTabs/';
import { useClearConversationsMutation } from 'librechat-data-provider';
import store from '~/store';
import { useLocalize } from '~/hooks';
import { useLocalize, useConversation, useConversations } from '~/hooks';
const ClearConvos = ({ open, onOpenChange }) => {
const { newConversation } = store.useConversation();
const { refreshConversations } = store.useConversations();
const { newConversation } = useConversation();
const { refreshConversations } = useConversations();
const clearConvosMutation = useClearConversationsMutation();
const [confirmClear, setConfirmClear] = useState(false);
const localize = useLocalize();

View file

@ -22,7 +22,6 @@ export default function ExportModel({ open, onOpenChange }) {
const conversation = useRecoilValue(store.conversation) || {};
const messagesTree = useRecoilValue(store.messagesTree) || [];
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const getSiblingIdx = useRecoilCallback(
({ snapshot }) =>
@ -197,7 +196,7 @@ export default function ExportModel({ open, onOpenChange }) {
if (includeOptions) {
data += '\n## Options\n';
const options = cleanupPreset({ preset: conversation, endpointsConfig });
const options = cleanupPreset({ preset: conversation });
for (const key of Object.keys(options)) {
data += `- ${key}: ${options[key]}\n`;
@ -246,7 +245,7 @@ export default function ExportModel({ open, onOpenChange }) {
if (includeOptions) {
data += '\nOptions\n########################\n';
const options = cleanupPreset({ preset: conversation, endpointsConfig });
const options = cleanupPreset({ preset: conversation });
for (const key of Object.keys(options)) {
data += `${key}: ${options[key]}\n`;
@ -295,7 +294,7 @@ export default function ExportModel({ open, onOpenChange }) {
};
if (includeOptions) {
data.options = cleanupPreset({ preset: conversation, endpointsConfig });
data.options = cleanupPreset({ preset: conversation });
}
const messages = await buildMessageTree({

View file

@ -1,11 +1,11 @@
import React from 'react';
import { useRecoilValue } from 'recoil';
import { useLocalize, useConversation } from '~/hooks';
import store from '~/store';
import { useLocalize } from '~/hooks';
export default function MobileNav({ setNavVisible }) {
const conversation = useRecoilValue(store.conversation);
const { newConversation } = store.useConversation();
const { newConversation } = useConversation();
const { title = 'New Chat' } = conversation || {};
const localize = useLocalize();

View file

@ -11,7 +11,14 @@ import SearchBar from './SearchBar';
import NavLinks from './NavLinks';
import { Panel, Spinner } from '~/components';
import { Conversations, Pages } from '../Conversations';
import { useAuthContext, useDebounce, useMediaQuery, useLocalize } from '~/hooks';
import {
useAuthContext,
useDebounce,
useMediaQuery,
useLocalize,
useConversation,
useConversations,
} from '~/hooks';
import { cn } from '~/utils/';
import store from '~/store';
@ -47,14 +54,14 @@ export default function Nav({ navVisible, setNavVisible }) {
const searchQuery = useRecoilValue(store.searchQuery);
const isSearchEnabled = useRecoilValue(store.isSearchEnabled);
const isSearching = useRecoilValue(store.isSearching);
const { newConversation, searchPlaceholderConversation } = store.useConversation();
const { newConversation, searchPlaceholderConversation } = useConversation();
// current conversation
const conversation = useRecoilValue(store.conversation);
const { conversationId } = conversation || {};
const setSearchResultMessages = useSetRecoilState(store.searchResultMessages);
const refreshConversationsHint = useRecoilValue(store.refreshConversationsHint);
const { refreshConversations } = store.useConversations();
const { refreshConversations } = useConversations();
const [isFetching, setIsFetching] = useState(false);

View file

@ -1,9 +1,8 @@
import React from 'react';
import store from '~/store';
import { useLocalize } from '~/hooks';
import { useLocalize, useConversation } from '~/hooks';
export default function NewChat() {
const { newConversation } = store.useConversation();
const { newConversation } = useConversation();
const localize = useLocalize();
const clickHandler = () => {

View file

@ -2,7 +2,13 @@ import { useRecoilState } from 'recoil';
import * as Tabs from '@radix-ui/react-tabs';
import React, { useState, useContext, useEffect, useCallback, useRef } from 'react';
import { useClearConversationsMutation } from 'librechat-data-provider';
import { ThemeContext, useLocalize, useOnClickOutside } from '~/hooks';
import {
ThemeContext,
useLocalize,
useOnClickOutside,
useConversation,
useConversations,
} from '~/hooks';
import type { TDangerButtonProps } from '~/common';
import DangerButton from './DangerButton';
import store from '~/store';
@ -87,7 +93,6 @@ export const LangSelector = ({
<option value="ru">{localize('com_nav_lang_russian')}</option>
<option value="jp">{localize('com_nav_lang_japanese')}</option>
<option value="sv">{localize('com_nav_lang_swedish')}</option>
</select>
</div>
);
@ -98,8 +103,8 @@ function General() {
const clearConvosMutation = useClearConversationsMutation();
const [confirmClear, setConfirmClear] = useState(false);
const [langcode, setLangcode] = useRecoilState(store.lang);
const { newConversation } = store.useConversation();
const { refreshConversations } = store.useConversations();
const { newConversation } = useConversation();
const { refreshConversations } = useConversations();
const contentRef = useRef(null);
useOnClickOutside(contentRef, () => confirmClear && setConfirmClear(false), []);

View file

@ -95,7 +95,7 @@ const AuthContextProvider = ({
});
},
});
}, [setUserContext, logoutUser]);
}, [setUserContext, doSetError, logoutUser]);
const silentRefresh = useCallback(() => {
refreshToken.mutate(undefined, {

View file

@ -11,6 +11,9 @@ export { default as useSetOptions } from './useSetOptions';
export { default as useGenerations } from './useGenerations';
export { default as useScrollToRef } from './useScrollToRef';
export { default as useLocalStorage } from './useLocalStorage';
export { default as useConversation } from './useConversation';
export { default as useDefaultConvo } from './useDefaultConvo';
export { default as useServerStream } from './useServerStream';
export { default as useConversations } from './useConversations';
export { default as useOnClickOutside } from './useOnClickOutside';
export { default as useMessageHandler } from './useMessageHandler';

View file

@ -0,0 +1,85 @@
import { useCallback } from 'react';
import { useSetRecoilState, useResetRecoilState, useRecoilCallback, useRecoilValue } from 'recoil';
import { TConversation, TMessagesAtom, TSubmission, TPreset } from 'librechat-data-provider';
import { buildDefaultConvo, getDefaultEndpoint } from '~/utils';
import store from '~/store';
const useConversation = () => {
const setConversation = useSetRecoilState(store.conversation);
const setMessages = useSetRecoilState<TMessagesAtom>(store.messages);
const setSubmission = useSetRecoilState<TSubmission | null>(store.submission);
const resetLatestMessage = useResetRecoilState(store.latestMessage);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const switchToConversation = useRecoilCallback(
({ snapshot }) =>
async (
conversation: TConversation,
messages: TMessagesAtom = null,
preset: TPreset | null = null,
) => {
const modelsConfig = snapshot.getLoadable(store.modelsConfig).contents;
const { endpoint = null } = conversation;
if (endpoint === null) {
const defaultEndpoint = getDefaultEndpoint({
convoSetup: preset ?? conversation,
endpointsConfig,
});
const models = modelsConfig?.[defaultEndpoint] ?? [];
conversation = buildDefaultConvo({
conversation,
lastConversationSetup: preset as TConversation,
endpoint: defaultEndpoint,
models,
});
}
setConversation(conversation);
setMessages(messages);
setSubmission({} as TSubmission);
resetLatestMessage();
},
[endpointsConfig],
);
const newConversation = useCallback(
(template = {}, preset?: TPreset) => {
switchToConversation(
{
conversationId: 'new',
title: 'New Chat',
...template,
endpoint: null,
createdAt: '',
updatedAt: '',
},
[],
preset,
);
},
[switchToConversation],
);
const searchPlaceholderConversation = useCallback(() => {
switchToConversation(
{
conversationId: 'search',
title: 'Search',
endpoint: null,
createdAt: '',
updatedAt: '',
},
[],
);
}, [switchToConversation]);
return {
switchToConversation,
newConversation,
searchPlaceholderConversation,
};
};
export default useConversation;

View file

@ -0,0 +1,15 @@
import { useSetRecoilState } from 'recoil';
import { useCallback } from 'react';
import store from '~/store';
const useConversations = () => {
const setRefreshConversationsHint = useSetRecoilState(store.refreshConversationsHint);
const refreshConversations = useCallback(() => {
setRefreshConversationsHint((prevState) => prevState + 1);
}, [setRefreshConversationsHint]);
return { refreshConversations };
};
export default useConversations;

View file

@ -0,0 +1,30 @@
import { useRecoilValue } from 'recoil';
import type { TConversation, TPreset } from 'librechat-data-provider';
import { getDefaultEndpoint, buildDefaultConvo } from '~/utils';
import store from '~/store';
type TDefaultConvo = { conversation: Partial<TConversation>; preset?: Partial<TPreset> | null };
const useDefaultConvo = () => {
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const modelsConfig = useRecoilValue(store.modelsConfig);
const getDefaultConversation = ({ conversation, preset }: TDefaultConvo) => {
const endpoint = getDefaultEndpoint({
convoSetup: preset as TPreset,
endpointsConfig,
});
const models = modelsConfig?.[endpoint] || [];
return buildDefaultConvo({
conversation: conversation as TConversation,
endpoint,
lastConversationSetup: preset as TConversation,
models,
});
};
return getDefaultConversation;
};
export default useDefaultConvo;

View file

@ -1,7 +1,7 @@
import { v4 } from 'uuid';
import { useRecoilState, useRecoilValue, useSetRecoilState } from 'recoil';
import { parseConvo, getResponseSender } from 'librechat-data-provider';
import type { TMessage, TSubmission } from 'librechat-data-provider';
import type { TMessage, TSubmission, TEndpointOption } from 'librechat-data-provider';
import type { TAskFunction } from '~/common';
import useUserKey from './useUserKey';
import store from '~/store';
@ -54,10 +54,10 @@ const useMessageHandler = () => {
// set the endpoint option
const convo = parseConvo(endpoint, currentConversation);
const endpointOption = {
endpoint,
...convo,
endpoint,
key: getExpiry(),
};
} as TEndpointOption;
const responseSender = getResponseSender(endpointOption);
let currentMessages: TMessage[] | null = messages ?? [];

View file

@ -23,7 +23,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => {
...prevState,
...update,
},
endpointsConfig,
}),
);
};
@ -41,7 +40,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => {
...prevState,
...update,
},
endpointsConfig,
}),
);
};
@ -57,7 +55,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => {
...prevState,
...update,
},
endpointsConfig,
}),
);
};
@ -73,7 +70,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => {
...prevState,
...update,
},
endpointsConfig,
}),
);
return;
@ -86,7 +82,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => {
...prevState,
...update,
},
endpointsConfig,
}),
);
};
@ -101,7 +96,6 @@ const usePresetOptions: TUsePresetOptions = (_preset) => {
...prevState,
agentOptions,
},
endpointsConfig,
}),
);
};

View file

@ -3,7 +3,9 @@ import { useResetRecoilState, useSetRecoilState } from 'recoil';
/* @ts-ignore */
import { SSE, createPayload, tMessageSchema, tConversationSchema } from 'librechat-data-provider';
import type { TResPlugin, TMessage, TConversation, TSubmission } from 'librechat-data-provider';
import { useAuthContext } from '~/hooks/AuthContext';
import useConversations from './useConversations';
import { useAuthContext } from './AuthContext';
import store from '~/store';
type TResData = {
@ -22,7 +24,7 @@ export default function useServerStream(submission: TSubmission | null) {
const resetLatestMessage = useResetRecoilState(store.latestMessage);
const { token } = useAuthContext();
const { refreshConversations } = store.useConversations();
const { refreshConversations } = useConversations();
const messageHandler = (data: string, submission: TSubmission) => {
const {

View file

@ -2,14 +2,14 @@ import { useEffect, useRef } from 'react';
type TUseTimeoutParams = {
callback: (error: string | number | boolean | null) => void;
delay?: number | undefined;
delay?: number;
};
type TTimeout = ReturnType<typeof setTimeout> | null;
function useTimeout({ callback, delay = 400 }: TUseTimeoutParams) {
const timeout = useRef<TTimeout>(null);
const callOnTimeout = (value: string | undefined) => {
const callOnTimeout = (value?: string) => {
// Clear existing timeout
if (timeout.current !== null) {
clearTimeout(timeout.current);

View file

@ -1,19 +1,19 @@
import { useState, useEffect } from 'react';
import { useAuthContext } from '~/hooks';
import { useNavigate, useParams } from 'react-router-dom';
import { useRecoilState, useRecoilValue, useSetRecoilState } from 'recoil';
import Landing from '~/components/ui/Landing';
import Messages from '~/components/Messages/Messages';
import TextChat from '~/components/Input/TextChat';
import store from '~/store';
import {
useGetMessagesByConvoId,
useGetConversationByIdMutation,
useGetStartupConfig,
} from 'librechat-data-provider';
import Landing from '~/components/ui/Landing';
import Messages from '~/components/Messages/Messages';
import TextChat from '~/components/Input/TextChat';
import { useAuthContext, useConversation } from '~/hooks';
import store from '~/store';
export default function Chat() {
const { isAuthenticated } = useAuthContext();
const [shouldNavigate, setShouldNavigate] = useState(true);
@ -22,7 +22,7 @@ export default function Chat() {
const setMessages = useSetRecoilState(store.messages);
const messagesTree = useRecoilValue(store.messagesTree);
const isSubmitting = useRecoilValue(store.isSubmitting);
const { newConversation } = store.useConversation();
const { newConversation } = useConversation();
const { conversationId } = useParams();
const navigate = useNavigate();

View file

@ -4,6 +4,7 @@ import { useRecoilValue, useSetRecoilState } from 'recoil';
import { Outlet } from 'react-router-dom';
import {
useGetEndpointsQuery,
useGetModelsQuery,
useGetPresetsQuery,
useGetSearchEnabledQuery,
} from 'librechat-data-provider';
@ -13,6 +14,7 @@ import { useAuthContext, useServerStream } from '~/hooks';
import store from '~/store';
export default function Root() {
const { user, isAuthenticated } = useAuthContext();
const [navVisible, setNavVisible] = useState(() => {
const savedNavVisible = localStorage.getItem('navVisible');
return savedNavVisible !== null ? JSON.parse(savedNavVisible) : false;
@ -21,13 +23,14 @@ export default function Root() {
const submission = useRecoilValue(store.submission);
useServerStream(submission ?? null);
const setPresets = useSetRecoilState(store.presets);
const setIsSearchEnabled = useSetRecoilState(store.isSearchEnabled);
const setEndpointsConfig = useSetRecoilState(store.endpointsConfig);
const setPresets = useSetRecoilState(store.presets);
const { user, isAuthenticated } = useAuthContext();
const setModelsConfig = useSetRecoilState(store.modelsConfig);
const searchEnabledQuery = useGetSearchEnabledQuery();
const endpointsQuery = useGetEndpointsQuery();
const modelsQuery = useGetModelsQuery();
const presetsQuery = useGetPresetsQuery({ enabled: !!user });
useEffect(() => {
@ -42,6 +45,14 @@ export default function Root() {
}
}, [endpointsQuery.data, endpointsQuery.isError]);
useEffect(() => {
if (modelsQuery.data) {
setModelsConfig(modelsQuery.data);
} else if (modelsQuery.isError) {
console.error('Failed to get models', modelsQuery.error);
}
}, [modelsQuery.data, modelsQuery.isError]);
useEffect(() => {
if (presetsQuery.data) {
setPresets(presetsQuery.data);

View file

@ -5,12 +5,13 @@ import { useRecoilState, useRecoilValue } from 'recoil';
import Messages from '~/components/Messages/Messages';
import TextChat from '~/components/Input/TextChat';
import { useConversation } from '~/hooks';
import store from '~/store';
export default function Search() {
const [searchQuery, setSearchQuery] = useRecoilState(store.searchQuery);
const conversation = useRecoilValue(store.conversation);
const { searchPlaceholderConversation } = store.useConversation();
const { searchPlaceholderConversation } = useConversation();
const { query } = useParams();
const navigate = useNavigate();

View file

@ -1,22 +1,6 @@
import { useCallback } from 'react';
import {
atom,
selector,
atomFamily,
useSetRecoilState,
useResetRecoilState,
useRecoilCallback,
} from 'recoil';
import {
TConversation,
TMessagesAtom,
TMessage,
TSubmission,
TPreset,
} from 'librechat-data-provider';
import { buildTree, getDefaultConversation } from '~/utils';
import submission from './submission';
import endpoints from './endpoints';
import { atom, selector, atomFamily } from 'recoil';
import { TConversation, TMessagesAtom, TMessage } from 'librechat-data-provider';
import { buildTree } from '~/utils';
const conversation = atom<TConversation | null>({
key: 'conversation',
@ -48,94 +32,10 @@ const messagesSiblingIdxFamily = atomFamily({
default: 0,
});
const useConversation = () => {
const setConversation = useSetRecoilState(conversation);
const setMessages = useSetRecoilState<TMessagesAtom>(messages);
const setSubmission = useSetRecoilState<TSubmission | null>(submission.submission);
const resetLatestMessage = useResetRecoilState(latestMessage);
const _switchToConversation = (
conversation: TConversation,
messages: TMessagesAtom = null,
preset: object | null = null,
{ endpointsConfig = {} },
) => {
const { endpoint = null } = conversation;
if (endpoint === null) {
// get the default model
conversation = getDefaultConversation({
conversation,
endpointsConfig,
preset,
});
}
setConversation(conversation);
setMessages(messages);
setSubmission({} as TSubmission);
resetLatestMessage();
};
const switchToConversation = useRecoilCallback(
({ snapshot }) =>
async (
_conversation: TConversation,
messages: TMessagesAtom = null,
preset: object | null = null,
) => {
const endpointsConfig = await snapshot.getPromise(endpoints.endpointsConfig);
_switchToConversation(_conversation, messages, preset, {
endpointsConfig,
});
},
[],
);
const newConversation = useCallback(
(template = {}, preset?: TPreset) => {
switchToConversation(
{
conversationId: 'new',
title: 'New Chat',
...template,
endpoint: null,
createdAt: '',
updatedAt: '',
},
[],
preset,
);
},
[switchToConversation],
);
const searchPlaceholderConversation = () => {
switchToConversation(
{
conversationId: 'search',
title: 'Search',
endpoint: null,
createdAt: '',
updatedAt: '',
},
[],
);
};
return {
_switchToConversation,
newConversation,
switchToConversation,
searchPlaceholderConversation,
};
};
export default {
messages,
conversation,
messagesTree,
latestMessage,
messagesSiblingIdxFamily,
useConversation,
};

View file

@ -1,19 +1,8 @@
import { atom, useSetRecoilState } from 'recoil';
import { useCallback } from 'react';
import { atom } from 'recoil';
const refreshConversationsHint = atom({
const refreshConversationsHint = atom<number>({
key: 'refreshConversationsHint',
default: 1,
});
const useConversations = () => {
const setRefreshConversationsHint = useSetRecoilState(refreshConversationsHint);
const refreshConversations = useCallback(() => {
setRefreshConversationsHint((prevState) => prevState + 1);
}, [setRefreshConversationsHint]);
return { refreshConversations };
};
export default { refreshConversationsHint, useConversations };
export default { refreshConversationsHint };

View file

@ -1,6 +1,7 @@
import conversation from './conversation';
import conversations from './conversations';
import endpoints from './endpoints';
import models from './models';
import user from './user';
import text from './text';
import submission from './submission';
@ -13,6 +14,7 @@ export default {
...conversation,
...conversations,
...endpoints,
...models,
...user,
...text,
...submission,

View file

@ -0,0 +1,34 @@
import { atom } from 'recoil';
import { TModelsConfig } from 'librechat-data-provider';
const openAIModels = [
'gpt-3.5-turbo',
'gpt-3.5-turbo-16k',
'gpt-3.5-turbo-0301',
'text-davinci-003',
'gpt-4',
'gpt-4-0314',
'gpt-4-0613',
];
const modelsConfig = atom<TModelsConfig>({
key: 'models',
default: {
openAI: openAIModels,
gptPlugins: openAIModels,
azureOpenAI: openAIModels,
bingAI: ['BingAI', 'Sydney'],
chatGPTBrowser: ['text-davinci-002-render-sha'],
google: ['chat-bison', 'text-bison', 'codechat-bison'],
anthropic: [
'claude-1',
'claude-1-100k',
'claude-instant-1',
'claude-instant-1-100k',
'claude-2',
],
},
});
export default {
modelsConfig,
};

View file

@ -0,0 +1,64 @@
import { parseConvo } from 'librechat-data-provider';
import getLocalStorageItems from './getLocalStorageItems';
import type { TConversation, EModelEndpoint } from 'librechat-data-provider';
const buildDefaultConvo = ({
conversation,
endpoint,
models,
lastConversationSetup,
}: {
conversation: TConversation;
endpoint: EModelEndpoint;
models: string[];
lastConversationSetup: TConversation;
}) => {
const { lastSelectedModel, lastSelectedTools, lastBingSettings } = getLocalStorageItems();
const { jailbreak, toneStyle } = lastBingSettings;
if (!endpoint) {
return {
...conversation,
endpoint,
};
}
const availableModels = models;
const model = lastConversationSetup?.model ?? lastSelectedModel?.[endpoint];
const secondaryModel =
endpoint === 'gptPlugins'
? lastConversationSetup?.agentOptions?.model ?? lastSelectedModel?.secondaryModel
: null;
let possibleModels: string[], secondaryModels: string[];
if (availableModels.includes(model)) {
possibleModels = [model, ...availableModels];
} else {
possibleModels = [...availableModels];
}
if (secondaryModel && availableModels.includes(secondaryModel)) {
secondaryModels = [secondaryModel, ...availableModels];
} else {
secondaryModels = [...availableModels];
}
const convo = parseConvo(endpoint, lastConversationSetup, {
models: possibleModels,
secondaryModels,
});
const defaultConvo = {
...conversation,
...convo,
endpoint,
};
defaultConvo.tools = lastSelectedTools ?? defaultConvo.tools;
defaultConvo.jailbreak = jailbreak ?? defaultConvo.jailbreak;
defaultConvo.toneStyle = toneStyle ?? defaultConvo.toneStyle;
return defaultConvo;
};
export default buildDefaultConvo;
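
The ordering of possibleModels is what drives the default: parseConvo appears to take the first entry of each models list, so prepending a still-available last selection makes it the default. A small worked example (assuming that first-entry behavior):

// Worked example (assumes parseConvo defaults to the first list entry):
const availableModels = ['gpt-4', 'gpt-3.5-turbo'];
const lastSelected = 'gpt-3.5-turbo';
const possibleModels = availableModels.includes(lastSelected)
  ? [lastSelected, ...availableModels] // last selection becomes the default
  : [...availableModels];
// possibleModels => ['gpt-3.5-turbo', 'gpt-4', 'gpt-3.5-turbo']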

View file

@ -1,9 +1,8 @@
import { parseConvo } from 'librechat-data-provider';
import type { TEndpointsConfig, TPreset } from 'librechat-data-provider';
import type { TPreset } from 'librechat-data-provider';
type TCleanupPreset = {
preset: Partial<TPreset>;
endpointsConfig: TEndpointsConfig;
};
const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
@ -20,9 +19,9 @@ const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
const parsedPreset = parseConvo(endpoint, _preset);
return {
endpoint,
presetId: _preset?.presetId ?? null,
...parsedPreset,
endpoint,
title: _preset?.title ?? 'New Preset',
} as TPreset;
};

View file

@ -1,96 +0,0 @@
import { parseConvo } from 'librechat-data-provider';
import getLocalStorageItems from './getLocalStorageItems';
import type {
TConversation,
TEndpointsConfig,
EModelEndpoint,
TConfig,
} from 'librechat-data-provider';
const defaultEndpoints = [
'openAI',
'azureOpenAI',
'bingAI',
'chatGPTBrowser',
'gptPlugins',
'google',
'anthropic',
];
const buildDefaultConversation = ({
conversation,
endpoint,
endpointsConfig,
lastConversationSetup,
}: {
conversation: TConversation;
endpoint: EModelEndpoint;
endpointsConfig: TEndpointsConfig;
lastConversationSetup: TConversation;
}) => {
const { lastSelectedModel, lastSelectedTools, lastBingSettings } = getLocalStorageItems();
const { jailbreak, toneStyle } = lastBingSettings;
if (!endpoint) {
return {
...conversation,
endpoint,
};
}
const { availableModels = [] } = endpointsConfig[endpoint] as TConfig;
const possibleModels = [lastSelectedModel[endpoint], ...availableModels];
const convo = parseConvo(endpoint, lastConversationSetup, { model: possibleModels });
const defaultConvo = {
...conversation,
...convo,
endpoint,
};
defaultConvo.tools = lastSelectedTools ?? defaultConvo.tools;
defaultConvo.jailbreak = jailbreak ?? defaultConvo.jailbreak;
defaultConvo.toneStyle = toneStyle ?? defaultConvo.toneStyle;
return defaultConvo;
};
const getDefaultConversation = ({ conversation, endpointsConfig, preset }) => {
const getEndpointFromPreset = () => {
const { endpoint: targetEndpoint } = preset || {};
if (targetEndpoint && endpointsConfig?.[targetEndpoint]) {
return targetEndpoint;
} else if (targetEndpoint) {
console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsConfig}`);
}
return null;
};
const getEndpointFromLocalStorage = () => {
try {
const { lastConversationSetup } = getLocalStorageItems();
return (
lastConversationSetup.endpoint &&
(endpointsConfig[lastConversationSetup.endpoint] ? lastConversationSetup.endpoint : null)
);
} catch (error) {
console.error(error);
return null;
}
};
const getDefaultEndpoint = () => {
return defaultEndpoints.find((e) => endpointsConfig?.[e]) || null;
};
const endpoint = getEndpointFromPreset() || getEndpointFromLocalStorage() || getDefaultEndpoint();
return buildDefaultConversation({
conversation,
endpoint,
lastConversationSetup: preset,
endpointsConfig,
});
};
export default getDefaultConversation;

View file

@ -0,0 +1,54 @@
import type { TConversation, TPreset, TEndpointsConfig } from 'librechat-data-provider';
import getLocalStorageItems from './getLocalStorageItems';
type TConvoSetup = Partial<TPreset> | Partial<TConversation>;
type TDefaultEndpoint = { convoSetup: TConvoSetup; endpointsConfig: TEndpointsConfig };
const defaultEndpoints = [
'openAI',
'azureOpenAI',
'bingAI',
'chatGPTBrowser',
'gptPlugins',
'google',
'anthropic',
];
const getEndpointFromSetup = (convoSetup: TConvoSetup, endpointsConfig: TEndpointsConfig) => {
const { endpoint: targetEndpoint } = convoSetup || {};
if (targetEndpoint && endpointsConfig?.[targetEndpoint]) {
return targetEndpoint;
} else if (targetEndpoint) {
console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsConfig}`);
}
return null;
};
const getEndpointFromLocalStorage = (endpointsConfig: TEndpointsConfig) => {
try {
const { lastConversationSetup } = getLocalStorageItems();
return (
lastConversationSetup.endpoint &&
(endpointsConfig[lastConversationSetup.endpoint] ? lastConversationSetup.endpoint : null)
);
} catch (error) {
console.error(error);
return null;
}
};
const getDefinedEndpoint = (endpointsConfig: TEndpointsConfig) => {
return defaultEndpoints.find((e) => Object.hasOwn(endpointsConfig ?? {}, e)) ?? 'openAI';
};
const getDefaultEndpoint = ({ convoSetup, endpointsConfig }: TDefaultEndpoint) => {
return (
getEndpointFromSetup(convoSetup, endpointsConfig) ||
getEndpointFromLocalStorage(endpointsConfig) ||
getDefinedEndpoint(endpointsConfig)
);
};
export default getDefaultEndpoint;
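The resolution order (setup value first, then the last endpoint saved in localStorage, then the first configured default) can be sketched in isolation; the config values below are assumptions:

```ts
// Sketch: how getDefaultEndpoint falls through its three sources.
const endpointsConfig = { azureOpenAI: { azure: true }, openAI: null };

// 1. A setup endpoint that exists in the config wins outright:
getDefaultEndpoint({ convoSetup: { endpoint: 'azureOpenAI' }, endpointsConfig }); // 'azureOpenAI'

// 2. With no setup match and no localStorage hit, getDefinedEndpoint picks the
//    first defaultEndpoints key present in the config; Object.hasOwn matches
//    keys even when their value is null, so this yields 'openAI':
getDefaultEndpoint({ convoSetup: {}, endpointsConfig }); // 'openAI'
```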

View file

@ -7,8 +7,9 @@ export { default as getLoginError } from './getLoginError';
export { default as cleanupPreset } from './cleanupPreset';
export { default as validateIframe } from './validateIframe';
export { default as getMessageError } from './getMessageError';
export { default as buildDefaultConvo } from './buildDefaultConvo';
export { default as getDefaultEndpoint } from './getDefaultEndpoint';
export { default as getLocalStorageItems } from './getLocalStorageItems';
export { default as getDefaultConversation } from './getDefaultConversation';
export function cn(...inputs: string[]) {
return twMerge(clsx(inputs));

View file

@ -1,8 +1,34 @@
# Free AI APIs
There are APIs offering free access to AI APIs via reverse proxy, and one of the major players, compatible with LibreChat, is NagaAI.
There are services offering free or free-trial access to AI APIs via reverse proxy.
Feel free to check out the others, but I haven't personally tested them: [Free AI APIs](https://github.com/NovaOSS/free-ai-apis)
Here is a well-maintained public list of [Free AI APIs](https://github.com/NovaOSS/free-ai-apis) that may or may not be compatible with LibreChat.
### [OpenRouter](https://openrouter.ai/) ⇆ (preferred)
While not completely free, you get free trial credits when you [sign up for OpenRouter](https://openrouter.ai/), a legitimate proxy service to a multitude of LLMs, both closed and open source, including:
- OpenAI models (great if you are barred from their API for whatever reason)
- Anthropic Claude models (same as above)
- Meta's Llama models
- pygmalionai/mythalion-13b
- and many more open source models. Newer integrations are usually discounted, too!
OpenRouter is so great, I decided to integrate it into the project as a standalone feature.
**Setup:**
- Sign up for [OpenRouter](https://openrouter.ai/) and create a key. You should name it and set a limit as well.
- Set the environment variable `OPENROUTER_API_KEY` in your .env file to the key you just created.
- Restart your LibreChat server and use the OpenAI or Plugins endpoints.
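Under the hood, OpenRouter exposes an OpenAI-compatible API. If you want to sanity-check your key outside LibreChat, here is a minimal standalone sketch (not LibreChat code; the model slug is just an example):

```ts
// Verify an OPENROUTER_API_KEY against OpenRouter's chat completions endpoint.
const res = await fetch('https://openrouter.ai/api/v1/chat/completions', {
  method: 'POST',
  headers: {
    Authorization: `Bearer ${process.env.OPENROUTER_API_KEY}`,
    'Content-Type': 'application/json',
  },
  body: JSON.stringify({
    model: 'openai/gpt-3.5-turbo', // any model slug listed on openrouter.ai
    messages: [{ role: 'user', content: 'Hello!' }],
  }),
});
console.log((await res.json()).choices?.[0]?.message?.content);
```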
**Notes:**
- [TODO] **In the future, you will be able to set up OpenRouter from the frontend as well.**
- This will override the official OpenAI API or your reverse proxy settings for both Plugins and OpenAI.
- On initial setup, you may need to refresh your page twice to see all their supported models populate automatically.
- Plugins: Functions Agent works with OpenRouter when using OpenAI models.
- Plugins: Turn functions off to try plugins with non-OpenAI models (ChatGPT plugins will not work and others may not work as expected).
- Plugins: If you have Azure configured, make sure `PLUGINS_USE_AZURE` is not set in your .env file when you want to use OpenRouter.
> ⚠️ OpenRouter is in a category of its own, and is highly recommended over the "free" services below. NagaAI and other 'free' API proxies tend to have intermittent issues, data leaks, and/or problems with the guidelines of the platforms they advertise on. Use the below at your own risk.
### NagaAI

View file

@ -48,7 +48,7 @@
"b:api": "NODE_ENV=production bun run api/server/index.js",
"b:api:dev": "NODE_ENV=development bun run --watch api/server/index.js",
"b:data-provider": "cd packages/data-provider && bun run b:build",
"b:client": "bun run b:data-provider && cd client && bun run b:build",
"b:client": "bun --bun run b:data-provider && cd client && bun --bun run b:build",
"b:client:dev": "cd client && bun run b:dev",
"b:test:client": "cd client && bun run b:test",
"b:test:api": "cd api && bun run b:test"

View file

@ -36,6 +36,8 @@ export const deletePreset = () => '/api/presets/delete';
export const aiEndpoints = () => '/api/endpoints';
export const models = () => '/api/models';
export const tokenizer = () => '/api/tokenizer';
export const login = () => '/api/auth/login';

View file

@ -1,5 +1,7 @@
import * as t from './types';
import * as s from './schemas';
/* TODO: fix dependency cycle */
// eslint-disable-next-line import/no-cycle
import request from './request';
import * as endpoints from './api-endpoints';
@ -99,6 +101,10 @@ export const getAIEndpoints = () => {
return request.get(endpoints.aiEndpoints());
};
export const getModels = () => {
return request.get(endpoints.models());
};
export const updateTokenCount = (text: string) => {
return request.post(endpoints.tokenizer(), { arg: text });
};

View file

@ -17,6 +17,7 @@ export enum QueryKeys {
searchEnabled = 'searchEnabled',
user = 'user',
name = 'name', // user key name
models = 'models',
endpoints = 'endpoints',
presets = 'presets',
searchResults = 'searchResults',
@ -218,6 +219,14 @@ export const useGetEndpointsQuery = (): QueryObserverResult<t.TEndpointsConfig>
});
};
export const useGetModelsQuery = (): QueryObserverResult<t.TModelsConfig> => {
return useQuery([QueryKeys.models], () => dataService.getModels(), {
refetchOnWindowFocus: false,
refetchOnReconnect: false,
refetchOnMount: false,
});
};
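A hypothetical consumer of the new query (a sketch; variable names are assumptions):

```ts
// Sketch: reading the cached models list for an endpoint inside a component.
const { data: modelsConfig } = useGetModelsQuery();
// TModelsConfig is a Record<string, string[]>: endpoint key -> model names.
const openAIModels = modelsConfig?.openAI ?? [];
```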
export const useCreatePresetMutation = (): UseMutationResult<
s.TPreset[],
unknown,
@ -313,6 +322,9 @@ export const useLoginUserMutation = (): UseMutationResult<
onSuccess: () => {
queryClient.invalidateQueries([QueryKeys.user]);
},
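// Mark the cached models list stale as soon as the login mutation starts,
// so it is refetched with the new session's credentials.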
onMutate: () => {
queryClient.invalidateQueries([QueryKeys.models]);
},
});
};
@ -345,7 +357,12 @@ export const useRefreshTokenMutation = (): UseMutationResult<
unknown,
unknown
> => {
return useMutation(() => dataService.refreshToken(), {});
const queryClient = useQueryClient();
return useMutation(() => dataService.refreshToken(), {
onMutate: () => {
queryClient.invalidateQueries([QueryKeys.models]);
},
});
};
export const useUserKeyQuery = (

View file

@ -1,5 +1,6 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import axios, { AxiosRequestConfig, AxiosError } from 'axios';
/* TODO: fix dependency cycle */
// eslint-disable-next-line import/no-cycle
import { refreshToken } from './data-service';
import { setTokenHeader } from './headers-helpers';

View file

@ -369,7 +369,8 @@ function getFirstDefinedValue(possibleValues: string[]) {
}
type TPossibleValues = {
model: string[];
models: string[];
secondaryModels?: string[];
};
export const parseConvo = (
@ -383,10 +384,15 @@ export const parseConvo = (
throw new Error(`Unknown endpoint: ${endpoint}`);
}
const convo = schema.parse(conversation);
const convo = schema.parse(conversation) as TConversation;
const { models, secondaryModels } = possibleValues ?? {};
if (possibleValues && convo) {
convo.model = getFirstDefinedValue(possibleValues.model) ?? convo.model;
if (models && convo) {
convo.model = getFirstDefinedValue(models) ?? convo.model;
}
if (secondaryModels && convo.agentOptions) {
convo.agentOptions.model = getFirstDefinedValue(secondaryModels) ?? convo.agentOptions.model;
}
return convo;
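Given the renamed `models` key and the new `secondaryModels` handling, a call now looks roughly like this (a sketch assembled from this diff; the candidate arrays are assumptions mirroring the helper earlier in this commit):

```ts
// Sketch: primary model candidates first, then candidates for agentOptions.model.
const convo = parseConvo(endpoint, lastConversationSetup, {
  models: [lastSelectedModel[endpoint], ...availableModels],
  secondaryModels: [lastConversationSetup?.agentOptions?.model, ...availableModels],
});
```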

View file

@ -111,21 +111,16 @@ export type TSearchResults = {
};
export type TConfig = {
availableModels: [];
availableModels?: [];
userProvide?: boolean | null;
availableTools?: [];
plugins?: [];
azure?: boolean;
};
export type TEndpointsConfig = {
azureOpenAI: TConfig | null;
bingAI: TConfig | null;
chatGPTBrowser: TConfig | null;
anthropic: TConfig | null;
google: TConfig | null;
openAI: TConfig | null;
gptPlugins: TConfig | null;
};
export type TModelsConfig = Record<string, string[]>;
export type TEndpointsConfig = Record<string, TConfig | null>;
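// Illustrative shapes for the two record types above (example values, assumptions):
// const models: TModelsConfig = { openAI: ['gpt-3.5-turbo', 'gpt-4'] };
// const endpoints: TEndpointsConfig = { openAI: { userProvide: true }, bingAI: null };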
export type TUpdateTokenCountResponse = {
count: number;