Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-09-22 08:12:00 +02:00
feat: Order/disable endpoints with ENDPOINTS env var (#1206)
* fix: endpoint will not be selected if disabled
* feat: order and disable endpoints with ENDPOINTS env var
* chore: remove console.log
parent f05f6826f5 · commit f3402401f1
6 changed files with 66 additions and 33 deletions
@@ -8,6 +8,13 @@ APP_TITLE=LibreChat
 # Uncomment and make empty "" to remove the footer.
 # CUSTOM_FOOTER="My custom footer"
 
+# Uncomment the `ENDPOINTS` variable to determine which endpoints are available.
+# Listed here are the only accepted values, which are all enabled if an authentication
+# option is provided, whether user provided or admin provided in this .env file.
+# Note: the first value is considered the default value.
+
+# ENDPOINTS=openAI,azureOpenAI,bingAI,chatGPTBrowser,google,gptPlugins,anthropic
+
 # The server will listen to localhost:3080 by default. You can change the target IP as you want.
 # If you want to make this server available externally, for example to share the server with others
 # or expose this from a Docker container, set host to 0.0.0.0 or your external IP interface.

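For example, setting `ENDPOINTS=openAI,anthropic,gptPlugins` would expose only those three endpoints (each still needs credentials configured), list them in that order, and make openAI the default.
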
@@ -1,6 +1,6 @@
-const { EModelEndpoint } = require('../routes/endpoints/schemas');
-const { availableTools } = require('../../app/clients/tools');
-const { addOpenAPISpecs } = require('../../app/clients/tools/util/addOpenAPISpecs');
+const { EModelEndpoint } = require('~/server/routes/endpoints/schemas');
+const { availableTools } = require('~/app/clients/tools');
+const { addOpenAPISpecs } = require('~/app/clients/tools/util/addOpenAPISpecs');
 const {
   openAIApiKey,
   azureOpenAIApiKey,

@@ -13,13 +13,13 @@ const {
   bingAI,
   chatGPTBrowser,
   anthropic,
-} = require('../services/EndpointService').config;
+} = require('~/server/services/EndpointService').config;
 
 let i = 0;
 async function endpointController(req, res) {
   let key, palmUser;
   try {
-    key = require('../../data/auth.json');
+    key = require('~/data/auth.json');
   } catch (e) {
     if (i === 0) {
       i++;

@@ -54,18 +54,42 @@ async function endpointController(req, res) {
       }
     : false;
 
-  res.send(
-    JSON.stringify({
-      [EModelEndpoint.openAI]: openAI,
-      // [EModelEndpoint.assistant]: assistant,
-      [EModelEndpoint.azureOpenAI]: azureOpenAI,
-      [EModelEndpoint.google]: google,
-      [EModelEndpoint.bingAI]: bingAI,
-      [EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
-      [EModelEndpoint.gptPlugins]: gptPlugins,
-      [EModelEndpoint.anthropic]: anthropic,
-    }),
-  );
+  let enabledEndpoints = [
+    EModelEndpoint.openAI,
+    EModelEndpoint.azureOpenAI,
+    EModelEndpoint.google,
+    EModelEndpoint.bingAI,
+    EModelEndpoint.chatGPTBrowser,
+    EModelEndpoint.gptPlugins,
+    EModelEndpoint.anthropic,
+  ];
+
+  const endpointsEnv = process.env.ENDPOINTS || '';
+  if (endpointsEnv) {
+    enabledEndpoints = endpointsEnv
+      .split(',')
+      .filter((endpoint) => endpoint?.trim())
+      .map((endpoint) => endpoint.trim());
+  }
+
+  const endpointConfig = {
+    [EModelEndpoint.openAI]: openAI,
+    [EModelEndpoint.azureOpenAI]: azureOpenAI,
+    [EModelEndpoint.google]: google,
+    [EModelEndpoint.bingAI]: bingAI,
+    [EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
+    [EModelEndpoint.gptPlugins]: gptPlugins,
+    [EModelEndpoint.anthropic]: anthropic,
+  };
+
+  const orderedAndFilteredEndpoints = enabledEndpoints.reduce((config, key, index) => {
+    if (endpointConfig[key]) {
+      config[key] = { ...(endpointConfig[key] ?? {}), order: index };
+    }
+    return config;
+  }, {});
+
+  res.send(JSON.stringify(orderedAndFilteredEndpoints));
 }
 
 module.exports = endpointController;

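A minimal sketch of the filtering-and-ordering step in the endpoint controller above, written in TypeScript for illustration; `sampleConfig` and the sample `ENDPOINTS` string are made-up stand-ins, not values from the repo:

```ts
// Stand-in for the per-endpoint config the controller assembles.
// A falsy value means the endpoint has no credentials and should be dropped.
const sampleConfig: Record<string, Record<string, unknown> | false> = {
  openAI: { userProvide: false },
  anthropic: { userProvide: true },
  bingAI: false,
};

// Same parsing as the controller: split on commas, drop empty entries, trim whitespace.
const endpointsEnv = 'anthropic, openAI, bingAI'; // stand-in for process.env.ENDPOINTS
const enabledEndpoints = endpointsEnv
  .split(',')
  .filter((endpoint) => endpoint?.trim())
  .map((endpoint) => endpoint.trim());

// Keep only endpoints that are both listed and configured, tagging each with its
// position in the list so the client can sort on it later.
const ordered = enabledEndpoints.reduce<Record<string, Record<string, unknown>>>(
  (config, key, index) => {
    if (sampleConfig[key]) {
      config[key] = { ...(sampleConfig[key] as Record<string, unknown>), order: index };
    }
    return config;
  },
  {},
);

console.log(ordered);
// => { anthropic: { userProvide: true, order: 0 }, openAI: { userProvide: false, order: 1 } }
```

An endpoint listed in `ENDPOINTS` but lacking credentials is silently omitted, and an endpoint that is configured but not listed never reaches the client.
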
@@ -1,22 +1,11 @@
 import type { TConversation, TPreset, TEndpointsConfig } from 'librechat-data-provider';
-import { EModelEndpoint } from 'librechat-data-provider';
 import getLocalStorageItems from './getLocalStorageItems';
+import mapEndpoints from './mapEndpoints';
 
 type TConvoSetup = Partial<TPreset> | Partial<TConversation>;
 
 type TDefaultEndpoint = { convoSetup: TConvoSetup; endpointsConfig: TEndpointsConfig };
 
-export const defaultEndpoints: EModelEndpoint[] = [
-  EModelEndpoint.openAI,
-  EModelEndpoint.assistant,
-  EModelEndpoint.azureOpenAI,
-  EModelEndpoint.bingAI,
-  EModelEndpoint.chatGPTBrowser,
-  EModelEndpoint.gptPlugins,
-  EModelEndpoint.google,
-  EModelEndpoint.anthropic,
-];
-
 const getEndpointFromSetup = (convoSetup: TConvoSetup, endpointsConfig: TEndpointsConfig) => {
   const { endpoint: targetEndpoint } = convoSetup || {};
   if (targetEndpoint && endpointsConfig?.[targetEndpoint]) {

@@ -49,7 +38,8 @@ const getEndpointFromLocalStorage = (endpointsConfig: TEndpointsConfig) => {
 };
 
 const getDefinedEndpoint = (endpointsConfig: TEndpointsConfig) => {
-  return defaultEndpoints.find((e) => Object.hasOwn(endpointsConfig ?? {}, e)) ?? 'openAI';
+  const endpoints = mapEndpoints(endpointsConfig);
+  return endpoints.find((e) => Object.hasOwn(endpointsConfig ?? {}, e));
 };
 
 const getDefaultEndpoint = ({ convoSetup, endpointsConfig }: TDefaultEndpoint) => {

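The hard-coded `?? 'openAI'` fallback is gone: `getDefinedEndpoint` now returns the first endpoint that `mapEndpoints` yields for the current config, i.e. the first configured entry in `ENDPOINTS` order, consistent with the .env note that the first value is treated as the default.
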
@@ -1,5 +1,5 @@
+import { defaultEndpoints } from 'librechat-data-provider';
 import type { TEndpointsConfig } from 'librechat-data-provider';
-import { defaultEndpoints } from './getDefaultEndpoint';
 
 const getEndpointsFilter = (config: TEndpointsConfig) => {
   const filter: Record<string, boolean> = {};

@@ -16,5 +16,5 @@ const getAvailableEndpoints = (filter: Record<string, boolean>) => {
 
 export default function mapEndpoints(config: TEndpointsConfig) {
   const filter = getEndpointsFilter(config);
-  return getAvailableEndpoints(filter);
+  return getAvailableEndpoints(filter).sort((a, b) => config[a].order - config[b].order);
 }

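A short sketch of the client-side counterpart: sorting by the `order` the server attached. The `config` literal is a hypothetical payload, not real API output:

```ts
// Hypothetical payload shaped like the controller response above.
const config: Record<string, { order: number }> = {
  openAI: { order: 1 },
  gptPlugins: { order: 2 },
  anthropic: { order: 0 },
};

// Mirrors mapEndpoints: take the available endpoints and order them by `order`.
const endpoints = Object.keys(config).sort((a, b) => config[a].order - config[b].order);

console.log(endpoints); // => ['anthropic', 'openAI', 'gptPlugins']
```
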
@@ -11,6 +11,17 @@ export enum EModelEndpoint {
   assistant = 'assistant',
 }
 
+export const defaultEndpoints: EModelEndpoint[] = [
+  EModelEndpoint.openAI,
+  EModelEndpoint.assistant,
+  EModelEndpoint.azureOpenAI,
+  EModelEndpoint.bingAI,
+  EModelEndpoint.chatGPTBrowser,
+  EModelEndpoint.gptPlugins,
+  EModelEndpoint.google,
+  EModelEndpoint.anthropic,
+];
+
 export const alternateName = {
   [EModelEndpoint.openAI]: 'OpenAI',
   [EModelEndpoint.assistant]: 'Assistants',

@@ -122,11 +122,12 @@ export type TConfig = {
   availableTools?: [];
   plugins?: Record<string, string>;
   azure?: boolean;
+  order: number;
 };
 
 export type TModelsConfig = Record<string, string[]>;
 
-export type TEndpointsConfig = Record<string, TConfig | null>;
+export type TEndpointsConfig = Record<string, TConfig>;
 
 export type TUpdateTokenCountResponse = {
   count: number;

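Since the controller now omits disabled or unconfigured endpoints instead of sending falsy placeholders for every known endpoint, the nullable variant of `TEndpointsConfig` is no longer needed, and every remaining entry carries an `order`. For illustration, a value of the updated shape might look like the sketch below (only fields visible in this hunk are used; other `TConfig` fields are omitted on the assumption they are optional):

```ts
// Sketch of the updated TEndpointsConfig shape: no null entries, each config has `order`.
const endpointsConfig: Record<string, { azure?: boolean; order: number }> = {
  azureOpenAI: { azure: true, order: 0 },
  anthropic: { order: 1 },
};
```
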