💫 feat: Config File & Custom Endpoints (#1474)
* WIP(backend/api): custom endpoint
* WIP(frontend/client): custom endpoint
* chore: adjust typedefs for configs
* refactor: use data-provider for cache keys; rename enums and custom endpoint for better clarity and compatibility
* feat: loadYaml utility
* refactor: rename back to from and proof-of-concept for creating schemas from user-defined defaults
* refactor: remove custom endpoint from default endpointsConfig, as it will be exclusively managed by the yaml config
* refactor(EndpointController): rename variables for clarity
* feat: initial load of custom config
* feat(server/utils): add simple `isUserProvided` helper
* chore(types): update TConfig type
* refactor: remove custom endpoint handling from model services, as it will be handled by the config; modularize fetching of models
* feat: loadCustomConfig, loadConfigEndpoints, loadConfigModels
* chore: reorganize server init imports, invoke loadCustomConfig
* refactor(loadConfigEndpoints/Models): return each custom endpoint as a standalone endpoint
* refactor(Endpoint/ModelController): spread config values after defaults (temporary)
* chore(client): fix type issues
* WIP: first pass for multiple custom endpoints
  - add endpointType to Conversation schema
  - add/update zod schemas for both convos/presets to allow a non-EModelEndpoint value as endpoint (also using type assertion)
  - use the `endpointType` value as `endpoint` where mapping to type is necessary, using this field
  - use the custom-defined `endpoint` value, not the type, for mapping to modelsConfig
  - misc: add return type to `getDefaultEndpoint`
  - in `useNewConvo`, add the endpointType if it wasn't already added to the conversation
  - EndpointsMenu: use the user-defined endpoint name as the title in the menu
  - TODO: custom icon via custom config; change unknown to robot icon
* refactor(parseConvo): pass args as an object and update call sites accordingly; chore: comment out 'create schema' code
* chore: remove unused availableModels field in TConfig type
* refactor(parseCompactConvo): pass args as an object and update call sites accordingly
* feat: chat through custom endpoint
* chore(message/convoSchemas): avoid saving empty arrays
* fix(BaseClient/saveMessageToDatabase): save endpointType
* refactor(ChatRoute): show Spinner if endpointsQuery or modelsQuery are still loading, which is apparent with slow fetching of models/remote config on first serve
* fix(useConversation): assign endpointType if it's missing
* fix(SaveAsPreset): pass the real endpoint and endpointType when saving a preset
* chore: reorganize type order for TConfig, add `iconURL`
* feat: custom endpoint icon support:
  - use UnknownIcon in all icon contexts
  - add mistral and openrouter as known endpoints, and add their icons
  - iconURL support
* fix(presetSchema): move endpointType to default schema definitions shared between convoSchema and defaults
* refactor(Settings/OpenAI): remove legacy `isOpenAI` flag
* fix(OpenAIClient): do not invoke abortCompletion on completion error
* feat: add responseSender/label support for custom endpoints:
  - use defaultModelLabel field in endpointOption
  - add model defaults for custom endpoints in `getResponseSender`
  - add `useGetSender` hook, which uses EndpointsQuery to determine `defaultModelLabel`
  - include defaultModelLabel from endpointConfig in custom endpoint client options
  - pass `endpointType` to `getResponseSender`
* feat(OpenAIClient): use custom options from config file
* refactor: rename `defaultModelLabel` to `modelDisplayLabel`
* refactor(data-provider): separate concerns from `schemas` into `parsers` and `config`, and fix imports elsewhere
* feat: `iconURL` and extract environment variables from custom endpoint config values
* feat: custom config validation via zod schema; rename and move to `./projectRoot/librechat.yaml`
* docs: custom config docs and examples
* fix(OpenAIClient/mistral): mistral does not allow a singular system message; also add `useChatCompletion` flag to use openai-node for title completions
* fix(custom/initializeClient): extract env var and use `isUserProvided` function
* Update librechat.example.yaml
* feat(InputWithLabel): add className props and forwardRef
* fix(streamResponse): handle error edge case where either the messages or convos query throws an error
* fix(useSSE): handle errorHandler edge cases where the error response is or is not properly formatted by the API, especially when a conversationId is not yet provided, ensuring the stream is properly closed on error
* feat: user_provided keys for custom endpoints
* fix(config/endpointSchema): do not allow default endpoint values in custom endpoint `name`
* feat(loadConfigModels): extract env variables and optimize fetching of models
* feat: support custom endpoint iconURL for messages and Nav
* feat(OpenAIClient): add/dropParams support
* docs: update docs with default params, add/dropParams, and notes to use the config file instead of `OPENAI_REVERSE_PROXY`
* docs: update docs with additional notes
* feat(maxTokensMap): add mistral models (32k context)
* docs: update openrouter notes
* Update ai_setup.md
* docs(custom_config): add table of contents and fix note about custom name
* docs(custom_config): reorder ToC
* Update custom_config.md
* Add note about `max_tokens` field in custom_config.md
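To make the "custom config validation via zod schema" item concrete, here is a rough TypeScript sketch of such a schema. It is reconstructed only from the field names mentioned in this commit message (name, apiKey, baseURL, models, iconURL, modelDisplayLabel, addParams, dropParams) and is not the actual endpoint schema shipped in data-provider:

```ts
import { z } from 'zod';
import { defaultEndpoints } from 'librechat-data-provider';
import type { EModelEndpoint } from 'librechat-data-provider';

// Hypothetical sketch only: field names come from the commit notes above,
// not from the real schema definition.
export const customEndpointSchema = z.object({
  // per "fix(config/endpointSchema)": a custom endpoint `name` must not
  // collide with a default EModelEndpoint value
  name: z
    .string()
    .refine((name) => !defaultEndpoints.includes(name as EModelEndpoint), {
      message: 'Custom endpoint name cannot be a default endpoint value',
    }),
  // may be a literal key, "user_provided", or an env-variable reference
  // that the server-side loaders extract
  apiKey: z.string(),
  baseURL: z.string(),
  models: z.object({
    default: z.array(z.string()).min(1),
  }),
  iconURL: z.string().optional(),
  modelDisplayLabel: z.string().optional(),
  addParams: z.record(z.any()).optional(),
  dropParams: z.array(z.string()).optional(),
});

export type CustomEndpointConfig = z.infer<typeof customEndpointSchema>;
```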
parent 3f98f92d4c
commit 29473a72db

100 changed files with 2146 additions and 627 deletions
```diff
@@ -15,10 +15,12 @@ const buildDefaultConvo = ({
 }) => {
   const { lastSelectedModel, lastSelectedTools, lastBingSettings } = getLocalStorageItems();
   const { jailbreak, toneStyle } = lastBingSettings;
+  const { endpointType } = conversation;
 
   if (!endpoint) {
     return {
       ...conversation,
+      endpointType,
       endpoint,
     };
   }
@@ -44,13 +46,20 @@ const buildDefaultConvo = ({
     secondaryModels = [...availableModels];
   }
 
-  const convo = parseConvo(endpoint, lastConversationSetup, {
-    models: possibleModels,
-    secondaryModels,
-  });
+  const convo = parseConvo({
+    endpoint,
+    endpointType,
+    conversation: lastConversationSetup,
+    possibleValues: {
+      models: possibleModels,
+      secondaryModels,
+    },
+  });
 
   const defaultConvo = {
     ...conversation,
     ...convo,
+    endpointType,
     endpoint,
   };
```
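The hunks above (together with the matching change to cleanupPreset below) switch parseConvo from positional arguments to a single options object. The shape below is only inferred from these call sites; the names and exact types in the data-provider `parsers` module may differ:

```ts
import type { EModelEndpoint, TConversation, TPreset } from 'librechat-data-provider';

// Shape inferred from the call sites above; illustrative only.
type TParseConvoOptions = {
  // for custom endpoints, `endpoint` holds the user-defined name...
  endpoint: EModelEndpoint | string;
  // ...while `endpointType` (e.g. EModelEndpoint.custom) selects the schema to parse against
  endpointType?: EModelEndpoint;
  conversation: Partial<TConversation> | Partial<TPreset> | null;
  possibleValues?: {
    models?: string[];
    secondaryModels?: string[];
  };
};

// Return type is a guess: the parser yields the conversation fields valid for the endpoint.
declare function parseConvo(options: TParseConvoOptions): Partial<TConversation>;
```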
```diff
@@ -6,7 +6,7 @@ type TCleanupPreset = {
 };
 
 const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
-  const { endpoint } = _preset;
+  const { endpoint, endpointType } = _preset;
   if (!endpoint) {
     console.error(`Unknown endpoint ${endpoint}`, _preset);
     return {
@@ -16,12 +16,13 @@ const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
     };
   }
 
-  const parsedPreset = parseConvo(endpoint, _preset);
+  const parsedPreset = parseConvo({ endpoint, endpointType, conversation: _preset });
 
   return {
     presetId: _preset?.presetId ?? null,
     ...parsedPreset,
     endpoint,
+    endpointType,
     title: _preset?.title ?? 'New Preset',
   } as TPreset;
 };
```
```diff
@@ -1,4 +1,9 @@
-import type { TConversation, TPreset, TEndpointsConfig } from 'librechat-data-provider';
+import type {
+  TConversation,
+  TPreset,
+  TEndpointsConfig,
+  EModelEndpoint,
+} from 'librechat-data-provider';
 import getLocalStorageItems from './getLocalStorageItems';
 import mapEndpoints from './mapEndpoints';
 
@@ -42,7 +47,7 @@ const getDefinedEndpoint = (endpointsConfig: TEndpointsConfig) => {
   return endpoints.find((e) => Object.hasOwn(endpointsConfig ?? {}, e));
 };
 
-const getDefaultEndpoint = ({ convoSetup, endpointsConfig }: TDefaultEndpoint) => {
+const getDefaultEndpoint = ({ convoSetup, endpointsConfig }: TDefaultEndpoint): EModelEndpoint => {
   return (
     getEndpointFromSetup(convoSetup, endpointsConfig) ||
     getEndpointFromLocalStorage(endpointsConfig) ||
```
```diff
@@ -1,20 +1,37 @@
 import { defaultEndpoints } from 'librechat-data-provider';
-import type { TEndpointsConfig } from 'librechat-data-provider';
+import type { EModelEndpoint, TEndpointsConfig } from 'librechat-data-provider';
 
-const getEndpointsFilter = (config: TEndpointsConfig) => {
+const getEndpointsFilter = (endpointsConfig: TEndpointsConfig) => {
   const filter: Record<string, boolean> = {};
-  for (const key of Object.keys(config)) {
-    filter[key] = !!config[key];
+  for (const key of Object.keys(endpointsConfig)) {
+    filter[key] = !!endpointsConfig[key];
   }
   return filter;
 };
 
-const getAvailableEndpoints = (filter: Record<string, boolean>) => {
-  const endpoints = defaultEndpoints;
-  return endpoints.filter((endpoint) => filter[endpoint]);
+const getAvailableEndpoints = (
+  filter: Record<string, boolean>,
+  endpointsConfig: TEndpointsConfig,
+) => {
+  const defaultSet = new Set(defaultEndpoints);
+  const availableEndpoints: EModelEndpoint[] = [];
+
+  for (const endpoint in endpointsConfig) {
+    // Check if endpoint is in the filter or its type is in defaultEndpoints
+    if (
+      filter[endpoint] ||
+      (endpointsConfig[endpoint]?.type && defaultSet.has(endpointsConfig[endpoint].type))
+    ) {
+      availableEndpoints.push(endpoint as EModelEndpoint);
+    }
+  }
+
+  return availableEndpoints;
 };
 
-export default function mapEndpoints(config: TEndpointsConfig) {
-  const filter = getEndpointsFilter(config);
-  return getAvailableEndpoints(filter).sort((a, b) => config[a].order - config[b].order);
+export default function mapEndpoints(endpointsConfig: TEndpointsConfig) {
+  const filter = getEndpointsFilter(endpointsConfig);
+  return getAvailableEndpoints(filter, endpointsConfig).sort(
+    (a, b) => (endpointsConfig[a]?.order ?? 0) - (endpointsConfig[b]?.order ?? 0),
+  );
 }
```
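With this rewrite, mapEndpoints iterates every key of endpointsConfig instead of only the built-in endpoint list, so user-defined custom endpoints appear alongside the defaults and are still sorted by `order`. A small illustrative call follows; the config values are made up for the example and real TEndpointsConfig entries carry more fields:

```ts
import type { TEndpointsConfig } from 'librechat-data-provider';
import mapEndpoints from './mapEndpoints';

// Illustrative config only: 'Mistral' stands in for a user-defined custom
// endpoint from librechat.yaml, marked with a `type` field.
const endpointsConfig = {
  openAI: { order: 0 },
  Mistral: { order: 1, type: 'custom', iconURL: 'https://example.com/mistral.png' },
} as unknown as TEndpointsConfig;

// Both keys are present (and truthy) in the config, so both pass the filter;
// the result is sorted by `order`: ['openAI', 'Mistral'].
console.log(mapEndpoints(endpointsConfig));
```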
```diff
@@ -26,7 +26,10 @@ export const getPresetTitle = (preset: TPreset) => {
   let modelInfo = model || '';
   let label = '';
 
-  if (endpoint && [EModelEndpoint.azureOpenAI, EModelEndpoint.openAI].includes(endpoint)) {
+  if (
+    endpoint &&
+    [EModelEndpoint.azureOpenAI, EModelEndpoint.openAI, EModelEndpoint.custom].includes(endpoint)
+  ) {
     label = chatGptLabel || '';
   } else if (endpoint && [EModelEndpoint.google, EModelEndpoint.anthropic].includes(endpoint)) {
     label = modelLabel || '';
```
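For preset titles, custom endpoints reuse the chatGptLabel branch above; for the message sender, the commit notes describe falling back to the endpoint-level `modelDisplayLabel`. A hypothetical sketch of that fallback order, not the actual getResponseSender implementation:

```ts
// Hypothetical helper: for a custom endpoint, fall back from the user's label
// to the config-level modelDisplayLabel, and finally to the model name.
function getCustomSenderLabel(options: {
  chatGptLabel?: string;
  modelDisplayLabel?: string;
  model?: string;
}): string {
  const { chatGptLabel, modelDisplayLabel, model } = options;
  return chatGptLabel || modelDisplayLabel || model || 'AI';
}

// e.g. getCustomSenderLabel({ modelDisplayLabel: 'Mistral', model: 'mistral-medium' }) === 'Mistral'
```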