feat: OpenRouter Support & Improve Model Fetching ⇆ (#936)

* chore(ChatGPTClient.js): add support for OpenRouter API
chore(OpenAIClient.js): add support for OpenRouter API

* chore: comment out token debugging

* chore: add back streamResult assignment

* chore: remove double condition/assignment from merging

* refactor(routes/endpoints): move route logic into controller/services

* feat: add openrouter model fetching (see the fetch sketch after this list)

* chore: remove unused endpointsConfig in cleanupPreset function

* refactor: separate models concern from endpointsConfig

* refactor(data-provider): add TModels type and make TEndpointsConfig adaptable to new endpoint keys

* refactor: complete models endpoint service in data-provider

* refactor: add onMutate handlers for refreshToken and login, and invalidate the models query

* feat: complete models endpoint logic for frontend

* chore: remove requireJwtAuth from /api/endpoints and /api/models as not implemented yet

* fix: endpoint is no longer overwritten and instead uses the active value

* feat: openrouter support for plugins

* chore(EndpointOptionsDialog): remove unused recoil value

* refactor(schemas/parseConvo): handle secondaryModels by using the first of the defined secondary models (the list puts the last selected one first), or default to the convo's secondary model value (see the ordering sketch after this list)

* refactor: move hooks out of the store and into the hooks directory
refactor(switchToConversation): have switchToConversation read the latest recoil state, which is necessary to get the most up-to-date models list, and replace the wrapper function
refactor(getDefaultConversation): factor the logic out into three pieces to reduce complexity (see the composition sketch below, after the getDefaultEndpoint diff)

* fix: backend tests

* feat: optimistic update by calling newConvo when models are fetched

* feat: openrouter support for titling convos

* feat: cache the models fetch (see the caching sketch after this list)

* chore: add missing dep to AuthContext useEffect

* chore: fix useTimeout types

* chore: delete old getDefaultConvo file

* chore: remove newConvo logic from Root, remove console log from api models caching

* chore: ensure bun is used for building in b:client script

* fix: default endpoint no longer defaults to null on a completely fresh login (no localStorage/cookies)

* chore: add openrouter docs to free_ai_apis.md and .env.example

* chore: remove openrouter console logs

* feat: add debugging env variable for Plugins
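
This is the fetch sketch referenced in the "add openrouter model fetching" entry above. The https://openrouter.ai/api/v1/models URL and the { data: [{ id }] } response shape follow OpenRouter's public API, but the function name and error handling are illustrative rather than the commit's actual code.

// Illustrative sketch, not the commit's implementation: fetch the model IDs
// exposed by OpenRouter's public models endpoint.
async function fetchOpenRouterModels(): Promise<string[]> {
  try {
    const res = await fetch('https://openrouter.ai/api/v1/models');
    const json = (await res.json()) as { data: { id: string }[] };
    // Each entry's `id` (e.g. 'openai/gpt-4') becomes a selectable model in the client.
    return json.data.map((model) => model.id);
  } catch (error) {
    console.error('Failed to fetch OpenRouter models:', error);
    return [];
  }
}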
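
This is the ordering sketch referenced in the parseConvo entry above. It mirrors the list that buildDefaultConvo (added further down) hands to parseConvo: the last selected secondary model is placed first so it wins when it is still available. The model names here are placeholders.

// Illustrative ordering only; the model names are made up.
const availableModels = ['gpt-4', 'gpt-3.5-turbo'];
const lastSelectedSecondary = 'gpt-4';

const secondaryModels = availableModels.includes(lastSelectedSecondary)
  ? [lastSelectedSecondary, ...availableModels] // last selection first, so it is preferred
  : [...availableModels]; // otherwise just the available list

// parseConvo(endpoint, lastConversationSetup, { models, secondaryModels }) can then take
// secondaryModels[0] when the setup itself defines no agentOptions.model.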
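
This is the caching sketch referenced in the "cache the models fetch" entry above. The commit's caching code is not shown in these hunks, so this is only one plausible approach, an in-memory cache with a time-to-live; the names and the TTL value are assumptions.

// Assumed approach, not the commit's code: keep the fetched model list in memory
// so repeated requests do not hit the upstream API every time.
let cachedModels: string[] | null = null;
let cachedAt = 0;
const MODELS_TTL_MS = 1000 * 60 * 30; // 30 minutes, arbitrary

async function getCachedModels(fetchModels: () => Promise<string[]>): Promise<string[]> {
  const now = Date.now();
  if (cachedModels && now - cachedAt < MODELS_TTL_MS) {
    return cachedModels;
  }
  cachedModels = await fetchModels();
  cachedAt = now;
  return cachedModels;
}
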
Danny Avila, 2023-09-18 12:55:51 -04:00 (committed via GitHub)
parent ccb46164c0
commit fd70e21732
58 changed files with 809 additions and 523 deletions


@@ -0,0 +1,64 @@
import { parseConvo } from 'librechat-data-provider';
import getLocalStorageItems from './getLocalStorageItems';
import type { TConversation, EModelEndpoint } from 'librechat-data-provider';
const buildDefaultConvo = ({
conversation,
endpoint,
models,
lastConversationSetup,
}: {
conversation: TConversation;
endpoint: EModelEndpoint;
models: string[];
lastConversationSetup: TConversation;
}) => {
const { lastSelectedModel, lastSelectedTools, lastBingSettings } = getLocalStorageItems();
const { jailbreak, toneStyle } = lastBingSettings;
if (!endpoint) {
return {
...conversation,
endpoint,
};
}
const availableModels = models;
const model = lastConversationSetup?.model ?? lastSelectedModel?.[endpoint];
const secondaryModel =
endpoint === 'gptPlugins'
? lastConversationSetup?.agentOptions?.model ?? lastSelectedModel?.secondaryModel
: null;
let possibleModels: string[], secondaryModels: string[];
if (availableModels.includes(model)) {
possibleModels = [model, ...availableModels];
} else {
possibleModels = [...availableModels];
}
if (secondaryModel && availableModels.includes(secondaryModel)) {
secondaryModels = [secondaryModel, ...availableModels];
} else {
secondaryModels = [...availableModels];
}
const convo = parseConvo(endpoint, lastConversationSetup, {
models: possibleModels,
secondaryModels,
});
const defaultConvo = {
...conversation,
...convo,
endpoint,
};
defaultConvo.tools = lastSelectedTools ?? defaultConvo.tools;
defaultConvo.jailbreak = jailbreak ?? defaultConvo.jailbreak;
defaultConvo.toneStyle = toneStyle ?? defaultConvo.toneStyle;
return defaultConvo;
};
export default buildDefaultConvo;


@@ -1,9 +1,8 @@
import { parseConvo } from 'librechat-data-provider';
import type { TEndpointsConfig, TPreset } from 'librechat-data-provider';
import type { TPreset } from 'librechat-data-provider';
type TCleanupPreset = {
preset: Partial<TPreset>;
endpointsConfig: TEndpointsConfig;
};
const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
@@ -20,9 +19,9 @@ const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
const parsedPreset = parseConvo(endpoint, _preset);
return {
endpoint,
presetId: _preset?.presetId ?? null,
...parsedPreset,
endpoint,
title: _preset?.title ?? 'New Preset',
} as TPreset;
};


@@ -1,96 +0,0 @@
import { parseConvo } from 'librechat-data-provider';
import getLocalStorageItems from './getLocalStorageItems';
import type {
TConversation,
TEndpointsConfig,
EModelEndpoint,
TConfig,
} from 'librechat-data-provider';
const defaultEndpoints = [
'openAI',
'azureOpenAI',
'bingAI',
'chatGPTBrowser',
'gptPlugins',
'google',
'anthropic',
];
const buildDefaultConversation = ({
conversation,
endpoint,
endpointsConfig,
lastConversationSetup,
}: {
conversation: TConversation;
endpoint: EModelEndpoint;
endpointsConfig: TEndpointsConfig;
lastConversationSetup: TConversation;
}) => {
const { lastSelectedModel, lastSelectedTools, lastBingSettings } = getLocalStorageItems();
const { jailbreak, toneStyle } = lastBingSettings;
if (!endpoint) {
return {
...conversation,
endpoint,
};
}
const { availableModels = [] } = endpointsConfig[endpoint] as TConfig;
const possibleModels = [lastSelectedModel[endpoint], ...availableModels];
const convo = parseConvo(endpoint, lastConversationSetup, { model: possibleModels });
const defaultConvo = {
...conversation,
...convo,
endpoint,
};
defaultConvo.tools = lastSelectedTools ?? defaultConvo.tools;
defaultConvo.jailbreak = jailbreak ?? defaultConvo.jailbreak;
defaultConvo.toneStyle = toneStyle ?? defaultConvo.toneStyle;
return defaultConvo;
};
const getDefaultConversation = ({ conversation, endpointsConfig, preset }) => {
const getEndpointFromPreset = () => {
const { endpoint: targetEndpoint } = preset || {};
if (targetEndpoint && endpointsConfig?.[targetEndpoint]) {
return targetEndpoint;
} else if (targetEndpoint) {
console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsConfig}`);
}
return null;
};
const getEndpointFromLocalStorage = () => {
try {
const { lastConversationSetup } = getLocalStorageItems();
return (
lastConversationSetup.endpoint &&
(endpointsConfig[lastConversationSetup.endpoint] ? lastConversationSetup.endpoint : null)
);
} catch (error) {
console.error(error);
return null;
}
};
const getDefaultEndpoint = () => {
return defaultEndpoints.find((e) => endpointsConfig?.[e]) || null;
};
const endpoint = getEndpointFromPreset() || getEndpointFromLocalStorage() || getDefaultEndpoint();
return buildDefaultConversation({
conversation,
endpoint,
lastConversationSetup: preset,
endpointsConfig,
});
};
export default getDefaultConversation;


@@ -0,0 +1,54 @@
import type { TConversation, TPreset, TEndpointsConfig } from 'librechat-data-provider';
import getLocalStorageItems from './getLocalStorageItems';
type TConvoSetup = Partial<TPreset> | Partial<TConversation>;
type TDefaultEndpoint = { convoSetup: TConvoSetup; endpointsConfig: TEndpointsConfig };
const defaultEndpoints = [
'openAI',
'azureOpenAI',
'bingAI',
'chatGPTBrowser',
'gptPlugins',
'google',
'anthropic',
];
const getEndpointFromSetup = (convoSetup: TConvoSetup, endpointsConfig: TEndpointsConfig) => {
const { endpoint: targetEndpoint } = convoSetup || {};
if (targetEndpoint && endpointsConfig?.[targetEndpoint]) {
return targetEndpoint;
} else if (targetEndpoint) {
console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsConfig}`);
}
return null;
};
const getEndpointFromLocalStorage = (endpointsConfig: TEndpointsConfig) => {
try {
const { lastConversationSetup } = getLocalStorageItems();
return (
lastConversationSetup.endpoint &&
(endpointsConfig[lastConversationSetup.endpoint] ? lastConversationSetup.endpoint : null)
);
} catch (error) {
console.error(error);
return null;
}
};
const getDefinedEndpoint = (endpointsConfig: TEndpointsConfig) => {
return defaultEndpoints.find((e) => Object.hasOwn(endpointsConfig ?? {}, e)) ?? 'openAI';
};
const getDefaultEndpoint = ({ convoSetup, endpointsConfig }: TDefaultEndpoint) => {
return (
getEndpointFromSetup(convoSetup, endpointsConfig) ||
getEndpointFromLocalStorage(endpointsConfig) ||
getDefinedEndpoint(endpointsConfig)
);
};
export default getDefaultEndpoint;
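
Taken together, getDefaultEndpoint and buildDefaultConvo replace the deleted getDefaultConversation above; this is the composition sketch referenced in the getDefaultConversation refactor entry. The surrounding values are placeholders, and the real wiring lives in the new hooks, which are not part of these hunks.

import getDefaultEndpoint from './getDefaultEndpoint';
import buildDefaultConvo from './buildDefaultConvo';
import type { TConversation, TEndpointsConfig, EModelEndpoint } from 'librechat-data-provider';

// Placeholder inputs; in the app these come from recoil state and the models query.
declare const conversation: TConversation;
declare const convoSetup: TConversation; // e.g. the active preset or the last conversation
declare const endpointsConfig: TEndpointsConfig;
declare const modelsByEndpoint: Record<string, string[]>;

// 1) Pick an endpoint from the setup, localStorage, or the first configured default...
const endpoint = getDefaultEndpoint({ convoSetup, endpointsConfig }) as EModelEndpoint;
// 2) ...then build the conversation against the models available for that endpoint.
const defaultConvo = buildDefaultConvo({
  conversation,
  endpoint,
  models: modelsByEndpoint[endpoint] ?? [],
  lastConversationSetup: convoSetup,
});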


@@ -7,8 +7,9 @@ export { default as getLoginError } from './getLoginError';
export { default as cleanupPreset } from './cleanupPreset';
export { default as validateIframe } from './validateIframe';
export { default as getMessageError } from './getMessageError';
export { default as buildDefaultConvo } from './buildDefaultConvo';
export { default as getDefaultEndpoint } from './getDefaultEndpoint';
export { default as getLocalStorageItems } from './getLocalStorageItems';
export { default as getDefaultConversation } from './getDefaultConversation';
export function cn(...inputs: string[]) {
return twMerge(clsx(inputs));