Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-22 03:10:15 +01:00)
feat: OpenRouter Support & Improve Model Fetching ⇆ (#936)
* chore(ChatGPTClient.js): add support for the OpenRouter API
* chore(OpenAIClient.js): add support for the OpenRouter API
* chore: comment out token debugging
* chore: add back streamResult assignment
* chore: remove double condition/assignment from merging
* refactor(routes/endpoints): move endpoint logic into controller/services
* feat: add OpenRouter model fetching
* chore: remove unused endpointsConfig in cleanupPreset function
* refactor: separate the models concern from endpointsConfig
* refactor(data-provider): add TModels type and make TEndpointsConfig adaptable to new endpoint keys
* refactor: complete the models endpoint service in data-provider
* refactor: onMutate for refreshToken and login, invalidate models query
* feat: complete models endpoint logic for the frontend
* chore: remove requireJwtAuth from /api/endpoints and /api/models as it is not implemented yet
* fix: endpoint will not be overwritten and instead uses the active value
* feat: OpenRouter support for Plugins
* chore(EndpointOptionsDialog): remove unused recoil value
* refactor(schemas/parseConvo): handle secondaryModels by using the first of the defined secondary models (the last selected one listed first), or default to the convo's secondary model value
* refactor: remove hooks from the store and move them to the hooks layer
* refactor(switchToConversation): use the latest recoil state, which is necessary to get the most up-to-date models list; replace the wrapper function
* refactor(getDefaultConversation): factor the logic into 3 pieces to reduce complexity
* fix: backend tests
* feat: optimistic update by calling newConvo when models are fetched
* feat: OpenRouter support for titling convos
* feat: cache the models fetch
* chore: add missing dep to AuthContext useEffect
* chore: fix useTimeout types
* chore: delete the old getDefaultConvo file
* chore: remove newConvo logic from Root; remove console log from api models caching
* chore: ensure bun is used for building in the b:client script
* fix: the default endpoint will no longer default to null on a completely fresh login (no localStorage/cookies)
* chore: add OpenRouter docs to free_ai_apis.md and .env.example
* chore: remove OpenRouter console logs
* feat: add debugging env variable for Plugins
parent ccb46164c0
commit fd70e21732
58 changed files with 809 additions and 523 deletions
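The headline items — OpenRouter support and improved model fetching — build on the fact that OpenRouter exposes an OpenAI-compatible REST API with a public model-list endpoint. A rough sketch of fetching and caching that list follows; the `OPENROUTER_API_KEY` variable name and the one-hour cache window are illustrative assumptions, not the commit's exact implementation.

```ts
// Illustrative sketch: fetch OpenRouter's model list and cache it in memory.
// OpenRouter is OpenAI-compatible and serves its catalog at /api/v1/models.
const OPENROUTER_BASE_URL = 'https://openrouter.ai/api/v1';
const CACHE_TTL_MS = 60 * 60 * 1000; // assumed one-hour cache window

let cached: { models: string[]; fetchedAt: number } | null = null;

export async function fetchOpenRouterModels(): Promise<string[]> {
  if (cached && Date.now() - cached.fetchedAt < CACHE_TTL_MS) {
    return cached.models; // serve the cached list instead of re-fetching
  }

  const res = await fetch(`${OPENROUTER_BASE_URL}/models`, {
    headers: { Authorization: `Bearer ${process.env.OPENROUTER_API_KEY}` },
  });
  if (!res.ok) {
    throw new Error(`OpenRouter model fetch failed: ${res.status}`);
  }

  const body = (await res.json()) as { data: { id: string }[] };
  const models = body.data.map((m) => m.id);
  cached = { models, fetchedAt: Date.now() };
  return models;
}
```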
client/src/store/conversation.ts
@@ -1,22 +1,6 @@
-import { useCallback } from 'react';
-import {
-  atom,
-  selector,
-  atomFamily,
-  useSetRecoilState,
-  useResetRecoilState,
-  useRecoilCallback,
-} from 'recoil';
-import {
-  TConversation,
-  TMessagesAtom,
-  TMessage,
-  TSubmission,
-  TPreset,
-} from 'librechat-data-provider';
-import { buildTree, getDefaultConversation } from '~/utils';
-import submission from './submission';
-import endpoints from './endpoints';
+import { atom, selector, atomFamily } from 'recoil';
+import { TConversation, TMessagesAtom, TMessage } from 'librechat-data-provider';
+import { buildTree } from '~/utils';
 
 const conversation = atom<TConversation | null>({
   key: 'conversation',
@@ -48,94 +32,10 @@ const messagesSiblingIdxFamily = atomFamily({
   default: 0,
 });
 
-const useConversation = () => {
-  const setConversation = useSetRecoilState(conversation);
-  const setMessages = useSetRecoilState<TMessagesAtom>(messages);
-  const setSubmission = useSetRecoilState<TSubmission | null>(submission.submission);
-  const resetLatestMessage = useResetRecoilState(latestMessage);
-
-  const _switchToConversation = (
-    conversation: TConversation,
-    messages: TMessagesAtom = null,
-    preset: object | null = null,
-    { endpointsConfig = {} },
-  ) => {
-    const { endpoint = null } = conversation;
-
-    if (endpoint === null) {
-      // get the default model
-      conversation = getDefaultConversation({
-        conversation,
-        endpointsConfig,
-        preset,
-      });
-    }
-
-    setConversation(conversation);
-    setMessages(messages);
-    setSubmission({} as TSubmission);
-    resetLatestMessage();
-  };
-
-  const switchToConversation = useRecoilCallback(
-    ({ snapshot }) =>
-      async (
-        _conversation: TConversation,
-        messages: TMessagesAtom = null,
-        preset: object | null = null,
-      ) => {
-        const endpointsConfig = await snapshot.getPromise(endpoints.endpointsConfig);
-        _switchToConversation(_conversation, messages, preset, {
-          endpointsConfig,
-        });
-      },
-    [],
-  );
-
-  const newConversation = useCallback(
-    (template = {}, preset?: TPreset) => {
-      switchToConversation(
-        {
-          conversationId: 'new',
-          title: 'New Chat',
-          ...template,
-          endpoint: null,
-          createdAt: '',
-          updatedAt: '',
-        },
-        [],
-        preset,
-      );
-    },
-    [switchToConversation],
-  );
-
-  const searchPlaceholderConversation = () => {
-    switchToConversation(
-      {
-        conversationId: 'search',
-        title: 'Search',
-        endpoint: null,
-        createdAt: '',
-        updatedAt: '',
-      },
-      [],
-    );
-  };
-
-  return {
-    _switchToConversation,
-    newConversation,
-    switchToConversation,
-    searchPlaceholderConversation,
-  };
-};
-
 export default {
   messages,
   conversation,
   messagesTree,
   latestMessage,
   messagesSiblingIdxFamily,
-  useConversation,
 };
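Per the commit message, the hook removed above moved out of the store into the hooks layer, where switchToConversation reads the latest recoil state so the freshest models list is available when a default conversation is built. A rough sketch of what the relocated hook could look like, assuming the store still exposes the conversation, messages, latestMessage, and endpointsConfig atoms plus the new modelsConfig atom, and that getDefaultConversation now also receives the fetched models (names and signatures are illustrative, not the commit's exact code):

```ts
// Illustrative only: approximate shape of the hook after it moved out of the store.
import { useRecoilCallback } from 'recoil';
import { TConversation, TMessagesAtom } from 'librechat-data-provider';
import { getDefaultConversation } from '~/utils';
import store from '~/store';

export default function useConversation() {
  const switchToConversation = useRecoilCallback(
    ({ snapshot, set, reset }) =>
      async (conversation: TConversation, messages: TMessagesAtom = null) => {
        // Read the most recent state so newly fetched models are taken into account.
        const modelsConfig = await snapshot.getPromise(store.modelsConfig);
        const endpointsConfig = await snapshot.getPromise(store.endpointsConfig);

        if (conversation.endpoint === null) {
          conversation = getDefaultConversation({ conversation, endpointsConfig, modelsConfig });
        }

        set(store.conversation, conversation);
        set(store.messages, messages);
        reset(store.latestMessage);
      },
    [],
  );

  return { switchToConversation };
}
```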
client/src/store/conversations.ts
@@ -1,19 +1,8 @@
-import { atom, useSetRecoilState } from 'recoil';
-import { useCallback } from 'react';
+import { atom } from 'recoil';
 
-const refreshConversationsHint = atom({
+const refreshConversationsHint = atom<number>({
   key: 'refreshConversationsHint',
   default: 1,
 });
 
-const useConversations = () => {
-  const setRefreshConversationsHint = useSetRecoilState(refreshConversationsHint);
-
-  const refreshConversations = useCallback(() => {
-    setRefreshConversationsHint((prevState) => prevState + 1);
-  }, [setRefreshConversationsHint]);
-
-  return { refreshConversations };
-};
-
-export default { refreshConversationsHint, useConversations };
+export default { refreshConversationsHint };
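The useConversations hook removed here presumably moved to the hooks layer in the same refactor; a minimal sketch under that assumption (the ~/store import path is illustrative):

```ts
// Illustrative sketch: the relocated hook, behaviorally identical to the removed one.
import { useCallback } from 'react';
import { useSetRecoilState } from 'recoil';
import store from '~/store';

export default function useConversations() {
  const setRefreshConversationsHint = useSetRecoilState(store.refreshConversationsHint);

  // Bump the hint so any listener refetches the conversation list.
  const refreshConversations = useCallback(() => {
    setRefreshConversationsHint((prevState) => prevState + 1);
  }, [setRefreshConversationsHint]);

  return { refreshConversations };
}
```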
client/src/store/index.ts
@@ -1,6 +1,7 @@
 import conversation from './conversation';
 import conversations from './conversations';
 import endpoints from './endpoints';
+import models from './models';
 import user from './user';
 import text from './text';
 import submission from './submission';
@@ -13,6 +14,7 @@ export default {
   ...conversation,
   ...conversations,
   ...endpoints,
+  ...models,
   ...user,
   ...text,
   ...submission,
client/src/store/models.ts (new file, 34 additions)
@@ -0,0 +1,34 @@
+import { atom } from 'recoil';
+import { TModelsConfig } from 'librechat-data-provider';
+const openAIModels = [
+  'gpt-3.5-turbo',
+  'gpt-3.5-turbo-16k',
+  'gpt-3.5-turbo-0301',
+  'text-davinci-003',
+  'gpt-4',
+  'gpt-4-0314',
+  'gpt-4-0613',
+];
+
+const modelsConfig = atom<TModelsConfig>({
+  key: 'models',
+  default: {
+    openAI: openAIModels,
+    gptPlugins: openAIModels,
+    azureOpenAI: openAIModels,
+    bingAI: ['BingAI', 'Sydney'],
+    chatGPTBrowser: ['text-davinci-002-render-sha'],
+    google: ['chat-bison', 'text-bison', 'codechat-bison'],
+    anthropic: [
+      'claude-1',
+      'claude-1-100k',
+      'claude-instant-1',
+      'claude-instant-1-100k',
+      'claude-2',
+    ],
+  },
+});
+
+export default {
+  modelsConfig,
+};
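Because the store's default export spreads every module, components can read this atom directly; a small illustrative helper follows (the hook name and the index-signature cast are assumptions about TModelsConfig's shape):

```ts
import { useRecoilValue } from 'recoil';
import store from '~/store';

// Illustrative helper: read the model list for a given endpoint key from the models atom.
export function useModelsForEndpoint(endpoint: string): string[] {
  const modelsConfig = useRecoilValue(store.modelsConfig) as Record<string, string[]>;
  return modelsConfig?.[endpoint] ?? [];
}
```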