From aa4fd57459ad79279375a100511a83ea49c2ccc5 Mon Sep 17 00:00:00 2001 From: Daniel Avila Date: Sun, 2 Apr 2023 14:34:12 -0400 Subject: [PATCH] feat(chatgpt-browser): add support for multiple GPT models This commit adds support for multiple GPT models in the chatGPT-browser client. The available models are now stored in a Map object, which maps the model label to its corresponding model. The commit adds a new component, ChatGPTOptions, to the client UI to allow the user to select the GPT model to use in the chat. The component is only displayed when the chatGPT-browser endpoint is selected. --- api/app/clients/chatgpt-browser.js | 16 +++--- api/server/index.js | 6 ++- api/server/routes/askChatGPTBrowser.js | 1 + api/server/routes/askOpenAI.js | 1 + .../components/Input/ChatGPTOptions/index.jsx | 50 +++++++++++++++++++ client/src/components/Input/index.jsx | 2 + client/src/utils/buildPresetByConversation.js | 2 +- client/src/utils/getDefaultConversation.js | 2 +- client/src/utils/handleSubmit.js | 2 +- 9 files changed, 69 insertions(+), 13 deletions(-) create mode 100644 client/src/components/Input/ChatGPTOptions/index.jsx diff --git a/api/app/clients/chatgpt-browser.js b/api/app/clients/chatgpt-browser.js index a0c1067af1..0ba78b7c78 100644 --- a/api/app/clients/chatgpt-browser.js +++ b/api/app/clients/chatgpt-browser.js @@ -1,11 +1,11 @@ require('dotenv').config(); const { KeyvFile } = require('keyv-file'); -// const set = new Set([ -// 'gpt-4', -// 'text-davinci-002-render', -// 'text-davinci-002-render-paid', -// 'text-davinci-002-render-sha' -// ]); + +const modelMap = new Map([ + ['Default (GPT-3.5)', 'text-davinci-002-render-sha'], + ['Legacy (GPT-3.5)', 'text-davinci-002-render-paid'], + ['GPT-4', 'gpt-4'] +]); const browserClient = async ({ text, @@ -25,7 +25,7 @@ const browserClient = async ({ reverseProxyUrl: 'https://bypass.duti.tech/api/conversation', // Access token from https://chat.openai.com/api/auth/session accessToken: process.env.CHATGPT_TOKEN, - 
model, + model: modelMap.get(model), // debug: true proxy: process.env.PROXY || null }; @@ -37,7 +37,7 @@ const browserClient = async ({ options = { ...options, parentMessageId, conversationId }; } - // console.log('gptBrowser options', options, clientOptions); + // console.log('gptBrowser clientOptions', clientOptions); if (parentMessageId === '00000000-0000-0000-0000-000000000000') { delete options.conversationId; diff --git a/api/server/index.js b/api/server/index.js index 4b59191e56..8516edb945 100644 --- a/api/server/index.js +++ b/api/server/index.js @@ -67,7 +67,9 @@ const projectPath = path.join(__dirname, '..', '..', 'client'); ? { availableModels: ['gpt-4', 'text-davinci-003', 'gpt-3.5-turbo', 'gpt-3.5-turbo-0301'] } : false; const bingAI = !!process.env.BING_TOKEN; - const chatGPTBrowser = !!process.env.CHATGPT_TOKEN; + const chatGPTBrowser = process.env.CHATGPT_TOKEN + ? { availableModels: ['Default (GPT-3.5)', 'Legacy (GPT-3.5)', 'GPT-4'] } + : false; res.send(JSON.stringify({ azureOpenAI, openAI, bingAI, chatGPTBrowser })); }); @@ -86,7 +88,7 @@ const projectPath = path.join(__dirname, '..', '..', 'client'); })(); let messageCount = 0; -process.on('uncaughtException', err => { +process.on('uncaughtException', (err) => { if (!err.message.includes('fetch failed')) { console.error('There was an uncaught error:', err.message); } diff --git a/api/server/routes/askChatGPTBrowser.js b/api/server/routes/askChatGPTBrowser.js index 36ce5aad94..b7e6580ee6 100644 --- a/api/server/routes/askChatGPTBrowser.js +++ b/api/server/routes/askChatGPTBrowser.js @@ -50,6 +50,7 @@ router.post('/', async (req, res) => { }); } + // eslint-disable-next-line no-use-before-define return await ask({ userMessage, endpointOption, diff --git a/api/server/routes/askOpenAI.js b/api/server/routes/askOpenAI.js index 3b88c35dc2..eb7c2bfa07 100644 --- a/api/server/routes/askOpenAI.js +++ b/api/server/routes/askOpenAI.js @@ -56,6 +56,7 @@ router.post('/', async (req, res) => { }); } + // 
eslint-disable-next-line no-use-before-define return await ask({ userMessage, endpointOption, diff --git a/client/src/components/Input/ChatGPTOptions/index.jsx b/client/src/components/Input/ChatGPTOptions/index.jsx new file mode 100644 index 0000000000..a53e68b054 --- /dev/null +++ b/client/src/components/Input/ChatGPTOptions/index.jsx @@ -0,0 +1,50 @@ +import React, { useEffect } from 'react'; +import { useRecoilState } from 'recoil'; +import ModelDropDown from '../../ui/ModelDropDown.jsx'; +import { cn } from '~/utils/'; + +import store from '~/store'; + +function ChatGPTOptions() { + const [conversation, setConversation] = useRecoilState(store.conversation) || {}; + const { endpoint, conversationId } = conversation; + const { model } = conversation; + + console.log('ChatGPTOptions', endpoint, model); + useEffect(() => { + if (endpoint !== 'chatGPTBrowser') return; + }, [conversation]); + + if (endpoint !== 'chatGPTBrowser') return null; + if (conversationId !== 'new') return null; + + const setOption = param => newValue => { + let update = {}; + update[param] = newValue; + setConversation(prevState => ({ + ...prevState, + ...update + })); + }; + + const cardStyle = + 'transition-colors shadow-md rounded-md min-w-[75px] font-normal bg-white border-black/10 hover:border-black/10 focus:border-black/10 dark:border-black/10 dark:hover:border-black/10 dark:focus:border-black/10 border dark:bg-gray-700 text-black dark:text-white'; + + return ( +
+ +
+ ); +} + +export default ChatGPTOptions; diff --git a/client/src/components/Input/index.jsx b/client/src/components/Input/index.jsx index 0f3764a2fc..f1788d94c1 100644 --- a/client/src/components/Input/index.jsx +++ b/client/src/components/Input/index.jsx @@ -3,6 +3,7 @@ import { useRecoilValue, useRecoilState } from 'recoil'; import SubmitButton from './SubmitButton'; import AdjustToneButton from './AdjustToneButton'; import OpenAIOptions from './OpenAIOptions'; +import ChatGPTOptions from './ChatGPTOptions'; import BingAIOptions from './BingAIOptions'; // import BingStyles from './BingStyles'; import EndpointMenu from './Endpoints/NewConversationMenu'; @@ -132,6 +133,7 @@ export default function TextChat({ isSearchView = false }) {
+
diff --git a/client/src/utils/buildPresetByConversation.js b/client/src/utils/buildPresetByConversation.js index 73799f08c1..36f991068b 100644 --- a/client/src/utils/buildPresetByConversation.js +++ b/client/src/utils/buildPresetByConversation.js @@ -30,7 +30,7 @@ const buildPresetByConversation = ({ title, conversation, ...others }) => { } else if (endpoint === 'chatGPTBrowser') { preset = { endpoint, - model: conversation?.model || 'text-davinci-002-render-sha', + model: conversation?.model || 'Default (GPT-3.5)', title, ...others }; diff --git a/client/src/utils/getDefaultConversation.js b/client/src/utils/getDefaultConversation.js index 397f122ab4..c8141e02d7 100644 --- a/client/src/utils/getDefaultConversation.js +++ b/client/src/utils/getDefaultConversation.js @@ -26,7 +26,7 @@ const buildDefaultConversation = ({ conversation, endpoint, lastConversationSetu conversation = { ...conversation, endpoint, - model: lastConversationSetup?.model || 'text-davinci-002-render-sha' + model: lastConversationSetup?.model || 'Default (GPT-3.5)' }; } else if (endpoint === null) { conversation = { diff --git a/client/src/utils/handleSubmit.js b/client/src/utils/handleSubmit.js index 9a912c12a4..01a9b72495 100644 --- a/client/src/utils/handleSubmit.js +++ b/client/src/utils/handleSubmit.js @@ -50,7 +50,7 @@ const useMessageHandler = () => { } else if (endpoint === 'chatGPTBrowser') { endpointOption = { endpoint, - model: currentConversation?.model || 'text-davinci-002-render-sha' + model: currentConversation?.model || 'Default (GPT-3.5)' }; responseSender = 'ChatGPT'; } else if (endpoint === null) {