feat(chatgpt-browser): add support for multiple GPT models

This commit adds support for multiple GPT models in the chatGPT-browser
client. The available models are now stored in a Map object, which maps
each user-facing model label to its corresponding model identifier.

The commit adds a new component, ChatGPTOptions, to the client
UI to allow the user to select the GPT model to use in the chat. The
component is only displayed when the chatGPT-browser endpoint is
selected.
This commit is contained in:
Daniel Avila 2023-04-02 14:34:12 -04:00
parent eef2303c8e
commit aa4fd57459
9 changed files with 69 additions and 13 deletions

View file

@ -1,11 +1,11 @@
require('dotenv').config(); require('dotenv').config();
const { KeyvFile } = require('keyv-file'); const { KeyvFile } = require('keyv-file');
// const set = new Set([
// 'gpt-4', const modelMap = new Map([
// 'text-davinci-002-render', ['Default (GPT-3.5)', 'text-davinci-002-render-sha'],
// 'text-davinci-002-render-paid', ['Legacy (GPT-3.5)', 'text-davinci-002-render-paid'],
// 'text-davinci-002-render-sha' ['GPT-4', 'gpt-4']
// ]); ]);
const browserClient = async ({ const browserClient = async ({
text, text,
@ -25,7 +25,7 @@ const browserClient = async ({
reverseProxyUrl: 'https://bypass.duti.tech/api/conversation', reverseProxyUrl: 'https://bypass.duti.tech/api/conversation',
// Access token from https://chat.openai.com/api/auth/session // Access token from https://chat.openai.com/api/auth/session
accessToken: process.env.CHATGPT_TOKEN, accessToken: process.env.CHATGPT_TOKEN,
model, model: modelMap.get(model),
// debug: true // debug: true
proxy: process.env.PROXY || null proxy: process.env.PROXY || null
}; };
@ -37,7 +37,7 @@ const browserClient = async ({
options = { ...options, parentMessageId, conversationId }; options = { ...options, parentMessageId, conversationId };
} }
// console.log('gptBrowser options', options, clientOptions); console.log('gptBrowser clientOptions', clientOptions);
if (parentMessageId === '00000000-0000-0000-0000-000000000000') { if (parentMessageId === '00000000-0000-0000-0000-000000000000') {
delete options.conversationId; delete options.conversationId;

View file

@ -67,7 +67,9 @@ const projectPath = path.join(__dirname, '..', '..', 'client');
? { availableModels: ['gpt-4', 'text-davinci-003', 'gpt-3.5-turbo', 'gpt-3.5-turbo-0301'] } ? { availableModels: ['gpt-4', 'text-davinci-003', 'gpt-3.5-turbo', 'gpt-3.5-turbo-0301'] }
: false; : false;
const bingAI = !!process.env.BING_TOKEN; const bingAI = !!process.env.BING_TOKEN;
const chatGPTBrowser = !!process.env.CHATGPT_TOKEN; const chatGPTBrowser = process.env.OPENAI_KEY
? { availableModels: ['Default (GPT-3.5)', 'Legacy (GPT-3.5)', 'GPT-4'] }
: false;
res.send(JSON.stringify({ azureOpenAI, openAI, bingAI, chatGPTBrowser })); res.send(JSON.stringify({ azureOpenAI, openAI, bingAI, chatGPTBrowser }));
}); });
@ -86,7 +88,7 @@ const projectPath = path.join(__dirname, '..', '..', 'client');
})(); })();
let messageCount = 0; let messageCount = 0;
process.on('uncaughtException', err => { process.on('uncaughtException', (err) => {
if (!err.message.includes('fetch failed')) { if (!err.message.includes('fetch failed')) {
console.error('There was an uncaught error:', err.message); console.error('There was an uncaught error:', err.message);
} }

View file

@ -50,6 +50,7 @@ router.post('/', async (req, res) => {
}); });
} }
// eslint-disable-next-line no-use-before-define
return await ask({ return await ask({
userMessage, userMessage,
endpointOption, endpointOption,

View file

@ -56,6 +56,7 @@ router.post('/', async (req, res) => {
}); });
} }
// eslint-disable-next-line no-use-before-define
return await ask({ return await ask({
userMessage, userMessage,
endpointOption, endpointOption,

View file

@ -0,0 +1,50 @@
import React, { useEffect } from 'react';
import { useRecoilState } from 'recoil';
import ModelDropDown from '../../ui/ModelDropDown.jsx';
import { cn } from '~/utils/';
import store from '~/store';
function ChatGPTOptions() {
const [conversation, setConversation] = useRecoilState(store.conversation) || {};
const { endpoint, conversationId } = conversation;
const { model } = conversation;
console.log('ChatGPTOptions', endpoint, model);
useEffect(() => {
if (endpoint !== 'chatGPTBrowser') return;
}, [conversation]);
if (endpoint !== 'chatGPTBrowser') return null;
if (conversationId !== 'new') return null;
const setOption = param => newValue => {
let update = {};
update[param] = newValue;
setConversation(prevState => ({
...prevState,
...update
}));
};
const cardStyle =
'transition-colors shadow-md rounded-md min-w-[75px] font-normal bg-white border-black/10 hover:border-black/10 focus:border-black/10 dark:border-black/10 dark:hover:border-black/10 dark:focus:border-black/10 border dark:bg-gray-700 text-black dark:text-white';
return (
<div className="openAIOptions-simple-container flex w-full items-center justify-center gap-2 show">
<ModelDropDown
model={model}
setModel={setOption('model')}
endpoint="chatGPTBrowser"
showAbove={true}
showLabel={false}
className={cn(
cardStyle,
'min-w-48 z-50 flex h-[40px] w-48 items-center justify-center px-4 ring-0 hover:cursor-pointer hover:bg-slate-50 focus:ring-0 focus:ring-offset-0 data-[state=open]:bg-slate-50 dark:bg-gray-700 dark:hover:bg-gray-600 dark:data-[state=open]:bg-gray-600'
)}
/>
</div>
);
}
export default ChatGPTOptions;

View file

@ -3,6 +3,7 @@ import { useRecoilValue, useRecoilState } from 'recoil';
import SubmitButton from './SubmitButton'; import SubmitButton from './SubmitButton';
import AdjustToneButton from './AdjustToneButton'; import AdjustToneButton from './AdjustToneButton';
import OpenAIOptions from './OpenAIOptions'; import OpenAIOptions from './OpenAIOptions';
import ChatGPTOptions from './ChatGPTOptions';
import BingAIOptions from './BingAIOptions'; import BingAIOptions from './BingAIOptions';
// import BingStyles from './BingStyles'; // import BingStyles from './BingStyles';
import EndpointMenu from './Endpoints/NewConversationMenu'; import EndpointMenu from './Endpoints/NewConversationMenu';
@ -132,6 +133,7 @@ export default function TextChat({ isSearchView = false }) {
<div className="relative py-2 md:mb-[-16px] md:py-4 lg:mb-[-32px]"> <div className="relative py-2 md:mb-[-16px] md:py-4 lg:mb-[-32px]">
<span className="ml-1 flex flex-col items-center justify-center gap-0 md:order-none md:m-auto md:w-full md:gap-2"> <span className="ml-1 flex flex-col items-center justify-center gap-0 md:order-none md:m-auto md:w-full md:gap-2">
<OpenAIOptions /> <OpenAIOptions />
<ChatGPTOptions />
<BingAIOptions /> <BingAIOptions />
</span> </span>
</div> </div>

View file

@ -30,7 +30,7 @@ const buildPresetByConversation = ({ title, conversation, ...others }) => {
} else if (endpoint === 'chatGPTBrowser') { } else if (endpoint === 'chatGPTBrowser') {
preset = { preset = {
endpoint, endpoint,
model: conversation?.model || 'text-davinci-002-render-sha', model: conversation?.model || 'Default (GPT-3.5)',
title, title,
...others ...others
}; };

View file

@ -26,7 +26,7 @@ const buildDefaultConversation = ({ conversation, endpoint, lastConversationSetu
conversation = { conversation = {
...conversation, ...conversation,
endpoint, endpoint,
model: lastConversationSetup?.model || 'text-davinci-002-render-sha' model: lastConversationSetup?.model || 'Default (GPT-3.5)'
}; };
} else if (endpoint === null) { } else if (endpoint === null) {
conversation = { conversation = {

View file

@ -50,7 +50,7 @@ const useMessageHandler = () => {
} else if (endpoint === 'chatGPTBrowser') { } else if (endpoint === 'chatGPTBrowser') {
endpointOption = { endpointOption = {
endpoint, endpoint,
model: currentConversation?.model || 'text-davinci-002-render-sha' model: currentConversation?.model || 'Default (GPT-3.5)'
}; };
responseSender = 'ChatGPT'; responseSender = 'ChatGPT';
} else if (endpoint === null) { } else if (endpoint === null) {