feat: update env example

feat: support OPENAI_REVERSE_PROXY
feat: support set availModels in env file

fix: refactor chatGPT Browser send logic

fix: wrong usage of responseMessage in title
BREAKING: some env parameters have been changed!
Wentao Lyu 2023-04-05 21:21:02 +08:00
parent a5202f84cc
commit 22b9524ad3
19 changed files with 259 additions and 197 deletions

View file

@@ -20,13 +20,18 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
const setPresets = useSetRecoilState(store.presets);
const availableEndpoints = useRecoilValue(store.availableEndpoints);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const setOption = param => newValue => {
let update = {};
update[param] = newValue;
setPreset(prevState =>
cleanupPreset({
...prevState,
...update
preset: {
...prevState,
...update
},
endpointsFilter
})
);
};
@@ -38,7 +43,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
axios({
method: 'post',
url: '/api/presets',
data: cleanupPreset(preset),
data: cleanupPreset({ preset, endpointsFilter }),
withCredentials: true
}).then(res => {
setPresets(res?.data);
@@ -47,7 +52,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
const exportPreset = () => {
exportFromJSON({
data: cleanupPreset(preset),
data: cleanupPreset({ preset, endpointsFilter }),
fileName: `${preset?.title}.json`,
exportType: exportFromJSON.types.json
});
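For reference, EditPresetDialog now wraps the preset in an object and passes the active endpointsFilter alongside it when calling cleanupPreset. A minimal usage sketch follows; the filter value is illustrative, and in the component it comes from useRecoilValue(store.endpointsFilter):

// Illustrative call only; the sample values are assumptions, not project config.
import cleanupPreset from '~/utils/cleanupPreset';

const preset = { endpoint: 'openAI', title: 'My Preset' };
const endpointsFilter = { openAI: { availableModels: ['gpt-3.5-turbo'] } };

// old: cleanupPreset(preset)
const cleaned = cleanupPreset({ preset, endpointsFilter });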

View file

@@ -1,4 +1,5 @@
import React, { useEffect, useState } from 'react';
import { useRecoilValue } from 'recoil';
import exportFromJSON from 'export-from-json';
import DialogTemplate from '../ui/DialogTemplate.jsx';
import { Dialog, DialogButton } from '../ui/Dialog.tsx';
@@ -7,12 +8,15 @@ import cleanupPreset from '~/utils/cleanupPreset';
import Settings from './Settings';
import store from '~/store';
// A preset dialog to show readonly preset values.
const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) => {
// const [title, setTitle] = useState('My Preset');
const [preset, setPreset] = useState(_preset);
const [saveAsDialogShow, setSaveAsDialogShow] = useState(false);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const setOption = param => newValue => {
let update = {};
@@ -29,7 +33,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) =
const exportPreset = () => {
exportFromJSON({
data: cleanupPreset(preset),
data: cleanupPreset({ preset, endpointsFilter }),
fileName: `${preset?.title}.json`,
exportType: exportFromJSON.types.json
});

View file

@@ -1,5 +1,5 @@
import React, { useEffect, useState } from 'react';
import { useSetRecoilState } from 'recoil';
import { useSetRecoilState, useRecoilValue } from 'recoil';
import axios from 'axios';
import DialogTemplate from '../ui/DialogTemplate';
import { Dialog } from '../ui/Dialog.tsx';
@@ -13,14 +13,18 @@ import store from '~/store';
const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => {
const [title, setTitle] = useState(preset?.title || 'My Preset');
const setPresets = useSetRecoilState(store.presets);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const defaultTextProps =
'rounded-md border border-gray-300 bg-transparent text-sm shadow-[0_0_10px_rgba(0,0,0,0.10)] outline-none placeholder:text-gray-400 focus:outline-none focus:ring-gray-400 focus:ring-opacity-20 focus:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 dark:border-gray-400 dark:bg-gray-700 dark:text-gray-50 dark:shadow-[0_0_15px_rgba(0,0,0,0.10)] dark:focus:border-gray-400 dark:focus:outline-none dark:focus:ring-0 dark:focus:ring-gray-400 dark:focus:ring-offset-0';
const submitPreset = () => {
const _preset = cleanupPreset({
...preset,
title
preset: {
...preset,
title
},
endpointsFilter
});
axios({

View file

@@ -21,6 +21,12 @@ function ChatGPTOptions() {
const models = endpointsConfig?.['chatGPTBrowser']?.['availableModels'] || [];
// const modelMap = new Map([
// ['Default (GPT-3.5)', 'text-davinci-002-render-sha'],
// ['Legacy (GPT-3.5)', 'text-davinci-002-render-paid'],
// ['GPT-4', 'gpt-4']
// ]);
const setOption = param => newValue => {
let update = {};
update[param] = newValue;
@@ -43,7 +49,7 @@ function ChatGPTOptions() {
showLabel={false}
className={cn(
cardStyle,
'min-w-48 z-50 flex h-[40px] w-48 flex-none items-center justify-center px-4 ring-0 hover:cursor-pointer hover:bg-slate-50 focus:ring-0 focus:ring-offset-0 data-[state=open]:bg-slate-50 dark:bg-gray-700 dark:hover:bg-gray-600 dark:data-[state=open]:bg-gray-600'
'z-50 flex h-[40px] w-[260px] min-w-[260px] flex-none items-center justify-center px-4 ring-0 hover:cursor-pointer hover:bg-slate-50 focus:ring-0 focus:ring-offset-0 data-[state=open]:bg-slate-50 dark:bg-gray-700 dark:hover:bg-gray-600 dark:data-[state=open]:bg-gray-600'
)}
/>
</div>

View file

@@ -1,9 +1,9 @@
import React from 'react';
import { useSetRecoilState } from 'recoil';
import { FileUp } from 'lucide-react';
import store from '~/store';
import axios from 'axios';
import cleanupPreset from '~/utils/cleanupPreset.js';
import { useRecoilValue } from 'recoil';
import store from '~/store';
// async function fetchPresets(callback) {
// try {
@@ -21,6 +21,7 @@ import cleanupPreset from '~/utils/cleanupPreset.js';
const FileUpload = ({ onFileSelected }) => {
// const setPresets = useSetRecoilState(store.presets);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const handleFileChange = event => {
const file = event.target.files[0];
@@ -29,7 +30,7 @@ const FileUpload = ({ onFileSelected }) => {
const reader = new FileReader();
reader.onload = e => {
const jsonData = JSON.parse(e.target.result);
onFileSelected({ ...cleanupPreset(jsonData), presetId: null });
onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsFilter }), presetId: null });
};
reader.readAsText(file);
};

View file

@@ -1,4 +1,4 @@
const cleanupPreset = _preset => {
const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
const { endpoint } = _preset;
let preset = {};
@@ -6,7 +6,7 @@ const cleanupPreset = _preset => {
preset = {
endpoint,
presetId: _preset?.presetId ?? null,
model: _preset?.model ?? 'gpt-3.5-turbo',
model: _preset?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
chatGptLabel: _preset?.chatGptLabel ?? null,
promptPrefix: _preset?.promptPrefix ?? null,
temperature: _preset?.temperature ?? 1,
@@ -25,11 +25,12 @@ const cleanupPreset = _preset => {
toneStyle: _preset?.toneStyle ?? 'fast',
title: _preset?.title ?? 'New Preset'
};
} else if (endpoint === 'chatGPTBrowser') {
} else if (endpoint === 'chatGPT') {
preset = {
endpoint,
presetId: _preset?.presetId ?? null,
model: _preset?.model ?? 'Default (GPT-3.5)',
model:
_preset?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'text-davinci-002-render-sha',
title: _preset?.title ?? 'New Preset'
};
} else if (endpoint === null) {
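The default-model fallback added here resolves in three steps: the preset's own model first, then the first entry of availableModels for the preset's endpoint, then a hard-coded default. A small sketch of that chain with illustrative values:

// Mirrors the `??` chain in the hunk above; the filter contents are examples only.
const endpoint = 'openAI';
const endpointsFilter = { openAI: { availableModels: ['gpt-4'] } };
const _preset = { endpoint };

const model =
  _preset?.model ??                                  // 1. model set on the preset itself
  endpointsFilter[endpoint]?.availableModels?.[0] ?? // 2. first model configured for the endpoint
  'gpt-3.5-turbo';                                   // 3. hard-coded fallback
// model === 'gpt-4' in this example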

View file

@@ -1,9 +1,15 @@
const buildDefaultConversation = ({ conversation, endpoint, lastConversationSetup = {} }) => {
const buildDefaultConversation = ({
conversation,
endpoint,
endpointsFilter = {},
lastConversationSetup = {}
}) => {
if (endpoint === 'azureOpenAI' || endpoint === 'openAI') {
conversation = {
...conversation,
endpoint,
model: lastConversationSetup?.model ?? 'gpt-3.5-turbo',
model:
lastConversationSetup?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
chatGptLabel: lastConversationSetup?.chatGptLabel ?? null,
promptPrefix: lastConversationSetup?.promptPrefix ?? null,
temperature: lastConversationSetup?.temperature ?? 1,
@@ -28,7 +34,10 @@ const buildDefaultConversation = ({ conversation, endpoint, lastConversationSetu
conversation = {
...conversation,
endpoint,
model: lastConversationSetup?.model ?? 'Default (GPT-3.5)'
model:
lastConversationSetup?.model ??
endpointsFilter[endpoint]?.availableModels?.[0] ??
'text-davinci-002-render-sha'
};
} else if (endpoint === null) {
conversation = {
@@ -56,7 +65,8 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
conversation = buildDefaultConversation({
conversation,
endpoint,
lastConversationSetup: preset
lastConversationSetup: preset,
endpointsFilter
});
return conversation;
} else {
@@ -72,7 +82,8 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
// conversation = buildDefaultConversation({
// conversation,
// endpoint,
// lastConversationSetup: prevConversation
// lastConversationSetup: prevConversation,
// endpointsFilter
// });
// return conversation;
// }
@@ -84,7 +95,7 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
const { endpoint = null } = lastConversationSetup;
if (endpointsFilter?.[endpoint]) {
conversation = buildDefaultConversation({ conversation, endpoint });
conversation = buildDefaultConversation({ conversation, endpoint, endpointsFilter });
return conversation;
}
} catch (error) {}
@@ -93,10 +104,10 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
const endpoint = ['openAI', 'azureOpenAI', 'bingAI', 'chatGPTBrowser'].find(e => endpointsFilter?.[e]);
if (endpoint) {
conversation = buildDefaultConversation({ conversation, endpoint });
conversation = buildDefaultConversation({ conversation, endpoint, endpointsFilter });
return conversation;
} else {
conversation = buildDefaultConversation({ conversation, endpoint: null });
conversation = buildDefaultConversation({ conversation, endpoint: null, endpointsFilter });
return conversation;
}
};
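buildDefaultConversation now accepts endpointsFilter as well, and getDefaultConversation threads it through every call, so new conversations default to the first configured model instead of a hard-coded one. A hedged usage sketch of the new parameter; the model list is an example, and the branch hit is assumed to be the chatGPTBrowser one:

// Sketch only; demonstrates the extra endpointsFilter argument.
const conversation = buildDefaultConversation({
  conversation: {},
  endpoint: 'chatGPTBrowser',
  endpointsFilter: { chatGPTBrowser: { availableModels: ['text-davinci-002-render-sha'] } }
  // lastConversationSetup omitted, so model falls back to availableModels[0]
});
// conversation.model === 'text-davinci-002-render-sha'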

View file

@@ -7,6 +7,7 @@ const useMessageHandler = () => {
const currentConversation = useRecoilValue(store.conversation) || {};
const setSubmission = useSetRecoilState(store.submission);
const isSubmitting = useRecoilValue(store.isSubmitting);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const latestMessage = useRecoilValue(store.latestMessage);
@@ -27,7 +28,8 @@ const useMessageHandler = () => {
if (endpoint === 'azureOpenAI' || endpoint === 'openAI') {
endpointOption = {
endpoint,
model: currentConversation?.model ?? 'gpt-3.5-turbo',
model:
currentConversation?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
chatGptLabel: currentConversation?.chatGptLabel ?? null,
promptPrefix: currentConversation?.promptPrefix ?? null,
temperature: currentConversation?.temperature ?? 1,
@@ -52,7 +54,10 @@ const useMessageHandler = () => {
} else if (endpoint === 'chatGPTBrowser') {
endpointOption = {
endpoint,
model: currentConversation?.model ?? 'Default (GPT-3.5)'
model:
currentConversation?.model ??
endpointsFilter[endpoint]?.availableModels?.[0] ??
'text-davinci-002-render-sha'
};
responseSender = 'ChatGPT';
} else if (endpoint === null) {
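All of these call sites assume the same shape for store.endpointsFilter: entries keyed by endpoint name that may carry an availableModels list. An illustration of that assumed shape, inferred from the endpointsFilter[endpoint]?.availableModels?.[0] lookups above rather than from the store definition itself:

// Assumed shape only; the model names and endpoints listed here are examples.
const endpointsFilter = {
  openAI: { availableModels: ['gpt-3.5-turbo', 'gpt-4'] },
  azureOpenAI: { availableModels: ['gpt-3.5-turbo'] },
  bingAI: true, // an endpoint can be enabled without exposing a model list
  chatGPTBrowser: { availableModels: ['text-davinci-002-render-sha', 'gpt-4'] }
};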