feat: support user-provided token to bingAI and chatgptBrowser

This commit is contained in:
Wentao Lyu 2023-04-10 00:41:34 +08:00
parent a953fc9f2b
commit bbf2f8a6ca
22 changed files with 309 additions and 86 deletions

View file

@ -25,8 +25,9 @@ MONGO_URI="mongodb://127.0.0.1:27017/chatgpt-clone"
OPENAI_KEY=
# Identify the available models, separated by commas, with no spaces between them
# The first will be default
# Leave it blank to use internal settings.
# OPENAI_MODELS=gpt-4,text-davinci-003,gpt-3.5-turbo,gpt-3.5-turbo-0301
OPENAI_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-0301,text-davinci-003,gpt-4
# Reverse proxy setting for OpenAI
# https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy
@ -39,6 +40,8 @@ OPENAI_KEY=
# BingAI Tokens: the "_U" cookies value from bing.com
# Leave it and BINGAI_USER_TOKEN blank to disable this endpoint.
# Set to "user_provide" to allow user provided token.
# BINGAI_TOKEN="user_provide"
BINGAI_TOKEN=
# BingAI Host:
@ -46,12 +49,6 @@ BINGAI_TOKEN=
# Leave it blank to use default server.
# BINGAI_HOST="https://cn.bing.com"
# BingAI User defined Token
# Allow user to set their own token by client
# Uncomment this to enable this feature.
# (Not implemented yet.)
# BINGAI_USER_TOKEN=1
#############################
# Endpoint chatGPT:
@ -61,11 +58,14 @@ BINGAI_TOKEN=
# Access token from https://chat.openai.com/api/auth/session
# Exposes your access token to CHATGPT_REVERSE_PROXY
# Leave it blank to disable this endpoint
# Set to "user_provide" to allow user provided token.
# CHATGPT_TOKEN="user_provide"
CHATGPT_TOKEN=
# Identify the available models, separated by commas, with no spaces between them
# The first will be default
# Leave it blank to use internal settings.
# CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4
CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4
# Reverse proxy setting for OpenAI
# https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy

View file

@ -13,6 +13,7 @@ const askBing = async ({
clientId,
invocationId,
toneStyle,
token,
onProgress
}) => {
const { BingAIClient } = await import('@waylaidwanderer/chatgpt-api');
@ -22,7 +23,7 @@ const askBing = async ({
const bingAIClient = new BingAIClient({
// "_U" cookie from bing.com
userToken: process.env.BINGAI_TOKEN,
userToken: process.env.BINGAI_TOKEN == 'user_provide' ? token : process.env.BINGAI_TOKEN ?? null,
// If the above doesn't work, provide all your cookies as a string instead
// cookies: '',
debug: false,

View file

@ -6,6 +6,7 @@ const browserClient = async ({
parentMessageId,
conversationId,
model,
token,
onProgress,
abortController
}) => {
@ -18,7 +19,7 @@ const browserClient = async ({
// Warning: This will expose your access token to a third party. Consider the risks before using this.
reverseProxyUrl: process.env.CHATGPT_REVERSE_PROXY || 'https://bypass.churchless.tech/api/conversation',
// Access token from https://chat.openai.com/api/auth/session
accessToken: process.env.CHATGPT_TOKEN,
accessToken: process.env.CHATGPT_TOKEN == 'user_provide' ? token : process.env.CHATGPT_TOKEN ?? null,
model: model,
// debug: true
proxy: process.env.PROXY || null

View file

@ -39,7 +39,8 @@ router.post('/', async (req, res) => {
jailbreakConversationId: req.body?.jailbreakConversationId ?? null,
systemMessage: req.body?.systemMessage ?? null,
context: req.body?.context ?? null,
toneStyle: req.body?.toneStyle ?? 'fast'
toneStyle: req.body?.toneStyle ?? 'fast',
token: req.body?.token ?? null
};
else
endpointOption = {
@ -49,7 +50,8 @@ router.post('/', async (req, res) => {
conversationSignature: req.body?.conversationSignature ?? null,
clientId: req.body?.clientId ?? null,
invocationId: req.body?.invocationId ?? null,
toneStyle: req.body?.toneStyle ?? 'fast'
toneStyle: req.body?.toneStyle ?? 'fast',
token: req.body?.token ?? null
};
console.log('ask log', {

View file

@ -33,7 +33,8 @@ router.post('/', async (req, res) => {
// build endpoint option
const endpointOption = {
model: req.body?.model ?? 'text-davinci-002-render-sha'
model: req.body?.model ?? 'text-davinci-002-render-sha',
token: req.body?.token ?? null
};
const availableModels = getChatGPTBrowserModels();

View file

@ -18,8 +18,15 @@ const getChatGPTBrowserModels = () => {
router.get('/', function (req, res) {
const azureOpenAI = !!process.env.AZURE_OPENAI_KEY;
const openAI = process.env.OPENAI_KEY ? { availableModels: getOpenAIModels() } : false;
const bingAI = !!process.env.BINGAI_TOKEN;
const chatGPTBrowser = process.env.CHATGPT_TOKEN ? { availableModels: getChatGPTBrowserModels() } : false;
const bingAI = process.env.BINGAI_TOKEN
? { userProvide: process.env.BINGAI_TOKEN == 'user_provide' }
: false;
const chatGPTBrowser = process.env.CHATGPT_TOKEN
? {
userProvide: process.env.CHATGPT_TOKEN == 'user_provide',
availableModels: getChatGPTBrowserModels()
}
: false;
res.send(JSON.stringify({ azureOpenAI, openAI, bingAI, chatGPTBrowser }));
});

View file

@ -21,7 +21,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
const setPresets = useSetRecoilState(store.presets);
const availableEndpoints = useRecoilValue(store.availableEndpoints);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const setOption = param => newValue => {
let update = {};
@ -32,7 +32,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
...prevState,
...update
},
endpointsFilter
endpointsConfig
})
);
};
@ -44,7 +44,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
axios({
method: 'post',
url: '/api/presets',
data: cleanupPreset({ preset, endpointsFilter }),
data: cleanupPreset({ preset, endpointsConfig }),
withCredentials: true
}).then(res => {
setPresets(res?.data);
@ -54,7 +54,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
const exportPreset = () => {
const fileName = filenamify(preset?.title || 'preset');
exportFromJSON({
data: cleanupPreset({ preset, endpointsFilter }),
data: cleanupPreset({ preset, endpointsConfig }),
fileName,
exportType: exportFromJSON.types.json
});

View file

@ -16,7 +16,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) =
const [preset, setPreset] = useState(_preset);
const [saveAsDialogShow, setSaveAsDialogShow] = useState(false);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const setOption = param => newValue => {
let update = {};
@ -33,7 +33,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) =
const exportPreset = () => {
exportFromJSON({
data: cleanupPreset({ preset, endpointsFilter }),
data: cleanupPreset({ preset, endpointsConfig }),
fileName: `${preset?.title}.json`,
exportType: exportFromJSON.types.json
});

View file

@ -1,4 +1,4 @@
import { useEffect, useState } from 'react';
import React, { useEffect, useState } from 'react';
import { useRecoilValue } from 'recoil';
import DialogTemplate from '../ui/DialogTemplate';
import { Dialog } from '../ui/Dialog.tsx';
@ -11,7 +11,7 @@ import store from '~/store';
const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => {
const [title, setTitle] = useState(preset?.title || 'My Preset');
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const createPresetMutation = useCreatePresetMutation();
const defaultTextProps =
@ -23,7 +23,7 @@ const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => {
...preset,
title
},
endpointsFilter
endpointsConfig
});
createPresetMutation.mutate(_preset);
};

View file

@ -1,8 +1,16 @@
import React from 'react';
import React, { useState } from 'react';
import { DropdownMenuRadioItem } from '../../ui/DropdownMenu.tsx';
import { Settings } from 'lucide-react';
import getIcon from '~/utils/getIcon';
import { useRecoilValue } from 'recoil';
import SetTokenDialog from '../SetTokenDialog';
import store from '../../../store';
export default function ModelItem({ endpoint, value, onSelect }) {
const [setTokenDialogOpen, setSetTokenDialogOpen] = useState(false);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const icon = getIcon({
size: 20,
endpoint,
@ -10,15 +18,37 @@ export default function ModelItem({ endpoint, value, onSelect }) {
className: 'mr-2'
});
const isuserProvide = endpointsConfig?.[endpoint]?.userProvide;
// regular model
return (
<DropdownMenuRadioItem
value={value}
className="dark:font-semibold dark:text-gray-100 dark:hover:bg-gray-800"
>
{icon}
{endpoint}
{!!['azureOpenAI', 'openAI'].find(e => e === endpoint) && <sup>$</sup>}
</DropdownMenuRadioItem>
<>
<DropdownMenuRadioItem
value={value}
className="group dark:font-semibold dark:text-gray-100 dark:hover:bg-gray-800"
>
{icon}
{endpoint}
{!!['azureOpenAI', 'openAI'].find(e => e === endpoint) && <sup>$</sup>}
<div className="flex w-4 flex-1" />
{isuserProvide ? (
<button
className="invisible m-0 mr-1 flex-initial rounded-md p-0 text-xs font-medium text-gray-400 hover:text-gray-700 group-hover:visible dark:font-normal dark:text-gray-400 dark:hover:text-gray-200"
onClick={e => {
e.preventDefault();
setSetTokenDialogOpen(true);
}}
>
<Settings className="mr-1 inline-block w-[16px] items-center stroke-1" />
Config Token
</button>
) : null}
</DropdownMenuRadioItem>
<SetTokenDialog
open={setTokenDialogOpen}
onOpenChange={setSetTokenDialogOpen}
endpoint={endpoint}
/>
</>
);
}

View file

@ -7,7 +7,7 @@ import store from '~/store';
const FileUpload = ({ onFileSelected }) => {
// const setPresets = useSetRecoilState(store.presets);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const handleFileChange = event => {
const file = event.target.files[0];
@ -16,7 +16,7 @@ const FileUpload = ({ onFileSelected }) => {
const reader = new FileReader();
reader.onload = e => {
const jsonData = JSON.parse(e.target.result);
onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsFilter }), presetId: null });
onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsConfig }), presetId: null });
};
reader.readAsText(file);
};
@ -24,10 +24,10 @@ const FileUpload = ({ onFileSelected }) => {
return (
<label
htmlFor="file-upload"
className=" mr-1 flex h-auto cursor-pointer items-center rounded bg-transparent px-2 py-1 text-xs font-medium font-normal text-gray-600 hover:bg-slate-200 hover:text-green-700 dark:bg-transparent dark:text-gray-300 dark:hover:bg-gray-800 dark:hover:text-green-500"
className=" mr-1 flex h-auto cursor-pointer items-center rounded bg-transparent px-2 py-1 text-xs font-medium font-normal text-gray-600 transition-colors hover:bg-slate-200 hover:text-green-700 dark:bg-transparent dark:text-gray-300 dark:hover:bg-gray-800 dark:hover:text-green-500"
>
<FileUp className="flex w-[22px] items-center stroke-1" />
<span className="ml-1 flex text-xs ">Import</span>
<FileUp className="mr-1 flex w-[22px] items-center stroke-1" />
<span className="flex text-xs ">Import</span>
<input
id="file-upload"
value=""

View file

@ -1,8 +1,9 @@
import { useState, useEffect } from 'react';
import React, { useState, useEffect } from 'react';
import { useRecoilValue, useRecoilState } from 'recoil';
import EditPresetDialog from '../../Endpoints/EditPresetDialog';
import EndpointItems from './EndpointItems';
import PresetItems from './PresetItems';
import { Trash2 } from 'lucide-react';
import FileUpload from './FileUpload';
import getIcon from '~/utils/getIcon';
import { useDeletePresetMutation, useCreatePresetMutation } from '~/data-provider';
@ -36,14 +37,17 @@ export default function NewConversationMenu() {
const createPresetMutation = useCreatePresetMutation();
const importPreset = jsonData => {
createPresetMutation.mutate({...jsonData}, {
onSuccess: (data) => {
setPresets(data);
},
onError: (error) => {
console.error('Error uploading the preset:', error);
createPresetMutation.mutate(
{ ...jsonData },
{
onSuccess: data => {
setPresets(data);
},
onError: error => {
console.error('Error uploading the preset:', error);
}
}
})
);
};
// update the default model when availableModels changes
@ -85,11 +89,11 @@ export default function NewConversationMenu() {
};
const clearAllPresets = () => {
deletePresetsMutation.mutate({arg: {}});
deletePresetsMutation.mutate({ arg: {} });
};
const onDeletePreset = preset => {
deletePresetsMutation.mutate({arg: preset});
deletePresetsMutation.mutate({ arg: preset });
};
const icon = getIcon({
@ -146,12 +150,18 @@ export default function NewConversationMenu() {
<FileUpload onFileSelected={importPreset} />
<Dialog>
<DialogTrigger asChild>
<Button
<label
htmlFor="file-upload"
className=" mr-1 flex h-[32px] h-auto cursor-pointer items-center rounded bg-transparent px-2 py-1 text-xs font-medium font-normal text-gray-600 transition-colors hover:bg-slate-200 hover:text-red-700 dark:bg-transparent dark:text-gray-300 dark:hover:bg-gray-800 dark:hover:text-green-500"
>
{/* <Button
type="button"
className="h-auto bg-transparent px-2 py-1 text-xs font-medium font-normal text-red-700 hover:bg-slate-200 hover:text-red-700 dark:bg-transparent dark:text-red-400 dark:hover:bg-gray-800 dark:hover:text-red-400"
>
> */}
<Trash2 className="mr-1 flex w-[22px] items-center stroke-1" />
Clear All
</Button>
{/* </Button> */}
</label>
</DialogTrigger>
<DialogTemplate
title="Clear presets"

View file

@ -0,0 +1,102 @@
import React, { useEffect, useState } from 'react';
import { useRecoilValue } from 'recoil';
import DialogTemplate from '../../ui/DialogTemplate';
import { Dialog } from '../../ui/Dialog.tsx';
import { Input } from '../../ui/Input.tsx';
import { Label } from '../../ui/Label.tsx';
import { cn } from '~/utils/';
import cleanupPreset from '~/utils/cleanupPreset';
import { useCreatePresetMutation } from '~/data-provider';
import store from '~/store';
const SetTokenDialog = ({ open, onOpenChange, endpoint }) => {
const [token, setToken] = useState('');
const { getToken, saveToken } = store.useToken(endpoint);
const defaultTextProps =
'rounded-md border border-gray-300 bg-transparent text-sm shadow-[0_0_10px_rgba(0,0,0,0.10)] outline-none placeholder:text-gray-400 focus:outline-none focus:ring-gray-400 focus:ring-opacity-20 focus:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 dark:border-gray-400 dark:bg-gray-700 dark:text-gray-50 dark:shadow-[0_0_15px_rgba(0,0,0,0.10)] dark:focus:border-gray-400 dark:focus:outline-none dark:focus:ring-0 dark:focus:ring-gray-400 dark:focus:ring-offset-0';
const submit = () => {
saveToken(token);
onOpenChange(false);
};
useEffect(() => {
setToken(getToken() ?? '');
}, [open]);
const helpText = {
bingAI: (
<small className="break-all text-gray-600">
'The Bing Access Token is the "_U" cookie from bing.com. Use dev tools or an extension while logged
into the site to view it.'
</small>
),
chatGPTBrowser: (
<small className="break-all text-gray-600">
To get your Access token For ChatGPT 'Free Version', login to{' '}
<a
target="_blank"
href="https://chat.openai.com"
rel="noreferrer"
className="text-blue-600 underline"
>
https://chat.openai.com
</a>
, then visit{' '}
<a
target="_blank"
href="https://chat.openai.com/api/auth/session"
rel="noreferrer"
className="text-blue-600 underline"
>
https://chat.openai.com/api/auth/session
</a>
. Copy access token.
</small>
)
};
return (
<Dialog
open={open}
onOpenChange={onOpenChange}
>
<DialogTemplate
title={`Set Token of ${endpoint}`}
main={
<div className="grid w-full items-center gap-2">
<Label
htmlFor="chatGptLabel"
className="text-left text-sm font-medium"
>
Token Name
<br />
</Label>
<Input
id="chatGptLabel"
value={token || ''}
onChange={e => setToken(e.target.value || '')}
placeholder="Set the token."
className={cn(
defaultTextProps,
'flex h-10 max-h-10 w-full resize-none px-3 py-2 focus:outline-none focus:ring-0 focus:ring-opacity-0 focus:ring-offset-0'
)}
/>
<small className="text-red-600">
Your token will be send to the server, but we won't save it.
</small>
{helpText?.[endpoint]}
</div>
}
selection={{
selectHandler: submit,
selectClasses: 'bg-green-600 hover:bg-green-700 dark:hover:bg-green-800 text-white',
selectText: 'Submit'
}}
/>
</Dialog>
);
};
export default SetTokenDialog;

View file

@ -1,12 +1,31 @@
import React from 'react';
import React, { useState } from 'react';
import StopGeneratingIcon from '../svg/StopGeneratingIcon';
import { Settings } from 'lucide-react';
import SetTokenDialog from './SetTokenDialog';
import store from '../../store';
export default function SubmitButton({
endpoint,
submitMessage,
handleStopGenerating,
disabled,
isSubmitting,
endpointsConfig
}) {
const [setTokenDialogOpen, setSetTokenDialogOpen] = useState(false);
const { getToken } = store.useToken(endpoint);
const isTokenProvided = endpointsConfig?.[endpoint]?.userProvide ? !!getToken() : true;
export default function SubmitButton({ submitMessage, handleStopGenerating, disabled, isSubmitting }) {
const clickHandler = e => {
e.preventDefault();
submitMessage();
};
const setToken = () => {
setSetTokenDialogOpen(true);
};
if (isSubmitting)
return (
<button
@ -42,7 +61,27 @@ export default function SubmitButton({ submitMessage, handleStopGenerating, disa
// </div>
// </button>
// );
else
else if (!isTokenProvided) {
return (
<>
<button
onClick={setToken}
type="button"
className="group absolute bottom-0 right-0 flex h-[100%] w-auto items-center justify-center bg-transparent p-1 text-gray-500"
>
<div className="m-1 mr-0 rounded-md p-2 pt-[10px] pb-[10px] align-middle text-xs group-hover:bg-gray-100 group-disabled:hover:bg-transparent dark:group-hover:bg-gray-900 dark:group-hover:text-gray-400 dark:group-disabled:hover:bg-transparent">
<Settings className="mr-1 inline-block w-[18px]" />
Set Token First
</div>
</button>
<SetTokenDialog
open={setTokenDialogOpen}
onOpenChange={setSetTokenDialogOpen}
endpoint={endpoint}
/>
</>
);
} else
return (
<button
onClick={clickHandler}

View file

@ -22,6 +22,7 @@ export default function TextChat({ isSearchView = false }) {
const [text, setText] = useRecoilState(store.text);
// const [text, setText] = useState('');
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const isSubmitting = useRecoilValue(store.isSubmitting);
// TODO: do we need this?
@ -62,7 +63,7 @@ export default function TextChat({ isSearchView = false }) {
setText('');
};
const handleStopGenerating = (e) => {
const handleStopGenerating = e => {
e.preventDefault();
stopGenerating();
};
@ -169,6 +170,8 @@ export default function TextChat({ isSearchView = false }) {
handleStopGenerating={handleStopGenerating}
disabled={disabled || isNotAppendable}
isSubmitting={isSubmitting}
endpointsConfig={endpointsConfig}
endpoint={conversation?.endpoint}
/>
{latestMessage && conversation?.jailbreak && conversation.endpoint === 'bingAI' ? (
<AdjustToneButton onClick={handleBingToneSetting} />

View file

@ -27,7 +27,7 @@ export default function ExportModel({ open, onOpenChange }) {
const conversation = useRecoilValue(store.conversation) || {};
const messagesTree = useRecoilValue(store.messagesTree) || [];
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const getSiblingIdx = useRecoilCallback(
({ snapshot }) =>
@ -164,7 +164,7 @@ export default function ExportModel({ open, onOpenChange }) {
if (includeOptions) {
data += `\n## Options\n`;
const options = cleanupPreset({ preset: conversation, endpointsFilter });
const options = cleanupPreset({ preset: conversation, endpointsConfig });
for (const key of Object.keys(options)) {
data += `- ${key}: ${options[key]}\n`;
@ -203,7 +203,7 @@ export default function ExportModel({ open, onOpenChange }) {
if (includeOptions) {
data += `\nOptions\n########################\n`;
const options = cleanupPreset({ preset: conversation, endpointsFilter });
const options = cleanupPreset({ preset: conversation, endpointsConfig });
for (const key of Object.keys(options)) {
data += `${key}: ${options[key]}\n`;
@ -241,7 +241,7 @@ export default function ExportModel({ open, onOpenChange }) {
recursive: recursive
};
if (includeOptions) data.options = cleanupPreset({ preset: conversation, endpointsFilter });
if (includeOptions) data.options = cleanupPreset({ preset: conversation, endpointsConfig });
const messages = await buildMessageTree({
messageId: conversation?.conversationId,

View file

@ -79,9 +79,9 @@ const useConversation = () => {
({ snapshot }) =>
async (_conversation, messages = null, preset = null) => {
const prevConversation = await snapshot.getPromise(conversation);
const endpointsFilter = await snapshot.getPromise(endpoints.endpointsFilter);
const endpointsConfig = await snapshot.getPromise(endpoints.endpointsConfig);
_switchToConversation(_conversation, messages, preset, {
endpointsFilter,
endpointsConfig,
prevConversation
});
},
@ -92,7 +92,7 @@ const useConversation = () => {
conversation,
messages = null,
preset = null,
{ endpointsFilter = {}, prevConversation = {} }
{ endpointsConfig = {}, prevConversation = {} }
) => {
let { endpoint = null } = conversation;
@ -100,7 +100,7 @@ const useConversation = () => {
// get the default model
conversation = getDefaultConversation({
conversation,
endpointsFilter,
endpointsConfig,
prevConversation,
preset
});

View file

@ -6,6 +6,7 @@ import text from './text';
import submission from './submission';
import search from './search';
import preset from './preset';
import token from './token';
export default {
...conversation,
@ -15,5 +16,6 @@ export default {
...text,
...submission,
...search,
...preset
...preset,
...token
};

21
client/src/store/token.js Normal file
View file

@ -0,0 +1,21 @@
import { atom, useRecoilState } from 'recoil';

// Counter bumped on every token save; components that call useToken subscribe
// to it and therefore re-render when any token changes.
const tokenRefreshHints = atom({
  key: 'tokenRefreshHints',
  default: 1
});

/**
 * Hook exposing the user-provided token for a given endpoint, backed by
 * localStorage under the key `<endpoint>_token`.
 *
 * @param {string} endpoint - endpoint key, e.g. 'bingAI' or 'chatGPTBrowser'
 * @returns {{ token: ?string, getToken: () => ?string, saveToken: (value: string) => void }}
 */
const useToken = endpoint => {
  // The state value itself is unused; useRecoilState is called for its
  // subscription side effect (re-render on save) — do not drop the read.
  const [refreshHint, setRefreshHint] = useRecoilState(tokenRefreshHints);

  const storageKey = `${endpoint}_token`;

  // Read the current token (null when none has been stored).
  const getToken = () => localStorage.getItem(storageKey);

  // Persist a new token and notify subscribers via the refresh counter.
  const saveToken = value => {
    localStorage.setItem(storageKey, value);
    setRefreshHint(previous => previous + 1);
  };

  return { token: getToken(), getToken, saveToken };
};

export default {
  useToken
};

View file

@ -1,4 +1,4 @@
const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
const cleanupPreset = ({ preset: _preset, endpointsConfig = {} }) => {
const { endpoint } = _preset;
let preset = {};
@ -6,7 +6,7 @@ const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
preset = {
endpoint,
presetId: _preset?.presetId ?? null,
model: _preset?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
model: _preset?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
chatGptLabel: _preset?.chatGptLabel ?? null,
promptPrefix: _preset?.promptPrefix ?? null,
temperature: _preset?.temperature ?? 1,
@ -30,7 +30,7 @@ const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
endpoint,
presetId: _preset?.presetId ?? null,
model:
_preset?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'text-davinci-002-render-sha',
_preset?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'text-davinci-002-render-sha',
title: _preset?.title ?? 'New Preset'
};
} else if (endpoint === null) {

View file

@ -1,7 +1,7 @@
const buildDefaultConversation = ({
conversation,
endpoint,
endpointsFilter = {},
endpointsConfig = {},
lastConversationSetup = {}
}) => {
if (endpoint === 'azureOpenAI' || endpoint === 'openAI') {
@ -9,7 +9,7 @@ const buildDefaultConversation = ({
...conversation,
endpoint,
model:
lastConversationSetup?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
lastConversationSetup?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
chatGptLabel: lastConversationSetup?.chatGptLabel ?? null,
promptPrefix: lastConversationSetup?.promptPrefix ?? null,
temperature: lastConversationSetup?.temperature ?? 1,
@ -36,7 +36,7 @@ const buildDefaultConversation = ({
endpoint,
model:
lastConversationSetup?.model ??
endpointsFilter[endpoint]?.availableModels?.[0] ??
endpointsConfig[endpoint]?.availableModels?.[0] ??
'text-davinci-002-render-sha'
};
} else if (endpoint === null) {
@ -55,35 +55,35 @@ const buildDefaultConversation = ({
return conversation;
};
const getDefaultConversation = ({ conversation, prevConversation, endpointsFilter, preset }) => {
const getDefaultConversation = ({ conversation, prevConversation, endpointsConfig, preset }) => {
const { endpoint: targetEndpoint } = preset || {};
if (targetEndpoint) {
// try to use preset
const endpoint = targetEndpoint;
if (endpointsFilter?.[endpoint]) {
if (endpointsConfig?.[endpoint]) {
conversation = buildDefaultConversation({
conversation,
endpoint,
lastConversationSetup: preset,
endpointsFilter
endpointsConfig
});
return conversation;
} else {
console.log(endpoint);
console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsFilter}`);
console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsConfig}`);
}
}
// try {
// // try to use current model
// const { endpoint = null } = prevConversation || {};
// if (endpointsFilter?.[endpoint]) {
// if (endpointsConfig?.[endpoint]) {
// conversation = buildDefaultConversation({
// conversation,
// endpoint,
// lastConversationSetup: prevConversation,
// endpointsFilter
// endpointsConfig
// });
// return conversation;
// }
@ -94,20 +94,20 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
const lastConversationSetup = JSON.parse(localStorage.getItem('lastConversationSetup'));
const { endpoint = null } = lastConversationSetup;
if (endpointsFilter?.[endpoint]) {
conversation = buildDefaultConversation({ conversation, endpoint, endpointsFilter });
if (endpointsConfig?.[endpoint]) {
conversation = buildDefaultConversation({ conversation, endpoint, endpointsConfig });
return conversation;
}
} catch (error) {}
// if anything happens, reset to default model
const endpoint = ['openAI', 'azureOpenAI', 'bingAI', 'chatGPTBrowser'].find(e => endpointsFilter?.[e]);
const endpoint = ['openAI', 'azureOpenAI', 'bingAI', 'chatGPTBrowser'].find(e => endpointsConfig?.[e]);
if (endpoint) {
conversation = buildDefaultConversation({ conversation, endpoint, endpointsFilter });
conversation = buildDefaultConversation({ conversation, endpoint, endpointsConfig });
return conversation;
} else {
conversation = buildDefaultConversation({ conversation, endpoint: null, endpointsFilter });
conversation = buildDefaultConversation({ conversation, endpoint: null, endpointsConfig });
return conversation;
}
};

View file

@ -7,7 +7,9 @@ const useMessageHandler = () => {
const currentConversation = useRecoilValue(store.conversation) || {};
const setSubmission = useSetRecoilState(store.submission);
const isSubmitting = useRecoilValue(store.isSubmitting);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const { getToken } = store.useToken(currentConversation?.endpoint);
const latestMessage = useRecoilValue(store.latestMessage);
@ -29,7 +31,7 @@ const useMessageHandler = () => {
endpointOption = {
endpoint,
model:
currentConversation?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
currentConversation?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
chatGptLabel: currentConversation?.chatGptLabel ?? null,
promptPrefix: currentConversation?.promptPrefix ?? null,
temperature: currentConversation?.temperature ?? 1,
@ -48,7 +50,8 @@ const useMessageHandler = () => {
jailbreakConversationId: currentConversation?.jailbreakConversationId ?? null,
conversationSignature: currentConversation?.conversationSignature ?? null,
clientId: currentConversation?.clientId ?? null,
invocationId: currentConversation?.invocationId ?? 1
invocationId: currentConversation?.invocationId ?? 1,
token: endpointsConfig[endpoint]?.userProvide ? getToken() : null
};
responseSender = endpointOption.jailbreak ? 'Sydney' : 'BingAI';
} else if (endpoint === 'chatGPTBrowser') {
@ -56,8 +59,9 @@ const useMessageHandler = () => {
endpoint,
model:
currentConversation?.model ??
endpointsFilter[endpoint]?.availableModels?.[0] ??
'text-davinci-002-render-sha'
endpointsConfig[endpoint]?.availableModels?.[0] ??
'text-davinci-002-render-sha',
token: endpointsConfig[endpoint]?.userProvide ? getToken() : null
};
responseSender = 'ChatGPT';
} else if (endpoint === null) {