diff --git a/api/.env.example b/api/.env.example
index 4fa5a432a1..83da008446 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -25,8 +25,9 @@ MONGO_URI="mongodb://127.0.0.1:27017/chatgpt-clone"
OPENAI_KEY=
# Identify the available models, sperate by comma, and not space in it
+# The first model in the list will be used as the default
# Leave it blank to use internal settings.
-# OPENAI_MODELS=gpt-4,text-davinci-003,gpt-3.5-turbo,gpt-3.5-turbo-0301
+OPENAI_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-0301,text-davinci-003,gpt-4
# Reverse proxy setting for OpenAI
# https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy
@@ -39,6 +40,8 @@ OPENAI_KEY=
# BingAI Tokens: the "_U" cookies value from bing.com
# Leave it and BINGAI_USER_TOKEN blank to disable this endpoint.
+# Set to "user_provide" to allow the user to provide their own token from the client.
+# BINGAI_TOKEN="user_provide"
BINGAI_TOKEN=
# BingAI Host:
@@ -46,12 +49,6 @@ BINGAI_TOKEN=
# Leave it blank to use default server.
# BINGAI_HOST="https://cn.bing.com"
-# BingAI User defined Token
-# Allow user to set their own token by client
-# Uncomment this to enable this feature.
-# (Not implemented yet.)
-# BINGAI_USER_TOKEN=1
-
#############################
# Endpoint chatGPT:
@@ -61,11 +58,14 @@ BINGAI_TOKEN=
# Access token from https://chat.openai.com/api/auth/session
# Exposes your access token to CHATGPT_REVERSE_PROXY
# Leave it blank to disable this endpoint
+# Set to "user_provide" to allow user provided token.
+# CHATGPT_TOKEN="user_provide"
CHATGPT_TOKEN=
# Identify the available models, sperate by comma, and not space in it
+# The first model in the list will be used as the default
# Leave it blank to use internal settings.
-# CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4
+CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4
# Reverse proxy setting for OpenAI
# https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy
diff --git a/api/app/clients/bingai.js b/api/app/clients/bingai.js
index ca790ec43d..700c7b72a9 100644
--- a/api/app/clients/bingai.js
+++ b/api/app/clients/bingai.js
@@ -13,6 +13,7 @@ const askBing = async ({
clientId,
invocationId,
toneStyle,
+ token,
onProgress
}) => {
const { BingAIClient } = await import('@waylaidwanderer/chatgpt-api');
@@ -22,7 +23,7 @@ const askBing = async ({
const bingAIClient = new BingAIClient({
// "_U" cookie from bing.com
- userToken: process.env.BINGAI_TOKEN,
+ userToken: process.env.BINGAI_TOKEN == 'user_provide' ? token : process.env.BINGAI_TOKEN ?? null,
// If the above doesn't work, provide all your cookies as a string instead
// cookies: '',
debug: false,
diff --git a/api/app/clients/chatgpt-browser.js b/api/app/clients/chatgpt-browser.js
index 4eb50a1476..0844e5c173 100644
--- a/api/app/clients/chatgpt-browser.js
+++ b/api/app/clients/chatgpt-browser.js
@@ -6,6 +6,7 @@ const browserClient = async ({
parentMessageId,
conversationId,
model,
+ token,
onProgress,
abortController
}) => {
@@ -18,7 +19,7 @@ const browserClient = async ({
// Warning: This will expose your access token to a third party. Consider the risks before using this.
reverseProxyUrl: process.env.CHATGPT_REVERSE_PROXY || 'https://bypass.churchless.tech/api/conversation',
// Access token from https://chat.openai.com/api/auth/session
- accessToken: process.env.CHATGPT_TOKEN,
+ accessToken: process.env.CHATGPT_TOKEN == 'user_provide' ? token : process.env.CHATGPT_TOKEN ?? null,
model: model,
// debug: true
proxy: process.env.PROXY || null
diff --git a/api/server/routes/ask/askBingAI.js b/api/server/routes/ask/askBingAI.js
index 05c2220285..9f8d9bc878 100644
--- a/api/server/routes/ask/askBingAI.js
+++ b/api/server/routes/ask/askBingAI.js
@@ -39,7 +39,8 @@ router.post('/', async (req, res) => {
jailbreakConversationId: req.body?.jailbreakConversationId ?? null,
systemMessage: req.body?.systemMessage ?? null,
context: req.body?.context ?? null,
- toneStyle: req.body?.toneStyle ?? 'fast'
+ toneStyle: req.body?.toneStyle ?? 'fast',
+ token: req.body?.token ?? null
};
else
endpointOption = {
@@ -49,7 +50,8 @@ router.post('/', async (req, res) => {
conversationSignature: req.body?.conversationSignature ?? null,
clientId: req.body?.clientId ?? null,
invocationId: req.body?.invocationId ?? null,
- toneStyle: req.body?.toneStyle ?? 'fast'
+ toneStyle: req.body?.toneStyle ?? 'fast',
+ token: req.body?.token ?? null
};
console.log('ask log', {
diff --git a/api/server/routes/ask/askChatGPTBrowser.js b/api/server/routes/ask/askChatGPTBrowser.js
index 4592ab98b4..4e416e7c4a 100644
--- a/api/server/routes/ask/askChatGPTBrowser.js
+++ b/api/server/routes/ask/askChatGPTBrowser.js
@@ -33,7 +33,8 @@ router.post('/', async (req, res) => {
// build endpoint option
const endpointOption = {
- model: req.body?.model ?? 'text-davinci-002-render-sha'
+ model: req.body?.model ?? 'text-davinci-002-render-sha',
+ token: req.body?.token ?? null
};
const availableModels = getChatGPTBrowserModels();
diff --git a/api/server/routes/endpoints.js b/api/server/routes/endpoints.js
index bcdd051e2f..70ce661428 100644
--- a/api/server/routes/endpoints.js
+++ b/api/server/routes/endpoints.js
@@ -18,8 +18,15 @@ const getChatGPTBrowserModels = () => {
router.get('/', function (req, res) {
const azureOpenAI = !!process.env.AZURE_OPENAI_KEY;
const openAI = process.env.OPENAI_KEY ? { availableModels: getOpenAIModels() } : false;
- const bingAI = !!process.env.BINGAI_TOKEN;
- const chatGPTBrowser = process.env.CHATGPT_TOKEN ? { availableModels: getChatGPTBrowserModels() } : false;
+ const bingAI = process.env.BINGAI_TOKEN
+ ? { userProvide: process.env.BINGAI_TOKEN == 'user_provide' }
+ : false;
+ const chatGPTBrowser = process.env.CHATGPT_TOKEN
+ ? {
+ userProvide: process.env.CHATGPT_TOKEN == 'user_provide',
+ availableModels: getChatGPTBrowserModels()
+ }
+ : false;
res.send(JSON.stringify({ azureOpenAI, openAI, bingAI, chatGPTBrowser }));
});
diff --git a/client/src/components/Endpoints/EditPresetDialog.jsx b/client/src/components/Endpoints/EditPresetDialog.jsx
index e264920772..c9d618dbd2 100644
--- a/client/src/components/Endpoints/EditPresetDialog.jsx
+++ b/client/src/components/Endpoints/EditPresetDialog.jsx
@@ -21,7 +21,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
const setPresets = useSetRecoilState(store.presets);
const availableEndpoints = useRecoilValue(store.availableEndpoints);
- const endpointsFilter = useRecoilValue(store.endpointsFilter);
+ const endpointsConfig = useRecoilValue(store.endpointsConfig);
const setOption = param => newValue => {
let update = {};
@@ -32,7 +32,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
...prevState,
...update
},
- endpointsFilter
+ endpointsConfig
})
);
};
@@ -44,7 +44,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
axios({
method: 'post',
url: '/api/presets',
- data: cleanupPreset({ preset, endpointsFilter }),
+ data: cleanupPreset({ preset, endpointsConfig }),
withCredentials: true
}).then(res => {
setPresets(res?.data);
@@ -54,7 +54,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
const exportPreset = () => {
const fileName = filenamify(preset?.title || 'preset');
exportFromJSON({
- data: cleanupPreset({ preset, endpointsFilter }),
+ data: cleanupPreset({ preset, endpointsConfig }),
fileName,
exportType: exportFromJSON.types.json
});
diff --git a/client/src/components/Endpoints/EndpointOptionsDialog.jsx b/client/src/components/Endpoints/EndpointOptionsDialog.jsx
index dc62499584..17b9528f64 100644
--- a/client/src/components/Endpoints/EndpointOptionsDialog.jsx
+++ b/client/src/components/Endpoints/EndpointOptionsDialog.jsx
@@ -16,7 +16,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) =
const [preset, setPreset] = useState(_preset);
const [saveAsDialogShow, setSaveAsDialogShow] = useState(false);
- const endpointsFilter = useRecoilValue(store.endpointsFilter);
+ const endpointsConfig = useRecoilValue(store.endpointsConfig);
const setOption = param => newValue => {
let update = {};
@@ -33,7 +33,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) =
const exportPreset = () => {
exportFromJSON({
- data: cleanupPreset({ preset, endpointsFilter }),
+ data: cleanupPreset({ preset, endpointsConfig }),
fileName: `${preset?.title}.json`,
exportType: exportFromJSON.types.json
});
diff --git a/client/src/components/Endpoints/SaveAsPresetDialog.jsx b/client/src/components/Endpoints/SaveAsPresetDialog.jsx
index 9fd51153e9..9e85b568cd 100644
--- a/client/src/components/Endpoints/SaveAsPresetDialog.jsx
+++ b/client/src/components/Endpoints/SaveAsPresetDialog.jsx
@@ -1,4 +1,4 @@
-import { useEffect, useState } from 'react';
+import React, { useEffect, useState } from 'react';
import { useRecoilValue } from 'recoil';
import DialogTemplate from '../ui/DialogTemplate';
import { Dialog } from '../ui/Dialog.tsx';
@@ -11,7 +11,7 @@ import store from '~/store';
const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => {
const [title, setTitle] = useState(preset?.title || 'My Preset');
- const endpointsFilter = useRecoilValue(store.endpointsFilter);
+ const endpointsConfig = useRecoilValue(store.endpointsConfig);
const createPresetMutation = useCreatePresetMutation();
const defaultTextProps =
@@ -23,7 +23,7 @@ const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => {
...preset,
title
},
- endpointsFilter
+ endpointsConfig
});
createPresetMutation.mutate(_preset);
};
diff --git a/client/src/components/Input/NewConversationMenu/EndpointItem.jsx b/client/src/components/Input/NewConversationMenu/EndpointItem.jsx
index a783750227..690e120e82 100644
--- a/client/src/components/Input/NewConversationMenu/EndpointItem.jsx
+++ b/client/src/components/Input/NewConversationMenu/EndpointItem.jsx
@@ -1,8 +1,16 @@
-import React from 'react';
+import React, { useState } from 'react';
import { DropdownMenuRadioItem } from '../../ui/DropdownMenu.tsx';
+import { Settings } from 'lucide-react';
import getIcon from '~/utils/getIcon';
+import { useRecoilValue } from 'recoil';
+import SetTokenDialog from '../SetTokenDialog';
+
+import store from '../../../store';
export default function ModelItem({ endpoint, value, onSelect }) {
+ const [setTokenDialogOpen, setSetTokenDialogOpen] = useState(false);
+ const endpointsConfig = useRecoilValue(store.endpointsConfig);
+
const icon = getIcon({
size: 20,
endpoint,
@@ -10,15 +18,37 @@ export default function ModelItem({ endpoint, value, onSelect }) {
className: 'mr-2'
});
+ const isuserProvide = endpointsConfig?.[endpoint]?.userProvide;
+
// regular model
return (
-
- {icon}
- {endpoint}
- {!!['azureOpenAI', 'openAI'].find(e => e === endpoint) && $}
-
+ <>
+
+ {icon}
+ {endpoint}
+ {!!['azureOpenAI', 'openAI'].find(e => e === endpoint) && $}
+
+ {isuserProvide ? (
+
+ ) : null}
+
+
+ >
);
}
diff --git a/client/src/components/Input/NewConversationMenu/FileUpload.jsx b/client/src/components/Input/NewConversationMenu/FileUpload.jsx
index 69f2bf1dba..15b8e1a743 100644
--- a/client/src/components/Input/NewConversationMenu/FileUpload.jsx
+++ b/client/src/components/Input/NewConversationMenu/FileUpload.jsx
@@ -7,7 +7,7 @@ import store from '~/store';
const FileUpload = ({ onFileSelected }) => {
// const setPresets = useSetRecoilState(store.presets);
- const endpointsFilter = useRecoilValue(store.endpointsFilter);
+ const endpointsConfig = useRecoilValue(store.endpointsConfig);
const handleFileChange = event => {
const file = event.target.files[0];
@@ -16,7 +16,7 @@ const FileUpload = ({ onFileSelected }) => {
const reader = new FileReader();
reader.onload = e => {
const jsonData = JSON.parse(e.target.result);
- onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsFilter }), presetId: null });
+ onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsConfig }), presetId: null });
};
reader.readAsText(file);
};
@@ -24,10 +24,10 @@ const FileUpload = ({ onFileSelected }) => {
return (
{
+ const [token, setToken] = useState('');
+ const { getToken, saveToken } = store.useToken(endpoint);
+
+ const defaultTextProps =
+ 'rounded-md border border-gray-300 bg-transparent text-sm shadow-[0_0_10px_rgba(0,0,0,0.10)] outline-none placeholder:text-gray-400 focus:outline-none focus:ring-gray-400 focus:ring-opacity-20 focus:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 dark:border-gray-400 dark:bg-gray-700 dark:text-gray-50 dark:shadow-[0_0_15px_rgba(0,0,0,0.10)] dark:focus:border-gray-400 dark:focus:outline-none dark:focus:ring-0 dark:focus:ring-gray-400 dark:focus:ring-offset-0';
+
+ const submit = () => {
+ saveToken(token);
+ onOpenChange(false);
+ };
+
+ useEffect(() => {
+ setToken(getToken() ?? '');
+ }, [open]);
+
+ const helpText = {
+ bingAI: (
+
+ 'The Bing Access Token is the "_U" cookie from bing.com. Use dev tools or an extension while logged
+ into the site to view it.'
+
+ ),
+ chatGPTBrowser: (
+
+ To get your Access token For ChatGPT 'Free Version', login to{' '}
+
+ https://chat.openai.com
+
+ , then visit{' '}
+
+ https://chat.openai.com/api/auth/session
+
+ . Copy access token.
+
+ )
+ };
+
+ return (
+
+ );
+};
+
+export default SetTokenDialog;
diff --git a/client/src/components/Input/SubmitButton.jsx b/client/src/components/Input/SubmitButton.jsx
index 1198a4eee2..0773208d36 100644
--- a/client/src/components/Input/SubmitButton.jsx
+++ b/client/src/components/Input/SubmitButton.jsx
@@ -1,12 +1,31 @@
-import React from 'react';
+import React, { useState } from 'react';
import StopGeneratingIcon from '../svg/StopGeneratingIcon';
+import { Settings } from 'lucide-react';
+import SetTokenDialog from './SetTokenDialog';
+import store from '../../store';
+
+export default function SubmitButton({
+ endpoint,
+ submitMessage,
+ handleStopGenerating,
+ disabled,
+ isSubmitting,
+ endpointsConfig
+}) {
+ const [setTokenDialogOpen, setSetTokenDialogOpen] = useState(false);
+ const { getToken } = store.useToken(endpoint);
+
+ const isTokenProvided = endpointsConfig?.[endpoint]?.userProvide ? !!getToken() : true;
-export default function SubmitButton({ submitMessage, handleStopGenerating, disabled, isSubmitting }) {
const clickHandler = e => {
e.preventDefault();
submitMessage();
};
+ const setToken = () => {
+ setSetTokenDialogOpen(true);
+ };
+
if (isSubmitting)
return (
//
// );
- else
+ else if (!isTokenProvided) {
+ return (
+ <>
+
+
+
+ Set Token First
+
+
+
+ >
+ );
+ } else
return (
{
+ const handleStopGenerating = e => {
e.preventDefault();
stopGenerating();
};
@@ -169,6 +170,8 @@ export default function TextChat({ isSearchView = false }) {
handleStopGenerating={handleStopGenerating}
disabled={disabled || isNotAppendable}
isSubmitting={isSubmitting}
+ endpointsConfig={endpointsConfig}
+ endpoint={conversation?.endpoint}
/>
{latestMessage && conversation?.jailbreak && conversation.endpoint === 'bingAI' ? (
diff --git a/client/src/components/Nav/ExportConversation/ExportModel.jsx b/client/src/components/Nav/ExportConversation/ExportModel.jsx
index 61e9916132..9f1c503acc 100644
--- a/client/src/components/Nav/ExportConversation/ExportModel.jsx
+++ b/client/src/components/Nav/ExportConversation/ExportModel.jsx
@@ -27,7 +27,7 @@ export default function ExportModel({ open, onOpenChange }) {
const conversation = useRecoilValue(store.conversation) || {};
const messagesTree = useRecoilValue(store.messagesTree) || [];
- const endpointsFilter = useRecoilValue(store.endpointsFilter);
+ const endpointsConfig = useRecoilValue(store.endpointsConfig);
const getSiblingIdx = useRecoilCallback(
({ snapshot }) =>
@@ -164,7 +164,7 @@ export default function ExportModel({ open, onOpenChange }) {
if (includeOptions) {
data += `\n## Options\n`;
- const options = cleanupPreset({ preset: conversation, endpointsFilter });
+ const options = cleanupPreset({ preset: conversation, endpointsConfig });
for (const key of Object.keys(options)) {
data += `- ${key}: ${options[key]}\n`;
@@ -203,7 +203,7 @@ export default function ExportModel({ open, onOpenChange }) {
if (includeOptions) {
data += `\nOptions\n########################\n`;
- const options = cleanupPreset({ preset: conversation, endpointsFilter });
+ const options = cleanupPreset({ preset: conversation, endpointsConfig });
for (const key of Object.keys(options)) {
data += `${key}: ${options[key]}\n`;
@@ -241,7 +241,7 @@ export default function ExportModel({ open, onOpenChange }) {
recursive: recursive
};
- if (includeOptions) data.options = cleanupPreset({ preset: conversation, endpointsFilter });
+ if (includeOptions) data.options = cleanupPreset({ preset: conversation, endpointsConfig });
const messages = await buildMessageTree({
messageId: conversation?.conversationId,
diff --git a/client/src/store/conversation.js b/client/src/store/conversation.js
index c13cbc48ec..963f08bb76 100644
--- a/client/src/store/conversation.js
+++ b/client/src/store/conversation.js
@@ -79,9 +79,9 @@ const useConversation = () => {
({ snapshot }) =>
async (_conversation, messages = null, preset = null) => {
const prevConversation = await snapshot.getPromise(conversation);
- const endpointsFilter = await snapshot.getPromise(endpoints.endpointsFilter);
+ const endpointsConfig = await snapshot.getPromise(endpoints.endpointsConfig);
_switchToConversation(_conversation, messages, preset, {
- endpointsFilter,
+ endpointsConfig,
prevConversation
});
},
@@ -92,7 +92,7 @@ const useConversation = () => {
conversation,
messages = null,
preset = null,
- { endpointsFilter = {}, prevConversation = {} }
+ { endpointsConfig = {}, prevConversation = {} }
) => {
let { endpoint = null } = conversation;
@@ -100,7 +100,7 @@ const useConversation = () => {
// get the default model
conversation = getDefaultConversation({
conversation,
- endpointsFilter,
+ endpointsConfig,
prevConversation,
preset
});
diff --git a/client/src/store/index.js b/client/src/store/index.js
index 948442531b..ecae29dbff 100644
--- a/client/src/store/index.js
+++ b/client/src/store/index.js
@@ -6,6 +6,7 @@ import text from './text';
import submission from './submission';
import search from './search';
import preset from './preset';
+import token from './token';
export default {
...conversation,
@@ -15,5 +16,6 @@ export default {
...text,
...submission,
...search,
- ...preset
+ ...preset,
+ ...token
};
diff --git a/client/src/store/token.js b/client/src/store/token.js
new file mode 100644
index 0000000000..9c397eff3a
--- /dev/null
+++ b/client/src/store/token.js
@@ -0,0 +1,21 @@
+import { atom, useRecoilState } from 'recoil';
+
+const tokenRefreshHints = atom({
+ key: 'tokenRefreshHints',
+ default: 1
+});
+
+const useToken = endpoint => {
+ const [hints, setHints] = useRecoilState(tokenRefreshHints);
+ const getToken = () => localStorage.getItem(`${endpoint}_token`);
+ const saveToken = value => {
+ localStorage.setItem(`${endpoint}_token`, value);
+ setHints(prev => prev + 1);
+ };
+
+ return { token: getToken(), getToken, saveToken };
+};
+
+export default {
+ useToken
+};
diff --git a/client/src/utils/cleanupPreset.js b/client/src/utils/cleanupPreset.js
index 8b9ae2ee91..ab962a01d4 100644
--- a/client/src/utils/cleanupPreset.js
+++ b/client/src/utils/cleanupPreset.js
@@ -1,4 +1,4 @@
-const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
+const cleanupPreset = ({ preset: _preset, endpointsConfig = {} }) => {
const { endpoint } = _preset;
let preset = {};
@@ -6,7 +6,7 @@ const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
preset = {
endpoint,
presetId: _preset?.presetId ?? null,
- model: _preset?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
+ model: _preset?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
chatGptLabel: _preset?.chatGptLabel ?? null,
promptPrefix: _preset?.promptPrefix ?? null,
temperature: _preset?.temperature ?? 1,
@@ -30,7 +30,7 @@ const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
endpoint,
presetId: _preset?.presetId ?? null,
model:
- _preset?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'text-davinci-002-render-sha',
+ _preset?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'text-davinci-002-render-sha',
title: _preset?.title ?? 'New Preset'
};
} else if (endpoint === null) {
diff --git a/client/src/utils/getDefaultConversation.js b/client/src/utils/getDefaultConversation.js
index 98a920f94b..b82e36730c 100644
--- a/client/src/utils/getDefaultConversation.js
+++ b/client/src/utils/getDefaultConversation.js
@@ -1,7 +1,7 @@
const buildDefaultConversation = ({
conversation,
endpoint,
- endpointsFilter = {},
+ endpointsConfig = {},
lastConversationSetup = {}
}) => {
if (endpoint === 'azureOpenAI' || endpoint === 'openAI') {
@@ -9,7 +9,7 @@ const buildDefaultConversation = ({
...conversation,
endpoint,
model:
- lastConversationSetup?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
+ lastConversationSetup?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
chatGptLabel: lastConversationSetup?.chatGptLabel ?? null,
promptPrefix: lastConversationSetup?.promptPrefix ?? null,
temperature: lastConversationSetup?.temperature ?? 1,
@@ -36,7 +36,7 @@ const buildDefaultConversation = ({
endpoint,
model:
lastConversationSetup?.model ??
- endpointsFilter[endpoint]?.availableModels?.[0] ??
+ endpointsConfig[endpoint]?.availableModels?.[0] ??
'text-davinci-002-render-sha'
};
} else if (endpoint === null) {
@@ -55,35 +55,35 @@ const buildDefaultConversation = ({
return conversation;
};
-const getDefaultConversation = ({ conversation, prevConversation, endpointsFilter, preset }) => {
+const getDefaultConversation = ({ conversation, prevConversation, endpointsConfig, preset }) => {
const { endpoint: targetEndpoint } = preset || {};
if (targetEndpoint) {
// try to use preset
const endpoint = targetEndpoint;
- if (endpointsFilter?.[endpoint]) {
+ if (endpointsConfig?.[endpoint]) {
conversation = buildDefaultConversation({
conversation,
endpoint,
lastConversationSetup: preset,
- endpointsFilter
+ endpointsConfig
});
return conversation;
} else {
console.log(endpoint);
- console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsFilter}`);
+ console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsConfig}`);
}
}
// try {
// // try to use current model
// const { endpoint = null } = prevConversation || {};
- // if (endpointsFilter?.[endpoint]) {
+ // if (endpointsConfig?.[endpoint]) {
// conversation = buildDefaultConversation({
// conversation,
// endpoint,
// lastConversationSetup: prevConversation,
- // endpointsFilter
+ // endpointsConfig
// });
// return conversation;
// }
@@ -94,20 +94,20 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
const lastConversationSetup = JSON.parse(localStorage.getItem('lastConversationSetup'));
const { endpoint = null } = lastConversationSetup;
- if (endpointsFilter?.[endpoint]) {
- conversation = buildDefaultConversation({ conversation, endpoint, endpointsFilter });
+ if (endpointsConfig?.[endpoint]) {
+ conversation = buildDefaultConversation({ conversation, endpoint, endpointsConfig });
return conversation;
}
} catch (error) {}
// if anything happens, reset to default model
- const endpoint = ['openAI', 'azureOpenAI', 'bingAI', 'chatGPTBrowser'].find(e => endpointsFilter?.[e]);
+ const endpoint = ['openAI', 'azureOpenAI', 'bingAI', 'chatGPTBrowser'].find(e => endpointsConfig?.[e]);
if (endpoint) {
- conversation = buildDefaultConversation({ conversation, endpoint, endpointsFilter });
+ conversation = buildDefaultConversation({ conversation, endpoint, endpointsConfig });
return conversation;
} else {
- conversation = buildDefaultConversation({ conversation, endpoint: null, endpointsFilter });
+ conversation = buildDefaultConversation({ conversation, endpoint: null, endpointsConfig });
return conversation;
}
};
diff --git a/client/src/utils/handleSubmit.js b/client/src/utils/handleSubmit.js
index 8514977eef..7f25d74768 100644
--- a/client/src/utils/handleSubmit.js
+++ b/client/src/utils/handleSubmit.js
@@ -7,7 +7,9 @@ const useMessageHandler = () => {
const currentConversation = useRecoilValue(store.conversation) || {};
const setSubmission = useSetRecoilState(store.submission);
const isSubmitting = useRecoilValue(store.isSubmitting);
- const endpointsFilter = useRecoilValue(store.endpointsFilter);
+ const endpointsConfig = useRecoilValue(store.endpointsConfig);
+
+ const { getToken } = store.useToken(currentConversation?.endpoint);
const latestMessage = useRecoilValue(store.latestMessage);
@@ -29,7 +31,7 @@ const useMessageHandler = () => {
endpointOption = {
endpoint,
model:
- currentConversation?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
+ currentConversation?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
chatGptLabel: currentConversation?.chatGptLabel ?? null,
promptPrefix: currentConversation?.promptPrefix ?? null,
temperature: currentConversation?.temperature ?? 1,
@@ -48,7 +50,8 @@ const useMessageHandler = () => {
jailbreakConversationId: currentConversation?.jailbreakConversationId ?? null,
conversationSignature: currentConversation?.conversationSignature ?? null,
clientId: currentConversation?.clientId ?? null,
- invocationId: currentConversation?.invocationId ?? 1
+ invocationId: currentConversation?.invocationId ?? 1,
+ token: endpointsConfig[endpoint]?.userProvide ? getToken() : null
};
responseSender = endpointOption.jailbreak ? 'Sydney' : 'BingAI';
} else if (endpoint === 'chatGPTBrowser') {
@@ -56,8 +59,9 @@ const useMessageHandler = () => {
endpoint,
model:
currentConversation?.model ??
- endpointsFilter[endpoint]?.availableModels?.[0] ??
- 'text-davinci-002-render-sha'
+ endpointsConfig[endpoint]?.availableModels?.[0] ??
+ 'text-davinci-002-render-sha',
+ token: endpointsConfig[endpoint]?.userProvide ? getToken() : null
};
responseSender = 'ChatGPT';
} else if (endpoint === null) {