feat: support user-provided token to bingAI and chatgptBrowser
This commit is contained in: parent a953fc9f2b, commit bbf2f8a6ca
22 changed files with 309 additions and 86 deletions
@@ -25,8 +25,9 @@ MONGO_URI="mongodb://127.0.0.1:27017/chatgpt-clone"
 OPENAI_KEY=

 # Identify the available models, separate by comma, with no spaces
+# The first will be default
 # Leave it blank to use internal settings.
-# OPENAI_MODELS=gpt-4,text-davinci-003,gpt-3.5-turbo,gpt-3.5-turbo-0301
+OPENAI_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-0301,text-davinci-003,gpt-4

 # Reverse proxy setting for OpenAI
 # https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy
@@ -39,6 +40,8 @@ OPENAI_KEY=

 # BingAI Tokens: the "_U" cookie's value from bing.com
 # Leave it and BINGAI_USER_TOKEN blank to disable this endpoint.
+# Set to "user_provide" to allow user-provided tokens.
+# BINGAI_TOKEN="user_provide"
 BINGAI_TOKEN=

 # BingAI Host:
@@ -46,12 +49,6 @@ BINGAI_TOKEN=
 # Leave it blank to use default server.
 # BINGAI_HOST="https://cn.bing.com"

-# BingAI User defined Token
-# Allow user to set their own token by client
-# Uncomment this to enable this feature.
-# (Not implemented yet.)
-# BINGAI_USER_TOKEN=1
-
 #############################
 # Endpoint chatGPT:
@@ -61,11 +58,14 @@ BINGAI_TOKEN=
 # Access token from https://chat.openai.com/api/auth/session
 # Exposes your access token to CHATGPT_REVERSE_PROXY
 # Leave it blank to disable this endpoint
+# Set to "user_provide" to allow user-provided tokens.
+# CHATGPT_TOKEN="user_provide"
 CHATGPT_TOKEN=

 # Identify the available models, separate by comma, with no spaces
+# The first will be default
 # Leave it blank to use internal settings.
-# CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4
+CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4

 # Reverse proxy setting for OpenAI
 # https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy

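With this change, either endpoint can be switched to user-supplied credentials. A minimal sketch of the relevant settings (values illustrative; the literal string "user_provide" is what the server-side checks compare against):

```env
# Each user supplies their own Bing "_U" cookie from the client
BINGAI_TOKEN="user_provide"

# Each user supplies their own ChatGPT access token from the client
CHATGPT_TOKEN="user_provide"
```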
@@ -13,6 +13,7 @@ const askBing = async ({
   clientId,
   invocationId,
   toneStyle,
+  token,
   onProgress
 }) => {
   const { BingAIClient } = await import('@waylaidwanderer/chatgpt-api');
@@ -22,7 +23,7 @@ const askBing = async ({

   const bingAIClient = new BingAIClient({
     // "_U" cookie from bing.com
-    userToken: process.env.BINGAI_TOKEN,
+    userToken: process.env.BINGAI_TOKEN == 'user_provide' ? token : process.env.BINGAI_TOKEN ?? null,
     // If the above doesn't work, provide all your cookies as a string instead
     // cookies: '',
     debug: false,

@@ -6,6 +6,7 @@ const browserClient = async ({
   parentMessageId,
   conversationId,
   model,
+  token,
   onProgress,
   abortController
 }) => {
@@ -18,7 +19,7 @@ const browserClient = async ({
     // Warning: This will expose your access token to a third party. Consider the risks before using this.
     reverseProxyUrl: process.env.CHATGPT_REVERSE_PROXY || 'https://bypass.churchless.tech/api/conversation',
     // Access token from https://chat.openai.com/api/auth/session
-    accessToken: process.env.CHATGPT_TOKEN,
+    accessToken: process.env.CHATGPT_TOKEN == 'user_provide' ? token : process.env.CHATGPT_TOKEN ?? null,
     model: model,
     // debug: true
     proxy: process.env.PROXY || null

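Both clients now pick their credential with the same rule. A minimal sketch of that rule as a standalone helper (the helper itself is not in the codebase; it only restates the ternary used above):

```js
// Use the request-supplied token only when the env var is the literal
// string 'user_provide'; otherwise fall back to the server-side env value.
const resolveToken = (envValue, userToken) =>
  envValue === 'user_provide' ? userToken : envValue ?? null;

console.log(resolveToken('user_provide', 'abc123')); // 'abc123'
console.log(resolveToken('server-token', 'abc123')); // 'server-token'
console.log(resolveToken(undefined, 'abc123'));      // null
```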
@@ -39,7 +39,8 @@ router.post('/', async (req, res) => {
       jailbreakConversationId: req.body?.jailbreakConversationId ?? null,
       systemMessage: req.body?.systemMessage ?? null,
       context: req.body?.context ?? null,
-      toneStyle: req.body?.toneStyle ?? 'fast'
+      toneStyle: req.body?.toneStyle ?? 'fast',
+      token: req.body?.token ?? null
     };
   else
     endpointOption = {
@@ -49,7 +50,8 @@ router.post('/', async (req, res) => {
       conversationSignature: req.body?.conversationSignature ?? null,
       clientId: req.body?.clientId ?? null,
       invocationId: req.body?.invocationId ?? null,
-      toneStyle: req.body?.toneStyle ?? 'fast'
+      toneStyle: req.body?.toneStyle ?? 'fast',
+      token: req.body?.token ?? null
     };

   console.log('ask log', {

@@ -33,7 +33,8 @@ router.post('/', async (req, res) => {

   // build endpoint option
   const endpointOption = {
-    model: req.body?.model ?? 'text-davinci-002-render-sha'
+    model: req.body?.model ?? 'text-davinci-002-render-sha',
+    token: req.body?.token ?? null
   };

   const availableModels = getChatGPTBrowserModels();

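On the wire, the client simply includes the token in the POST body alongside the other options. A hedged example of such a request (the route path, the full set of message fields, and all values are assumptions for illustration):

```js
// Illustrative request to the bingAI ask route when the server runs in
// "user_provide" mode; the token comes from the browser's localStorage.
const body = {
  text: 'Hello Bing',
  jailbreakConversationId: null,
  systemMessage: null,
  context: null,
  toneStyle: 'fast',
  token: localStorage.getItem('bingAI_token')
};

fetch('/api/ask/bingAI', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(body)
});
```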
@@ -18,8 +18,15 @@ const getChatGPTBrowserModels = () => {
 router.get('/', function (req, res) {
   const azureOpenAI = !!process.env.AZURE_OPENAI_KEY;
   const openAI = process.env.OPENAI_KEY ? { availableModels: getOpenAIModels() } : false;
-  const bingAI = !!process.env.BINGAI_TOKEN;
-  const chatGPTBrowser = process.env.CHATGPT_TOKEN ? { availableModels: getChatGPTBrowserModels() } : false;
+  const bingAI = process.env.BINGAI_TOKEN
+    ? { userProvide: process.env.BINGAI_TOKEN == 'user_provide' }
+    : false;
+  const chatGPTBrowser = process.env.CHATGPT_TOKEN
+    ? {
+        userProvide: process.env.CHATGPT_TOKEN == 'user_provide',
+        availableModels: getChatGPTBrowserModels()
+      }
+    : false;

   res.send(JSON.stringify({ azureOpenAI, openAI, bingAI, chatGPTBrowser }));
 });

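The client can now tell from the endpoints config response whether an endpoint expects a user-supplied token. An illustrative, parsed response under the configuration above (model lists are example values):

```js
// Example of the JSON the route above might send, after JSON.parse on the client:
const endpointsConfig = {
  azureOpenAI: false,
  openAI: { availableModels: ['gpt-3.5-turbo', 'gpt-4'] },
  bingAI: { userProvide: true },
  chatGPTBrowser: {
    userProvide: true,
    availableModels: ['text-davinci-002-render-sha', 'gpt-4']
  }
};
```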
@@ -21,7 +21,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
   const setPresets = useSetRecoilState(store.presets);

   const availableEndpoints = useRecoilValue(store.availableEndpoints);
-  const endpointsFilter = useRecoilValue(store.endpointsFilter);
+  const endpointsConfig = useRecoilValue(store.endpointsConfig);

   const setOption = param => newValue => {
     let update = {};
@@ -32,7 +32,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
         ...prevState,
         ...update
       },
-      endpointsFilter
+      endpointsConfig
     })
   );
 };
@@ -44,7 +44,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
   axios({
     method: 'post',
     url: '/api/presets',
-    data: cleanupPreset({ preset, endpointsFilter }),
+    data: cleanupPreset({ preset, endpointsConfig }),
     withCredentials: true
   }).then(res => {
     setPresets(res?.data);
@@ -54,7 +54,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
 const exportPreset = () => {
   const fileName = filenamify(preset?.title || 'preset');
   exportFromJSON({
-    data: cleanupPreset({ preset, endpointsFilter }),
+    data: cleanupPreset({ preset, endpointsConfig }),
     fileName,
     exportType: exportFromJSON.types.json
   });

@@ -16,7 +16,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) =
   const [preset, setPreset] = useState(_preset);

   const [saveAsDialogShow, setSaveAsDialogShow] = useState(false);
-  const endpointsFilter = useRecoilValue(store.endpointsFilter);
+  const endpointsConfig = useRecoilValue(store.endpointsConfig);

   const setOption = param => newValue => {
     let update = {};
@@ -33,7 +33,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) =

   const exportPreset = () => {
     exportFromJSON({
-      data: cleanupPreset({ preset, endpointsFilter }),
+      data: cleanupPreset({ preset, endpointsConfig }),
       fileName: `${preset?.title}.json`,
       exportType: exportFromJSON.types.json
     });

@@ -1,4 +1,4 @@
-import { useEffect, useState } from 'react';
+import React, { useEffect, useState } from 'react';
 import { useRecoilValue } from 'recoil';
 import DialogTemplate from '../ui/DialogTemplate';
 import { Dialog } from '../ui/Dialog.tsx';
@@ -11,7 +11,7 @@ import store from '~/store';

 const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => {
   const [title, setTitle] = useState(preset?.title || 'My Preset');
-  const endpointsFilter = useRecoilValue(store.endpointsFilter);
+  const endpointsConfig = useRecoilValue(store.endpointsConfig);
   const createPresetMutation = useCreatePresetMutation();

   const defaultTextProps =
@@ -23,7 +23,7 @@ const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => {
       ...preset,
       title
     },
-    endpointsFilter
+    endpointsConfig
   });
   createPresetMutation.mutate(_preset);
 };

@@ -1,8 +1,16 @@
-import React from 'react';
+import React, { useState } from 'react';
 import { DropdownMenuRadioItem } from '../../ui/DropdownMenu.tsx';
+import { Settings } from 'lucide-react';
 import getIcon from '~/utils/getIcon';
+import { useRecoilValue } from 'recoil';
+import SetTokenDialog from '../SetTokenDialog';
+
+import store from '../../../store';

 export default function ModelItem({ endpoint, value, onSelect }) {
+  const [setTokenDialogOpen, setSetTokenDialogOpen] = useState(false);
+  const endpointsConfig = useRecoilValue(store.endpointsConfig);
+
   const icon = getIcon({
     size: 20,
     endpoint,
@@ -10,15 +18,37 @@ export default function ModelItem({ endpoint, value, onSelect }) {
     className: 'mr-2'
   });

+  const isuserProvide = endpointsConfig?.[endpoint]?.userProvide;
+
   // regular model
   return (
+    <>
     <DropdownMenuRadioItem
       value={value}
-      className="dark:font-semibold dark:text-gray-100 dark:hover:bg-gray-800"
+      className="group dark:font-semibold dark:text-gray-100 dark:hover:bg-gray-800"
     >
       {icon}
       {endpoint}
       {!!['azureOpenAI', 'openAI'].find(e => e === endpoint) && <sup>$</sup>}
+      <div className="flex w-4 flex-1" />
+      {isuserProvide ? (
+        <button
+          className="invisible m-0 mr-1 flex-initial rounded-md p-0 text-xs font-medium text-gray-400 hover:text-gray-700 group-hover:visible dark:font-normal dark:text-gray-400 dark:hover:text-gray-200"
+          onClick={e => {
+            e.preventDefault();
+            setSetTokenDialogOpen(true);
+          }}
+        >
+          <Settings className="mr-1 inline-block w-[16px] items-center stroke-1" />
+          Config Token
+        </button>
+      ) : null}
     </DropdownMenuRadioItem>
+    <SetTokenDialog
+      open={setTokenDialogOpen}
+      onOpenChange={setSetTokenDialogOpen}
+      endpoint={endpoint}
+    />
+    </>
   );
 }

@@ -7,7 +7,7 @@ import store from '~/store';

 const FileUpload = ({ onFileSelected }) => {
   // const setPresets = useSetRecoilState(store.presets);
-  const endpointsFilter = useRecoilValue(store.endpointsFilter);
+  const endpointsConfig = useRecoilValue(store.endpointsConfig);

   const handleFileChange = event => {
     const file = event.target.files[0];
@@ -16,7 +16,7 @@ const FileUpload = ({ onFileSelected }) => {
     const reader = new FileReader();
     reader.onload = e => {
       const jsonData = JSON.parse(e.target.result);
-      onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsFilter }), presetId: null });
+      onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsConfig }), presetId: null });
     };
     reader.readAsText(file);
   };
@@ -24,10 +24,10 @@ const FileUpload = ({ onFileSelected }) => {
   return (
     <label
       htmlFor="file-upload"
-      className=" mr-1 flex h-auto cursor-pointer items-center rounded bg-transparent px-2 py-1 text-xs font-medium font-normal text-gray-600 hover:bg-slate-200 hover:text-green-700 dark:bg-transparent dark:text-gray-300 dark:hover:bg-gray-800 dark:hover:text-green-500"
+      className=" mr-1 flex h-auto cursor-pointer items-center rounded bg-transparent px-2 py-1 text-xs font-medium font-normal text-gray-600 transition-colors hover:bg-slate-200 hover:text-green-700 dark:bg-transparent dark:text-gray-300 dark:hover:bg-gray-800 dark:hover:text-green-500"
     >
-      <FileUp className="flex w-[22px] items-center stroke-1" />
-      <span className="ml-1 flex text-xs ">Import</span>
+      <FileUp className="mr-1 flex w-[22px] items-center stroke-1" />
+      <span className="flex text-xs ">Import</span>
       <input
         id="file-upload"
         value=""

@@ -1,8 +1,9 @@
-import { useState, useEffect } from 'react';
+import React, { useState, useEffect } from 'react';
 import { useRecoilValue, useRecoilState } from 'recoil';
 import EditPresetDialog from '../../Endpoints/EditPresetDialog';
 import EndpointItems from './EndpointItems';
 import PresetItems from './PresetItems';
+import { Trash2 } from 'lucide-react';
 import FileUpload from './FileUpload';
 import getIcon from '~/utils/getIcon';
 import { useDeletePresetMutation, useCreatePresetMutation } from '~/data-provider';
@@ -36,14 +37,17 @@ export default function NewConversationMenu() {
   const createPresetMutation = useCreatePresetMutation();

   const importPreset = jsonData => {
-    createPresetMutation.mutate({...jsonData}, {
-      onSuccess: (data) => {
+    createPresetMutation.mutate(
+      { ...jsonData },
+      {
+        onSuccess: data => {
           setPresets(data);
         },
-      onError: (error) => {
+        onError: error => {
           console.error('Error uploading the preset:', error);
         }
-    })
+      }
+    );
   };

   // update the default model when availableModels changes
@@ -146,12 +150,18 @@ export default function NewConversationMenu() {
         <FileUpload onFileSelected={importPreset} />
         <Dialog>
           <DialogTrigger asChild>
-            <Button
+            <label
+              htmlFor="file-upload"
+              className=" mr-1 flex h-[32px] h-auto cursor-pointer items-center rounded bg-transparent px-2 py-1 text-xs font-medium font-normal text-gray-600 transition-colors hover:bg-slate-200 hover:text-red-700 dark:bg-transparent dark:text-gray-300 dark:hover:bg-gray-800 dark:hover:text-green-500"
+            >
+              {/* <Button
               type="button"
               className="h-auto bg-transparent px-2 py-1 text-xs font-medium font-normal text-red-700 hover:bg-slate-200 hover:text-red-700 dark:bg-transparent dark:text-red-400 dark:hover:bg-gray-800 dark:hover:text-red-400"
-            >
+            > */}
+              <Trash2 className="mr-1 flex w-[22px] items-center stroke-1" />
               Clear All
-            </Button>
+              {/* </Button> */}
+            </label>
           </DialogTrigger>
           <DialogTemplate
             title="Clear presets"

client/src/components/Input/SetTokenDialog/index.jsx (new file, 102 lines)

@@ -0,0 +1,102 @@
import React, { useEffect, useState } from 'react';
import { useRecoilValue } from 'recoil';
import DialogTemplate from '../../ui/DialogTemplate';
import { Dialog } from '../../ui/Dialog.tsx';
import { Input } from '../../ui/Input.tsx';
import { Label } from '../../ui/Label.tsx';
import { cn } from '~/utils/';
import cleanupPreset from '~/utils/cleanupPreset';
import { useCreatePresetMutation } from '~/data-provider';
import store from '~/store';

const SetTokenDialog = ({ open, onOpenChange, endpoint }) => {
  const [token, setToken] = useState('');
  const { getToken, saveToken } = store.useToken(endpoint);

  const defaultTextProps =
    'rounded-md border border-gray-300 bg-transparent text-sm shadow-[0_0_10px_rgba(0,0,0,0.10)] outline-none placeholder:text-gray-400 focus:outline-none focus:ring-gray-400 focus:ring-opacity-20 focus:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 dark:border-gray-400 dark:bg-gray-700 dark:text-gray-50 dark:shadow-[0_0_15px_rgba(0,0,0,0.10)] dark:focus:border-gray-400 dark:focus:outline-none dark:focus:ring-0 dark:focus:ring-gray-400 dark:focus:ring-offset-0';

  const submit = () => {
    saveToken(token);
    onOpenChange(false);
  };

  useEffect(() => {
    setToken(getToken() ?? '');
  }, [open]);

  const helpText = {
    bingAI: (
      <small className="break-all text-gray-600">
        The Bing Access Token is the "_U" cookie from bing.com. Use dev tools or an extension while logged
        into the site to view it.
      </small>
    ),
    chatGPTBrowser: (
      <small className="break-all text-gray-600">
        To get your access token for ChatGPT 'Free Version', log in to{' '}
        <a
          target="_blank"
          href="https://chat.openai.com"
          rel="noreferrer"
          className="text-blue-600 underline"
        >
          https://chat.openai.com
        </a>
        , then visit{' '}
        <a
          target="_blank"
          href="https://chat.openai.com/api/auth/session"
          rel="noreferrer"
          className="text-blue-600 underline"
        >
          https://chat.openai.com/api/auth/session
        </a>
        . Copy the access token.
      </small>
    )
  };

  return (
    <Dialog
      open={open}
      onOpenChange={onOpenChange}
    >
      <DialogTemplate
        title={`Set Token of ${endpoint}`}
        main={
          <div className="grid w-full items-center gap-2">
            <Label
              htmlFor="chatGptLabel"
              className="text-left text-sm font-medium"
            >
              Token Name
              <br />
            </Label>
            <Input
              id="chatGptLabel"
              value={token || ''}
              onChange={e => setToken(e.target.value || '')}
              placeholder="Set the token."
              className={cn(
                defaultTextProps,
                'flex h-10 max-h-10 w-full resize-none px-3 py-2 focus:outline-none focus:ring-0 focus:ring-opacity-0 focus:ring-offset-0'
              )}
            />
            <small className="text-red-600">
              Your token will be sent to the server, but we won't save it.
            </small>
            {helpText?.[endpoint]}
          </div>
        }
        selection={{
          selectHandler: submit,
          selectClasses: 'bg-green-600 hover:bg-green-700 dark:hover:bg-green-800 text-white',
          selectText: 'Submit'
        }}
      />
    </Dialog>
  );
};

export default SetTokenDialog;

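A minimal sketch of wiring this dialog into a component (the surrounding component is hypothetical and the import path is an assumption based on the file location above):

```jsx
import React, { useState } from 'react';
import SetTokenDialog from '~/components/Input/SetTokenDialog';

export default function ConfigTokenButton({ endpoint = 'bingAI' }) {
  const [open, setOpen] = useState(false);
  return (
    <>
      <button onClick={() => setOpen(true)}>Config Token</button>
      <SetTokenDialog open={open} onOpenChange={setOpen} endpoint={endpoint} />
    </>
  );
}
```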
@@ -1,12 +1,31 @@
-import React from 'react';
+import React, { useState } from 'react';
 import StopGeneratingIcon from '../svg/StopGeneratingIcon';
+import { Settings } from 'lucide-react';
+import SetTokenDialog from './SetTokenDialog';
+import store from '../../store';

-export default function SubmitButton({ submitMessage, handleStopGenerating, disabled, isSubmitting }) {
+export default function SubmitButton({
+  endpoint,
+  submitMessage,
+  handleStopGenerating,
+  disabled,
+  isSubmitting,
+  endpointsConfig
+}) {
+  const [setTokenDialogOpen, setSetTokenDialogOpen] = useState(false);
+  const { getToken } = store.useToken(endpoint);
+
+  const isTokenProvided = endpointsConfig?.[endpoint]?.userProvide ? !!getToken() : true;
+
   const clickHandler = e => {
     e.preventDefault();
     submitMessage();
   };

+  const setToken = () => {
+    setSetTokenDialogOpen(true);
+  };
+
   if (isSubmitting)
     return (
       <button
@@ -42,7 +61,27 @@ export default function SubmitButton({ submitMessage, handleStopGenerating, disa
   // </div>
   // </button>
   // );
-  else
+  else if (!isTokenProvided) {
+    return (
+      <>
+        <button
+          onClick={setToken}
+          type="button"
+          className="group absolute bottom-0 right-0 flex h-[100%] w-auto items-center justify-center bg-transparent p-1 text-gray-500"
+        >
+          <div className="m-1 mr-0 rounded-md p-2 pt-[10px] pb-[10px] align-middle text-xs group-hover:bg-gray-100 group-disabled:hover:bg-transparent dark:group-hover:bg-gray-900 dark:group-hover:text-gray-400 dark:group-disabled:hover:bg-transparent">
+            <Settings className="mr-1 inline-block w-[18px]" />
+            Set Token First
+          </div>
+        </button>
+        <SetTokenDialog
+          open={setTokenDialogOpen}
+          onOpenChange={setSetTokenDialogOpen}
+          endpoint={endpoint}
+        />
+      </>
+    );
+  } else
     return (
       <button
         onClick={clickHandler}

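The gating rule the button applies can be summarized in one line. A sketch of it as a pure function (this helper is not part of the diff; it only restates the `isTokenProvided` expression above):

```js
// When the endpoint is configured as user-provided, a stored token must exist
// before the normal submit button is rendered; otherwise submission is allowed.
const isTokenProvided = (endpointConfig, storedToken) =>
  endpointConfig?.userProvide ? Boolean(storedToken) : true;

console.log(isTokenProvided({ userProvide: true }, null));   // false -> "Set Token First"
console.log(isTokenProvided({ userProvide: true }, 'abc'));  // true
console.log(isTokenProvided({ availableModels: [] }, null)); // true
```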
@@ -22,6 +22,7 @@ export default function TextChat({ isSearchView = false }) {
   const [text, setText] = useRecoilState(store.text);
   // const [text, setText] = useState('');

+  const endpointsConfig = useRecoilValue(store.endpointsConfig);
   const isSubmitting = useRecoilValue(store.isSubmitting);

   // TODO: do we need this?
@@ -62,7 +63,7 @@ export default function TextChat({ isSearchView = false }) {
     setText('');
   };

-  const handleStopGenerating = (e) => {
+  const handleStopGenerating = e => {
     e.preventDefault();
     stopGenerating();
   };
@@ -169,6 +170,8 @@ export default function TextChat({ isSearchView = false }) {
           handleStopGenerating={handleStopGenerating}
           disabled={disabled || isNotAppendable}
           isSubmitting={isSubmitting}
+          endpointsConfig={endpointsConfig}
+          endpoint={conversation?.endpoint}
         />
         {latestMessage && conversation?.jailbreak && conversation.endpoint === 'bingAI' ? (
           <AdjustToneButton onClick={handleBingToneSetting} />

@@ -27,7 +27,7 @@ export default function ExportModel({ open, onOpenChange }) {

   const conversation = useRecoilValue(store.conversation) || {};
   const messagesTree = useRecoilValue(store.messagesTree) || [];
-  const endpointsFilter = useRecoilValue(store.endpointsFilter);
+  const endpointsConfig = useRecoilValue(store.endpointsConfig);

   const getSiblingIdx = useRecoilCallback(
     ({ snapshot }) =>
@@ -164,7 +164,7 @@ export default function ExportModel({ open, onOpenChange }) {

     if (includeOptions) {
       data += `\n## Options\n`;
-      const options = cleanupPreset({ preset: conversation, endpointsFilter });
+      const options = cleanupPreset({ preset: conversation, endpointsConfig });

       for (const key of Object.keys(options)) {
         data += `- ${key}: ${options[key]}\n`;
@@ -203,7 +203,7 @@ export default function ExportModel({ open, onOpenChange }) {

     if (includeOptions) {
       data += `\nOptions\n########################\n`;
-      const options = cleanupPreset({ preset: conversation, endpointsFilter });
+      const options = cleanupPreset({ preset: conversation, endpointsConfig });

       for (const key of Object.keys(options)) {
         data += `${key}: ${options[key]}\n`;
@@ -241,7 +241,7 @@ export default function ExportModel({ open, onOpenChange }) {
       recursive: recursive
     };

-    if (includeOptions) data.options = cleanupPreset({ preset: conversation, endpointsFilter });
+    if (includeOptions) data.options = cleanupPreset({ preset: conversation, endpointsConfig });

     const messages = await buildMessageTree({
       messageId: conversation?.conversationId,

@@ -79,9 +79,9 @@ const useConversation = () => {
     ({ snapshot }) =>
       async (_conversation, messages = null, preset = null) => {
         const prevConversation = await snapshot.getPromise(conversation);
-        const endpointsFilter = await snapshot.getPromise(endpoints.endpointsFilter);
+        const endpointsConfig = await snapshot.getPromise(endpoints.endpointsConfig);
         _switchToConversation(_conversation, messages, preset, {
-          endpointsFilter,
+          endpointsConfig,
           prevConversation
         });
       },
@@ -92,7 +92,7 @@ const useConversation = () => {
     conversation,
     messages = null,
     preset = null,
-    { endpointsFilter = {}, prevConversation = {} }
+    { endpointsConfig = {}, prevConversation = {} }
   ) => {
     let { endpoint = null } = conversation;

@@ -100,7 +100,7 @@ const useConversation = () => {
       // get the default model
       conversation = getDefaultConversation({
         conversation,
-        endpointsFilter,
+        endpointsConfig,
         prevConversation,
         preset
       });

@@ -6,6 +6,7 @@ import text from './text';
 import submission from './submission';
 import search from './search';
 import preset from './preset';
+import token from './token';

 export default {
   ...conversation,
@@ -15,5 +16,6 @@ export default {
   ...text,
   ...submission,
   ...search,
-  ...preset
+  ...preset,
+  ...token
 };

client/src/store/token.js (new file, 21 lines)

@@ -0,0 +1,21 @@
import { atom, useRecoilState } from 'recoil';

const tokenRefreshHints = atom({
  key: 'tokenRefreshHints',
  default: 1
});

const useToken = endpoint => {
  const [hints, setHints] = useRecoilState(tokenRefreshHints);
  const getToken = () => localStorage.getItem(`${endpoint}_token`);
  const saveToken = value => {
    localStorage.setItem(`${endpoint}_token`, value);
    setHints(prev => prev + 1);
  };

  return { token: getToken(), getToken, saveToken };
};

export default {
  useToken
};

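The hook keys tokens in localStorage by endpoint name. A plain, non-Recoil sketch of the same convention, runnable in a browser console (the helper name is an assumption; only the `${endpoint}_token` key mirrors store/token.js above):

```js
const makeTokenStore = endpoint => ({
  getToken: () => localStorage.getItem(`${endpoint}_token`),
  saveToken: value => localStorage.setItem(`${endpoint}_token`, value)
});

const bing = makeTokenStore('bingAI');
bing.saveToken('example-_U-cookie-value');
console.log(bing.getToken()); // "example-_U-cookie-value"
```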
@@ -1,4 +1,4 @@
-const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
+const cleanupPreset = ({ preset: _preset, endpointsConfig = {} }) => {
   const { endpoint } = _preset;

   let preset = {};
@@ -6,7 +6,7 @@ const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
     preset = {
       endpoint,
       presetId: _preset?.presetId ?? null,
-      model: _preset?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
+      model: _preset?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
       chatGptLabel: _preset?.chatGptLabel ?? null,
       promptPrefix: _preset?.promptPrefix ?? null,
       temperature: _preset?.temperature ?? 1,
@@ -30,7 +30,7 @@ const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
       endpoint,
       presetId: _preset?.presetId ?? null,
       model:
-        _preset?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'text-davinci-002-render-sha',
+        _preset?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'text-davinci-002-render-sha',
       title: _preset?.title ?? 'New Preset'
     };
   } else if (endpoint === null) {

@@ -1,7 +1,7 @@
 const buildDefaultConversation = ({
   conversation,
   endpoint,
-  endpointsFilter = {},
+  endpointsConfig = {},
   lastConversationSetup = {}
 }) => {
   if (endpoint === 'azureOpenAI' || endpoint === 'openAI') {
@@ -9,7 +9,7 @@ const buildDefaultConversation = ({
       ...conversation,
       endpoint,
       model:
-        lastConversationSetup?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
+        lastConversationSetup?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
       chatGptLabel: lastConversationSetup?.chatGptLabel ?? null,
       promptPrefix: lastConversationSetup?.promptPrefix ?? null,
       temperature: lastConversationSetup?.temperature ?? 1,
@@ -36,7 +36,7 @@ const buildDefaultConversation = ({
       endpoint,
       model:
         lastConversationSetup?.model ??
-        endpointsFilter[endpoint]?.availableModels?.[0] ??
+        endpointsConfig[endpoint]?.availableModels?.[0] ??
         'text-davinci-002-render-sha'
     };
   } else if (endpoint === null) {
@@ -55,35 +55,35 @@ const buildDefaultConversation = ({
   return conversation;
 };

-const getDefaultConversation = ({ conversation, prevConversation, endpointsFilter, preset }) => {
+const getDefaultConversation = ({ conversation, prevConversation, endpointsConfig, preset }) => {
   const { endpoint: targetEndpoint } = preset || {};

   if (targetEndpoint) {
     // try to use preset
     const endpoint = targetEndpoint;
-    if (endpointsFilter?.[endpoint]) {
+    if (endpointsConfig?.[endpoint]) {
       conversation = buildDefaultConversation({
         conversation,
         endpoint,
         lastConversationSetup: preset,
-        endpointsFilter
+        endpointsConfig
       });
       return conversation;
     } else {
       console.log(endpoint);
-      console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsFilter}`);
+      console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsConfig}`);
     }
   }

   // try {
   //   // try to use current model
   //   const { endpoint = null } = prevConversation || {};
-  //   if (endpointsFilter?.[endpoint]) {
+  //   if (endpointsConfig?.[endpoint]) {
   //     conversation = buildDefaultConversation({
   //       conversation,
   //       endpoint,
   //       lastConversationSetup: prevConversation,
-  //       endpointsFilter
+  //       endpointsConfig
   //     });
   //     return conversation;
   //   }
@@ -94,20 +94,20 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
     const lastConversationSetup = JSON.parse(localStorage.getItem('lastConversationSetup'));
     const { endpoint = null } = lastConversationSetup;

-    if (endpointsFilter?.[endpoint]) {
-      conversation = buildDefaultConversation({ conversation, endpoint, endpointsFilter });
+    if (endpointsConfig?.[endpoint]) {
+      conversation = buildDefaultConversation({ conversation, endpoint, endpointsConfig });
       return conversation;
     }
   } catch (error) {}

   // if anything happens, reset to default model

-  const endpoint = ['openAI', 'azureOpenAI', 'bingAI', 'chatGPTBrowser'].find(e => endpointsFilter?.[e]);
+  const endpoint = ['openAI', 'azureOpenAI', 'bingAI', 'chatGPTBrowser'].find(e => endpointsConfig?.[e]);
   if (endpoint) {
-    conversation = buildDefaultConversation({ conversation, endpoint, endpointsFilter });
+    conversation = buildDefaultConversation({ conversation, endpoint, endpointsConfig });
     return conversation;
   } else {
-    conversation = buildDefaultConversation({ conversation, endpoint: null, endpointsFilter });
+    conversation = buildDefaultConversation({ conversation, endpoint: null, endpointsConfig });
     return conversation;
   }
 };

@@ -7,7 +7,9 @@ const useMessageHandler = () => {
   const currentConversation = useRecoilValue(store.conversation) || {};
   const setSubmission = useSetRecoilState(store.submission);
   const isSubmitting = useRecoilValue(store.isSubmitting);
-  const endpointsFilter = useRecoilValue(store.endpointsFilter);
+  const endpointsConfig = useRecoilValue(store.endpointsConfig);
+
+  const { getToken } = store.useToken(currentConversation?.endpoint);

   const latestMessage = useRecoilValue(store.latestMessage);

@@ -29,7 +31,7 @@ const useMessageHandler = () => {
       endpointOption = {
         endpoint,
         model:
-          currentConversation?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
+          currentConversation?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
         chatGptLabel: currentConversation?.chatGptLabel ?? null,
         promptPrefix: currentConversation?.promptPrefix ?? null,
         temperature: currentConversation?.temperature ?? 1,
@@ -48,7 +50,8 @@ const useMessageHandler = () => {
         jailbreakConversationId: currentConversation?.jailbreakConversationId ?? null,
         conversationSignature: currentConversation?.conversationSignature ?? null,
         clientId: currentConversation?.clientId ?? null,
-        invocationId: currentConversation?.invocationId ?? 1
+        invocationId: currentConversation?.invocationId ?? 1,
+        token: endpointsConfig[endpoint]?.userProvide ? getToken() : null
       };
       responseSender = endpointOption.jailbreak ? 'Sydney' : 'BingAI';
     } else if (endpoint === 'chatGPTBrowser') {
@@ -56,8 +59,9 @@ const useMessageHandler = () => {
         endpoint,
         model:
           currentConversation?.model ??
-          endpointsFilter[endpoint]?.availableModels?.[0] ??
-          'text-davinci-002-render-sha'
+          endpointsConfig[endpoint]?.availableModels?.[0] ??
+          'text-davinci-002-render-sha',
+        token: endpointsConfig[endpoint]?.userProvide ? getToken() : null
       };
       responseSender = 'ChatGPT';
     } else if (endpoint === null) {

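Put together, the message handler now attaches the locally stored token to the endpoint options it submits. An illustrative option object for the chatGPTBrowser endpoint when the server is configured with CHATGPT_TOKEN="user_provide" (values are examples only):

```js
const endpointOption = {
  endpoint: 'chatGPTBrowser',
  model: 'text-davinci-002-render-sha',
  token: localStorage.getItem('chatGPTBrowser_token') // null unless the user has set one
};
```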