Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-18 01:10:14 +01:00)
feat: endpoint setting mobile style.
feat: save and endpoint option works!
Parent: edaf7c3ad1
Commit: 46e3ef4049
8 changed files with 91 additions and 38 deletions
@@ -12,6 +12,7 @@ const askClient = async ({
   temperature,
   top_p,
   presence_penalty,
+  frequency_penalty,
   onProgress,
   abortController
 }) => {
@@ -25,7 +26,8 @@ const askClient = async ({
     model: model,
     temperature,
     top_p,
-    presence_penalty
+    presence_penalty,
+    frequency_penalty
   },
   chatGptLabel,
   promptPrefix,
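These two hunks thread the new frequency_penalty option from the helper's parameter list into the model options it builds, alongside presence_penalty. A minimal sketch of a call site under those assumptions (only the option names come from the hunks above; the helper also takes arguments, such as the prompt text, that are not shown here):

// Hypothetical call site (inside some async request handler); option names are
// taken from the diff, everything else is illustrative.
const response = await askClient({
  model: 'gpt-3.5-turbo',
  chatGptLabel: null,
  promptPrefix: null,
  temperature: 1,
  top_p: 1,
  presence_penalty: 0,
  frequency_penalty: 0, // newly accepted and forwarded by this commit
  onProgress: (partial) => console.log(partial),
  abortController: new AbortController()
});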
@@ -44,7 +44,7 @@ const convoSchema = mongoose.Schema(
     },
     temperature: {
       type: Number,
-      default: 0.8,
+      default: 1,
       required: false
     },
     top_p: {
@@ -54,7 +54,12 @@ const convoSchema = mongoose.Schema(
     },
     presence_penalty: {
       type: Number,
-      default: 1,
+      default: 0,
       required: false
     },
+    frequency_penalty: {
+      type: Number,
+      default: 0,
+      required: false
+    },
     // for bingai only
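The conversation schema now defaults temperature to 1 instead of 0.8, defaults presence_penalty to 0 instead of 1, and gains a frequency_penalty field defaulting to 0. A sketch of just the touched fields, assuming the rest of the schema stays as before:

// Only the fields touched by the hunks above; the rest of convoSchema is omitted.
const mongoose = require('mongoose');

const convoSchema = mongoose.Schema({
  // ...other conversation fields...
  temperature: { type: Number, default: 1, required: false },      // was 0.8
  presence_penalty: { type: Number, default: 0, required: false }, // was 1
  frequency_penalty: { type: Number, default: 0, required: false } // new field
});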
@@ -34,9 +34,10 @@ router.post('/', async (req, res) => {
     model: req.body?.model || 'gpt-3.5-turbo',
     chatGptLabel: req.body?.chatGptLabel || null,
     promptPrefix: req.body?.promptPrefix || null,
-    temperature: req.body?.temperature || 0.8,
+    temperature: req.body?.temperature || 1,
     top_p: req.body?.top_p || 1,
-    presence_penalty: req.body?.presence_penalty || 1
+    presence_penalty: req.body?.presence_penalty || 0,
+    frequency_penalty: req.body?.frequency_penalty || 0
   };
 
   console.log('ask log', {
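With these fallbacks, an empty request body now yields values that line up with the OpenAI API's own defaults (temperature 1, top_p 1, presence_penalty 0, frequency_penalty 0). Note that the || fallback also triggers on an explicit 0, so a user-supplied temperature of 0 still falls back to 1. Roughly, the resulting object for an empty body is:

// Result of the fallbacks above when req.body is empty (values from the diff).
const endpointOption = {
  model: 'gpt-3.5-turbo',
  chatGptLabel: null,
  promptPrefix: null,
  temperature: 1,
  top_p: 1,
  presence_penalty: 0,
  frequency_penalty: 0
};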
@@ -12,25 +12,48 @@ const defaultTextProps =
 const optionText =
   'p-0 shadow-none text-right pr-1 h-8 border-transparent focus:ring-[#10a37f] focus:ring-offset-0 focus:ring-opacity-100 hover:bg-gray-800/10 dark:hover:bg-white/10 focus:bg-gray-800/10 dark:focus:bg-white/10 transition-colors';
 
-function Settings({ isOpen }) {
+function Settings(props) {
+  const {
+    model,
+    setModel,
+    chatGptLabel,
+    setChatGptLabel,
+    promptPrefix,
+    setPromptPrefix,
+    temperature,
+    setTemperature,
+    topP,
+    setTopP,
+    freqP,
+    setFreqP,
+    presP,
+    setPresP
+  } = props;
+
+  // temperature
+  // top_p
+  // presence_penalty
+  // frequency_penalty
+  // chatGptLabel
+  // promptPrefix
   // const endpointsConfig = useRecoilValue(store.endpointsConfig);
 
   // const availableModels = endpointsConfig?.['openAI']?.['availableModels'] || [];
 
-  const [model, setModel] = useState('text-davinci-003');
-  const [chatGptLabel, setChatGptLabel] = useState('');
-  const [promptPrefix, setPromptPrefix] = useState('');
-  const [temperature, setTemperature] = useState(1);
-  const [maxTokens, setMaxTokens] = useState(2048);
-  const [topP, setTopP] = useState(1);
-  const [freqP, setFreqP] = useState(0);
-  const [presP, setPresP] = useState(0);
+  // const [model, setModel] = useState('text-davinci-003');
+  // const [chatGptLabel, setChatGptLabel] = useState('');
+  // const [promptPrefix, setPromptPrefix] = useState('');
+  // const [temperature, setTemperature] = useState(1);
+  // // const [maxTokens, setMaxTokens] = useState(2048);
+  // const [topP, setTopP] = useState(1);
+  // const [freqP, setFreqP] = useState(0);
+  // const [presP, setPresP] = useState(0);
   // const textareaRef = useRef(null);
   // const inputRef = useRef(null);
 
   return (
     <>
-      <div className="grid grid-cols-2 gap-6">
+      <div className="grid gap-6 sm:grid-cols-2">
         <div className="col-span-1 flex flex-col items-center justify-start gap-6">
           <div className="grid w-full items-center gap-2">
             <Label
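Settings stops owning its values in local useState hooks and becomes a controlled component: every value and its setter arrive through props, so the parent (OpenAIOptions, further down) is the single source of truth. A minimal sketch of that contract, using only the temperature pair from the destructuring above; TemperatureField is a hypothetical stand-in, not a component in the repo:

import React from 'react';

// Controlled-component sketch: the value and its setter both come from the parent.
function TemperatureField({ temperature, setTemperature }) {
  return (
    <input
      type="number"
      step="0.01"
      value={temperature}
      onChange={(e) => setTemperature(Number(e.target.value))}
    />
  );
}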
@@ -133,7 +156,7 @@ function Settings({ isOpen }) {
           />
         </HoverCard>
 
-        <HoverCard>
+        {/* <HoverCard>
           <HoverCardTrigger className="grid w-full items-center gap-2">
             <div className="flex justify-between">
               <Label
@@ -165,7 +188,7 @@ function Settings({ isOpen }) {
             type="max"
             side="left"
           />
-        </HoverCard>
+        </HoverCard> */}
 
         <HoverCard>
           <HoverCardTrigger className="grid w-full items-center gap-2">
@@ -10,21 +10,25 @@ import store from '~/store';
 
 function OpenAIOptions() {
   const [advancedMode, setAdvancedMode] = useState(false);
-  const [conversation, setConversation] = useRecoilState(store.conversation) || {};
+
+  const endpointsConfig = useRecoilValue(store.endpointsConfig);
+  const availableModels = endpointsConfig?.['openAI']?.['availableModels'] || [];
+
+  const [conversation, setConversation] = useRecoilState(store.conversation) || {};
   const { endpoint, conversationId } = conversation;
+  const { model, chatGptLabel, promptPrefix, temperature, top_p, presence_penalty, frequency_penalty } =
+    conversation;
 
   useEffect(() => {
-    const { endpoint, chatGptLabel, promptPrefix, temperature, top_p, presence_penalty } = conversation;
 
     if (endpoint !== 'openAI') return;
 
     const mustInAdvancedMode =
       chatGptLabel !== null ||
       promptPrefix !== null ||
-      temperature !== 0.8 ||
+      temperature !== 1 ||
       top_p !== 1 ||
-      presence_penalty !== 1;
+      presence_penalty !== 0 ||
+      frequency_penalty !== 0;
 
     if (mustInAdvancedMode && !advancedMode) setAdvancedMode(true);
   }, [conversation, advancedMode]);
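The effect now compares against the new simple-mode defaults (temperature 1, presence_penalty 0) and also treats a non-default frequency_penalty as a reason to force the advanced panel open. A condensed restatement of that predicate, assuming the same defaults; this helper is illustrative, not the repo's code:

// Condensed restatement of the effect's check.
const OPENAI_SIMPLE_DEFAULTS = { temperature: 1, top_p: 1, presence_penalty: 0, frequency_penalty: 0 };

const mustUseAdvancedMode = (conversation) =>
  conversation.chatGptLabel !== null ||
  conversation.promptPrefix !== null ||
  Object.entries(OPENAI_SIMPLE_DEFAULTS).some(([key, value]) => conversation[key] !== value);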
@@ -32,9 +36,6 @@ function OpenAIOptions() {
   if (endpoint !== 'openAI') return null;
   if (conversationId !== 'new') return null;
 
-  const { model } = conversation;
-  const availableModels = endpointsConfig?.['openAI']?.['availableModels'] || [];
-
   const triggerAdvancedMode = () => setAdvancedMode(prev => !prev);
 
   const switchToSimpleMode = () => {
@@ -42,17 +43,20 @@ function OpenAIOptions() {
       ...prevState,
       chatGptLabel: null,
       promptPrefix: null,
-      temperature: 0.8,
+      temperature: 1,
       top_p: 1,
-      presence_penalty: 1
+      presence_penalty: 0,
+      frequency_penalty: 0
     }));
     setAdvancedMode(false);
   };
 
-  const setModel = newModel => {
+  const setOption = param => newValue => {
+    let update = {};
+    update[param] = newValue;
     setConversation(prevState => ({
       ...prevState,
-      model: newModel
+      ...update
     }));
   };
 
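Instead of one setter per field, the component now uses a single curried helper: setOption('temperature') returns a function that merges { temperature: newValue } into the Recoil conversation state, so one helper covers every option. The pattern in isolation, with a plain object standing in for the Recoil atom:

// The currying pattern by itself; illustrative only, no Recoil involved.
let conversation = { model: 'gpt-3.5-turbo', temperature: 1 };

const setConversation = (updater) => {
  conversation = updater(conversation);
};

const setOption = (param) => (newValue) => {
  const update = { [param]: newValue };
  setConversation((prevState) => ({ ...prevState, ...update }));
};

setOption('temperature')(0.7);
console.log(conversation); // { model: 'gpt-3.5-turbo', temperature: 0.7 }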
@@ -70,7 +74,7 @@ function OpenAIOptions() {
             <ModelSelect
               model={model}
               availableModels={availableModels}
-              onChange={setModel}
+              onChange={setOption('model')}
               type="button"
               className={cn(
                 cardStyle,
@@ -90,7 +94,7 @@ function OpenAIOptions() {
         </div>
         <div
           className={
-            ' openAIOptions-advanced-container absolute bottom-[-10px] flex w-full flex-col items-center justify-center px-4' +
+            ' openAIOptions-advanced-container absolute bottom-[-10px] flex w-full flex-col items-center justify-center md:px-4' +
             (advancedMode ? ' show' : '')
           }
         >
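This change, together with the grid-cols-2 to sm:grid-cols-2 change in Settings above, is the "mobile style" part of the commit: Tailwind utilities are mobile-first, so unprefixed classes apply at every width while sm:/md: prefixed ones only kick in from those breakpoints (640px and 768px with the default config). An illustrative sketch, not markup from the repo:

import React from 'react';

// One stacked column with no side padding on phones, two columns from the
// sm breakpoint and horizontal padding from md upward.
function ResponsiveSketch() {
  return (
    <div className="grid gap-6 sm:grid-cols-2 md:px-4">
      <div>Left column</div>
      <div>Right column</div>
    </div>
  );
}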
@@ -111,7 +115,22 @@ function OpenAIOptions() {
           </Button>
         </div>
         <div className="px-4 py-4">
-          <Settings isOpen={advancedMode} />
+          <Settings
+            model={model}
+            setModel={setOption('model')}
+            chatGptLabel={chatGptLabel}
+            setChatGptLabel={setOption('chatGptLabel')}
+            promptPrefix={promptPrefix}
+            setPromptPrefix={setOption('promptPrefix')}
+            temperature={temperature}
+            setTemperature={setOption('temperature')}
+            topP={top_p}
+            setTopP={setOption('top_p')}
+            freqP={presence_penalty}
+            setFreqP={setOption('presence_penalty')}
+            presP={frequency_penalty}
+            setPresP={setOption('frequency_penalty')}
+          />
         </div>
       </div>
     </div>
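OpenAIOptions now feeds Settings both the current conversation values and setters pre-bound via setOption, mapping the snake_case conversation keys onto the component's camelCase props. A small sketch of that mapping exactly as the hunk wires it (note that, as written, freqP receives presence_penalty and presP receives frequency_penalty); this object is illustrative and does not exist in the repo:

// Prop name -> conversation key, as passed in the JSX above.
const settingsPropSources = {
  model: 'model',
  chatGptLabel: 'chatGptLabel',
  promptPrefix: 'promptPrefix',
  temperature: 'temperature',
  topP: 'top_p',
  freqP: 'presence_penalty',
  presP: 'frequency_penalty'
};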
@@ -16,9 +16,10 @@ import getDefaultConversation from '~/utils/getDefaultConversation';
   // // for azureOpenAI, openAI only
   // chatGptLabel: null,
   // promptPrefix: null,
-  // temperature: 0.8,
+  // temperature: 1,
   // top_p: 1,
-  // presence_penalty: 1,
+  // presence_penalty: 0,
+  // frequency_penalty: 0,
   // // for bingAI only
   // jailbreak: false,
   // jailbreakConversationId: null,
@@ -6,9 +6,10 @@ const buildDefaultConversation = ({ conversation, endpoint, lastConversationSetu
       model: lastConversationSetup?.model || 'gpt-3.5-turbo',
       chatGptLabel: lastConversationSetup?.chatGptLabel || null,
       promptPrefix: lastConversationSetup?.promptPrefix || null,
-      temperature: lastConversationSetup?.temperature || 0.8,
+      temperature: lastConversationSetup?.temperature || 1,
       top_p: lastConversationSetup?.top_p || 1,
-      presence_penalty: lastConversationSetup?.presence_penalty || 1
+      presence_penalty: lastConversationSetup?.presence_penalty || 0,
+      frequency_penalty: lastConversationSetup?.frequency_penalty || 0
     };
   } else if (endpoint === 'bingAI') {
     conversation = {
@@ -30,9 +30,10 @@ const useMessageHandler = () => {
       model: currentConversation?.model || 'gpt-3.5-turbo',
       chatGptLabel: currentConversation?.chatGptLabel || null,
       promptPrefix: currentConversation?.promptPrefix || null,
-      temperature: currentConversation?.temperature || 0.8,
+      temperature: currentConversation?.temperature || 1,
       top_p: currentConversation?.top_p || 1,
-      presence_penalty: currentConversation?.presence_penalty || 1
+      presence_penalty: currentConversation?.presence_penalty || 0,
+      frequency_penalty: currentConversation?.frequency_penalty || 0
     };
     responseSender = endpointOption.chatGptLabel || 'ChatGPT';
   } else if (endpoint === 'bingAI') {