Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-17 17:00:15 +01:00)
feat: use default preset to create new conversation.
commit ee10e0e43e (parent 579b53de29)
4 changed files with 51 additions and 51 deletions
BingAIOptions:

@@ -18,13 +18,13 @@ function BingAIOptions() {
   const { endpoint, conversationId } = conversation;
   const { toneStyle, context, systemMessage, jailbreak } = conversation;
 
-  useEffect(() => {
-    if (endpoint !== 'bingAI') return;
+  // useEffect(() => {
+  //   if (endpoint !== 'bingAI') return;
 
-    const mustInAdvancedMode = context !== null || systemMessage !== null;
+  //   const mustInAdvancedMode = context !== null || systemMessage !== null;
 
-    if (mustInAdvancedMode && !advancedMode) setAdvancedMode(true);
-  }, [conversation, advancedMode]);
+  //   if (mustInAdvancedMode && !advancedMode) setAdvancedMode(true);
+  // }, [conversation, advancedMode]);
 
   if (endpoint !== 'bingAI') return null;
   if (conversationId !== 'new') return null;
@@ -32,11 +32,11 @@ function BingAIOptions() {
   const triggerAdvancedMode = () => setAdvancedMode(prev => !prev);
 
   const switchToSimpleMode = () => {
-    setConversation(prevState => ({
-      ...prevState,
-      context: null,
-      systemMessage: null
-    }));
+    // setConversation(prevState => ({
+    //   ...prevState,
+    //   context: null,
+    //   systemMessage: null
+    // }));
     setAdvancedMode(false);
   };
 
@@ -64,7 +64,7 @@ function BingAIOptions() {
     <>
       <div
         className={
-          'openAIOptions-simple-container flex w-full items-center justify-center gap-2' +
+          'openAIOptions-simple-container flex w-auto items-center justify-center gap-2' +
           (!advancedMode ? ' show' : '')
         }
       >
ChatGPTOptions:

@@ -12,9 +12,9 @@ function ChatGPTOptions() {
 
   const endpointsConfig = useRecoilValue(store.endpointsConfig);
 
-  useEffect(() => {
-    if (endpoint !== 'chatGPTBrowser') return;
-  }, [conversation]);
+  // useEffect(() => {
+  //   if (endpoint !== 'chatGPTBrowser') return;
+  // }, [conversation]);
 
   if (endpoint !== 'chatGPTBrowser') return null;
   if (conversationId !== 'new') return null;
@@ -34,7 +34,7 @@ function ChatGPTOptions() {
     'transition-colors shadow-md rounded-md min-w-[75px] font-normal bg-white border-black/10 hover:border-black/10 focus:border-black/10 dark:border-black/10 dark:hover:border-black/10 dark:focus:border-black/10 border dark:bg-gray-700 text-black dark:text-white';
 
   return (
-    <div className="openAIOptions-simple-container show flex w-full items-center justify-center gap-2">
+    <div className="openAIOptions-simple-container show flex w-auto items-center justify-center gap-2">
       <SelectDropdown
         value={model}
         setValue={setOption('model')}
OpenAIOptions:

@@ -1,4 +1,4 @@
-import { useEffect, useState } from 'react';
+import React, { useEffect, useState } from 'react';
 import { Settings2 } from 'lucide-react';
 import { useRecoilState, useRecoilValue } from 'recoil';
 import SelectDropdown from '../../ui/SelectDropdown';
@@ -21,19 +21,19 @@ function OpenAIOptions() {
 
   const endpointsConfig = useRecoilValue(store.endpointsConfig);
 
-  useEffect(() => {
-    if (endpoint !== 'openAI') return;
+  // useEffect(() => {
+  //   if (endpoint !== 'openAI') return;
 
-    const mustInAdvancedMode =
-      chatGptLabel !== null ||
-      promptPrefix !== null ||
-      temperature !== 1 ||
-      top_p !== 1 ||
-      presence_penalty !== 0 ||
-      frequency_penalty !== 0;
+  //   const mustInAdvancedMode =
+  //     chatGptLabel !== null ||
+  //     promptPrefix !== null ||
+  //     temperature !== 1 ||
+  //     top_p !== 1 ||
+  //     presence_penalty !== 0 ||
+  //     frequency_penalty !== 0;
 
-    if (mustInAdvancedMode && !advancedMode) setAdvancedMode(true);
-  }, [conversation, advancedMode]);
+  //   if (mustInAdvancedMode && !advancedMode) setAdvancedMode(true);
+  // }, [conversation, advancedMode]);
 
   if (endpoint !== 'openAI') return null;
   if (conversationId !== 'new') return null;
@@ -43,15 +43,15 @@ function OpenAIOptions() {
   const triggerAdvancedMode = () => setAdvancedMode(prev => !prev);
 
   const switchToSimpleMode = () => {
-    setConversation(prevState => ({
-      ...prevState,
-      chatGptLabel: null,
-      promptPrefix: null,
-      temperature: 1,
-      top_p: 1,
-      presence_penalty: 0,
-      frequency_penalty: 0
-    }));
+    // setConversation(prevState => ({
+    //   ...prevState,
+    //   chatGptLabel: null,
+    //   promptPrefix: null,
+    //   temperature: 1,
+    //   top_p: 1,
+    //   presence_penalty: 0,
+    //   frequency_penalty: 0
+    // }));
     setAdvancedMode(false);
   };
 
@@ -75,7 +75,7 @@ function OpenAIOptions() {
     <>
      <div
        className={
-          'openAIOptions-simple-container flex w-full items-center justify-center gap-2' +
+          'openAIOptions-simple-container flex w-auto items-center justify-center gap-2' +
          (!advancedMode ? ' show' : '')
        }
      >
getDefaultConversation:

@@ -50,7 +50,7 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
   const { endpoint: targetEndpoint } = preset || {};
 
   if (targetEndpoint) {
-    // try to use current model
+    // try to use preset
     const endpoint = targetEndpoint;
     if (endpointsFilter?.[endpoint]) {
       conversation = buildDefaultConversation({
@@ -65,18 +65,18 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
     }
   }
 
-  try {
-    // try to use current model
-    const { endpoint = null } = prevConversation || {};
-    if (endpointsFilter?.[endpoint]) {
-      conversation = buildDefaultConversation({
-        conversation,
-        endpoint,
-        lastConversationSetup: prevConversation
-      });
-      return conversation;
-    }
-  } catch (error) {}
+  // try {
+  //   // try to use current model
+  //   const { endpoint = null } = prevConversation || {};
+  //   if (endpointsFilter?.[endpoint]) {
+  //     conversation = buildDefaultConversation({
+  //       conversation,
+  //       endpoint,
+  //       lastConversationSetup: prevConversation
+  //     });
+  //     return conversation;
+  //   }
+  // } catch (error) {}
 
   try {
     // try to read latest selected model from local storage
@@ -84,7 +84,7 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
     const { endpoint = null } = lastConversationSetup;
 
     if (endpointsFilter?.[endpoint]) {
-      conversation = buildDefaultConversation({ conversation, endpoint, lastConversationSetup });
+      conversation = buildDefaultConversation({ conversation, endpoint });
      return conversation;
     }
   } catch (error) {}
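Read together, the getDefaultConversation hunks change how the endpoint for a new conversation is chosen: the default preset's endpoint is consulted first, the fallback to the previous conversation's endpoint is commented out, and the setup stored in the browser is still used as a last resort (now without forwarding the stored setup into buildDefaultConversation). The snippet below is a minimal sketch of that selection order only, not the project's actual module; the localStorage key name, the simplified buildDefaultConversation stand-in, and the arguments passed in the preset branch are assumptions for illustration.

// Sketch of the post-commit selection order; helpers and key names are hypothetical.
const buildDefaultConversation = ({ conversation, endpoint }) => ({
  // Simplified stand-in: the real helper also fills endpoint-specific defaults.
  ...conversation,
  endpoint
});

const getDefaultConversationSketch = ({ conversation, endpointsFilter, preset }) => {
  // 1) New in this commit: prefer the endpoint named by the default preset.
  const { endpoint: targetEndpoint } = preset || {};
  if (targetEndpoint && endpointsFilter?.[targetEndpoint]) {
    return buildDefaultConversation({ conversation, endpoint: targetEndpoint });
  }

  // 2) The fallback to prevConversation's endpoint is commented out in the
  //    diff above, so it is omitted here as well.

  // 3) Last resort: the endpoint of the most recent setup kept in localStorage
  //    ('lastConversationSetup' is an assumed key name).
  try {
    const lastConversationSetup = JSON.parse(
      localStorage.getItem('lastConversationSetup') || 'null'
    );
    const { endpoint = null } = lastConversationSetup || {};
    if (endpointsFilter?.[endpoint]) {
      // After this commit the stored setup itself is no longer forwarded.
      return buildDefaultConversation({ conversation, endpoint });
    }
  } catch (error) {
    // Ignore missing or malformed localStorage data, as the original does.
  }

  // 4) Otherwise leave the conversation unchanged.
  return conversation;
};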