diff --git a/api/app/clients/chatgpt-client.js b/api/app/clients/chatgpt-client.js
index eb01ae374f..1cf64656b0 100644
--- a/api/app/clients/chatgpt-client.js
+++ b/api/app/clients/chatgpt-client.js
@@ -12,6 +12,7 @@ const askClient = async ({
temperature,
top_p,
presence_penalty,
+ frequency_penalty,
onProgress,
abortController
}) => {
@@ -25,7 +26,8 @@ const askClient = async ({
model: model,
temperature,
top_p,
- presence_penalty
+ presence_penalty,
+ frequency_penalty
},
chatGptLabel,
promptPrefix,
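
This hunk threads frequency_penalty from the route handler into the ChatGPT client options alongside the existing sampling parameters. A minimal sketch of the resulting call, using only fields visible in this diff (the onProgress and abortController values are illustrative):

    // Hypothetical caller; only frequency_penalty is new in this change.
    const response = await askClient({
      model: 'gpt-3.5-turbo',
      chatGptLabel: null,
      promptPrefix: null,
      temperature: 1,
      top_p: 1,
      presence_penalty: 0,
      frequency_penalty: 0,
      onProgress: (partial) => console.log(partial),
      abortController: new AbortController()
    });
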
diff --git a/api/models/schema/convoSchema.js b/api/models/schema/convoSchema.js
index 9cb13ebc0b..7c0e3d493c 100644
--- a/api/models/schema/convoSchema.js
+++ b/api/models/schema/convoSchema.js
@@ -44,7 +44,7 @@ const convoSchema = mongoose.Schema(
},
temperature: {
type: Number,
- default: 0.8,
+ default: 1,
required: false
},
top_p: {
@@ -54,7 +54,12 @@ const convoSchema = mongoose.Schema(
},
presence_penalty: {
type: Number,
- default: 1,
+ default: 0,
+ required: false
+ },
+ frequency_penalty: {
+ type: Number,
+ default: 0,
required: false
},
// for bingai only
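
These defaults bring the persisted options in line with the OpenAI API defaults: temperature defaults to 1, and both penalties default to 0 (each accepting values between -2.0 and 2.0). The affected slice of the schema after this change, other fields elided:

    temperature:       { type: Number, default: 1, required: false },
    top_p:             { type: Number, default: 1, required: false },
    presence_penalty:  { type: Number, default: 0, required: false },
    frequency_penalty: { type: Number, default: 0, required: false },
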
diff --git a/api/server/routes/askOpenAI.js b/api/server/routes/askOpenAI.js
index 80969e8013..3b88c35dc2 100644
--- a/api/server/routes/askOpenAI.js
+++ b/api/server/routes/askOpenAI.js
@@ -34,9 +34,10 @@ router.post('/', async (req, res) => {
model: req.body?.model || 'gpt-3.5-turbo',
chatGptLabel: req.body?.chatGptLabel || null,
promptPrefix: req.body?.promptPrefix || null,
- temperature: req.body?.temperature || 0.8,
+ temperature: req.body?.temperature || 1,
top_p: req.body?.top_p || 1,
- presence_penalty: req.body?.presence_penalty || 1
+ presence_penalty: req.body?.presence_penalty || 0,
+ frequency_penalty: req.body?.frequency_penalty || 0
};
console.log('ask log', {
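
One caveat with the || fallbacks here: since || treats 0 as falsy, a request that explicitly sends temperature: 0 falls back to 1; the penalty fields are only unaffected because their fallbacks are already 0. A sketch of the same defaults using nullish coalescing, which is not part of this change, would preserve explicit zeros:

    // Assumed alternative, not in this PR: ?? only falls back on null/undefined.
    temperature: req.body?.temperature ?? 1,
    top_p: req.body?.top_p ?? 1,
    presence_penalty: req.body?.presence_penalty ?? 0,
    frequency_penalty: req.body?.frequency_penalty ?? 0
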
diff --git a/client/src/components/Input/OpenAIOptions/Settings.jsx b/client/src/components/Input/OpenAIOptions/Settings.jsx
index 3ad5c249a0..b72b861463 100644
--- a/client/src/components/Input/OpenAIOptions/Settings.jsx
+++ b/client/src/components/Input/OpenAIOptions/Settings.jsx
@@ -12,25 +12,48 @@ const defaultTextProps =
const optionText =
'p-0 shadow-none text-right pr-1 h-8 border-transparent focus:ring-[#10a37f] focus:ring-offset-0 focus:ring-opacity-100 hover:bg-gray-800/10 dark:hover:bg-white/10 focus:bg-gray-800/10 dark:focus:bg-white/10 transition-colors';
-function Settings({ isOpen }) {
+function Settings(props) {
+ const {
+ model,
+ setModel,
+ chatGptLabel,
+ setChatGptLabel,
+ promptPrefix,
+ setPromptPrefix,
+ temperature,
+ setTemperature,
+ topP,
+ setTopP,
+ freqP,
+ setFreqP,
+ presP,
+ setPresP
+ } = props;
+
+ // temperature
+ // top_p
+ // presence_penalty
+ // frequency_penalty
+ // chatGptLabel
+ // promptPrefix
// const endpointsConfig = useRecoilValue(store.endpointsConfig);
// const availableModels = endpointsConfig?.['openAI']?.['availableModels'] || [];
- const [model, setModel] = useState('text-davinci-003');
- const [chatGptLabel, setChatGptLabel] = useState('');
- const [promptPrefix, setPromptPrefix] = useState('');
- const [temperature, setTemperature] = useState(1);
- const [maxTokens, setMaxTokens] = useState(2048);
- const [topP, setTopP] = useState(1);
- const [freqP, setFreqP] = useState(0);
- const [presP, setPresP] = useState(0);
+ // const [model, setModel] = useState('text-davinci-003');
+ // const [chatGptLabel, setChatGptLabel] = useState('');
+ // const [promptPrefix, setPromptPrefix] = useState('');
+ // const [temperature, setTemperature] = useState(1);
+ // // const [maxTokens, setMaxTokens] = useState(2048);
+ // const [topP, setTopP] = useState(1);
+ // const [freqP, setFreqP] = useState(0);
+ // const [presP, setPresP] = useState(0);
// const textareaRef = useRef(null);
// const inputRef = useRef(null);
return (
<>
         [JSX element lines stripped from this hunk during extraction; the surviving markers show part of the removed markup being re-added wrapped in a {/* … */} comment]
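
Settings switches from local useState to controlled props, so the option values now live in the parent's conversation state. The exact JSX passed down is stripped from the hunk above, so the following parent usage is only an assumed wiring based on the destructured prop names and the setOption helper introduced in index.jsx below:

    // Assumed usage; each setter is a curried setOption(key) from the parent.
    <Settings
      model={model}
      setModel={setOption('model')}
      chatGptLabel={chatGptLabel}
      setChatGptLabel={setOption('chatGptLabel')}
      promptPrefix={promptPrefix}
      setPromptPrefix={setOption('promptPrefix')}
      temperature={temperature}
      setTemperature={setOption('temperature')}
      topP={top_p}
      setTopP={setOption('top_p')}
      freqP={frequency_penalty}
      setFreqP={setOption('frequency_penalty')}
      presP={presence_penalty}
      setPresP={setOption('presence_penalty')}
    />
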
diff --git a/client/src/components/Input/OpenAIOptions/index.jsx b/client/src/components/Input/OpenAIOptions/index.jsx
index 50238a7e66..a86c79c673 100644
--- a/client/src/components/Input/OpenAIOptions/index.jsx
+++ b/client/src/components/Input/OpenAIOptions/index.jsx
@@ -10,21 +10,25 @@ import store from '~/store';
function OpenAIOptions() {
const [advancedMode, setAdvancedMode] = useState(false);
- const [conversation, setConversation] = useRecoilState(store.conversation) || {};
+
const endpointsConfig = useRecoilValue(store.endpointsConfig);
+ const availableModels = endpointsConfig?.['openAI']?.['availableModels'] || [];
+
+ const [conversation, setConversation] = useRecoilState(store.conversation) || {};
const { endpoint, conversationId } = conversation;
+ const { model, chatGptLabel, promptPrefix, temperature, top_p, presence_penalty, frequency_penalty } =
+ conversation;
useEffect(() => {
- const { endpoint, chatGptLabel, promptPrefix, temperature, top_p, presence_penalty } = conversation;
-
if (endpoint !== 'openAI') return;
const mustInAdvancedMode =
chatGptLabel !== null ||
promptPrefix !== null ||
- temperature !== 0.8 ||
+ temperature !== 1 ||
top_p !== 1 ||
- presence_penalty !== 1;
+ presence_penalty !== 0 ||
+ frequency_penalty !== 0;
if (mustInAdvancedMode && !advancedMode) setAdvancedMode(true);
}, [conversation, advancedMode]);
@@ -32,9 +36,6 @@ function OpenAIOptions() {
if (endpoint !== 'openAI') return null;
if (conversationId !== 'new') return null;
- const { model } = conversation;
- const availableModels = endpointsConfig?.['openAI']?.['availableModels'] || [];
-
const triggerAdvancedMode = () => setAdvancedMode(prev => !prev);
const switchToSimpleMode = () => {
@@ -42,17 +43,20 @@ function OpenAIOptions() {
...prevState,
chatGptLabel: null,
promptPrefix: null,
- temperature: 0.8,
+ temperature: 1,
top_p: 1,
- presence_penalty: 1
+ presence_penalty: 0,
+ frequency_penalty: 0
}));
setAdvancedMode(false);
};
- const setModel = newModel => {
+ const setOption = param => newValue => {
+ let update = {};
+ update[param] = newValue;
setConversation(prevState => ({
...prevState,
- model: newModel
+ ...update
}));
};
@@ -70,7 +74,7 @@ function OpenAIOptions() {
@@ -111,7 +115,22 @@ function OpenAIOptions() {
         [JSX lines stripped from these hunks during extraction]
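
The per-field setters are replaced by a single curried setOption updater that merges one key into the conversation state. A short illustration of how it behaves, with an example value:

    const setTemperature = setOption('temperature');
    setTemperature(0.7);
    // equivalent to:
    // setConversation(prev => ({ ...prev, temperature: 0.7 }));
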
diff --git a/client/src/store/conversation.js b/client/src/store/conversation.js
index 18f48faa6d..6088c9957d 100644
--- a/client/src/store/conversation.js
+++ b/client/src/store/conversation.js
@@ -16,9 +16,10 @@ import getDefaultConversation from '~/utils/getDefaultConversation';
// // for azureOpenAI, openAI only
// chatGptLabel: null,
// promptPrefix: null,
-// temperature: 0.8,
+// temperature: 1,
// top_p: 1,
-// presence_penalty: 1,
+// presence_penalty: 0,
+// frequency_penalty: 0,
// // for bingAI only
// jailbreak: false,
// jailbreakConversationId: null,
diff --git a/client/src/utils/getDefaultConversation.js b/client/src/utils/getDefaultConversation.js
index a95a8f1486..e736d5aefd 100644
--- a/client/src/utils/getDefaultConversation.js
+++ b/client/src/utils/getDefaultConversation.js
@@ -6,9 +6,10 @@ const buildDefaultConversation = ({ conversation, endpoint, lastConversationSetu
model: lastConversationSetup?.model || 'gpt-3.5-turbo',
chatGptLabel: lastConversationSetup?.chatGptLabel || null,
promptPrefix: lastConversationSetup?.promptPrefix || null,
- temperature: lastConversationSetup?.temperature || 0.8,
+ temperature: lastConversationSetup?.temperature || 1,
top_p: lastConversationSetup?.top_p || 1,
- presence_penalty: lastConversationSetup?.presence_penalty || 1
+ presence_penalty: lastConversationSetup?.presence_penalty || 0,
+ frequency_penalty: lastConversationSetup?.frequency_penalty || 0
};
} else if (endpoint === 'bingAI') {
conversation = {
diff --git a/client/src/utils/handleSubmit.js b/client/src/utils/handleSubmit.js
index c545ed9ccd..9a912c12a4 100644
--- a/client/src/utils/handleSubmit.js
+++ b/client/src/utils/handleSubmit.js
@@ -30,9 +30,10 @@ const useMessageHandler = () => {
model: currentConversation?.model || 'gpt-3.5-turbo',
chatGptLabel: currentConversation?.chatGptLabel || null,
promptPrefix: currentConversation?.promptPrefix || null,
- temperature: currentConversation?.temperature || 0.8,
+ temperature: currentConversation?.temperature || 1,
top_p: currentConversation?.top_p || 1,
- presence_penalty: currentConversation?.presence_penalty || 1
+ presence_penalty: currentConversation?.presence_penalty || 0,
+ frequency_penalty: currentConversation?.frequency_penalty || 0
};
responseSender = endpointOption.chatGptLabel || 'ChatGPT';
} else if (endpoint === 'bingAI') {
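
Taken together, frequency_penalty now flows end to end: the client defaults in getDefaultConversation and useMessageHandler, the askOpenAI route, the askClient call, and the persisted convoSchema all agree on the same option object. Its shape after this change, with the new defaults:

    const endpointOption = {
      model: 'gpt-3.5-turbo',
      chatGptLabel: null,
      promptPrefix: null,
      temperature: 1,
      top_p: 1,
      presence_penalty: 0,
      frequency_penalty: 0
    };
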