feat: new endpoint-style submit

Wentao Lyu 2023-03-31 03:22:57 +08:00
parent 089ca5f120
commit adcc021c9e
22 changed files with 566 additions and 478 deletions

View file

@@ -38,7 +38,7 @@ const router = createBrowserRouter([
const App = () => {
const [user, setUser] = useRecoilState(store.user);
const setIsSearchEnabled = useSetRecoilState(store.isSearchEnabled);
const setModelsFilter = useSetRecoilState(store.modelsFilter);
const setEndpointsFilter = useSetRecoilState(store.endpointsFilter);
useEffect(() => {
// fetch if search enabled
@@ -58,19 +58,12 @@ const App = () => {
// fetch models
axios
.get('/api/models', {
.get('/api/endpoints', {
timeout: 1000,
withCredentials: true
})
.then(({ data }) => {
const filter = {
chatgpt: data?.hasOpenAI,
chatgptCustom: data?.hasOpenAI,
bingai: data?.hasBing,
sydney: data?.hasBing,
chatgptBrowser: data?.hasChatGpt
};
setModelsFilter(filter);
setEndpointsFilter(data);
})
.catch(error => {
console.error(error);
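
The endpoint availability check is now a single object fetched from /api/endpoints rather than a hand-built filter of per-model flags. For reference, a minimal sketch of how another component could read the new endpointsFilter atom, assuming the response keeps boolean availability flags keyed by endpoint name (the exact payload shape is not shown in this diff):

import { useRecoilValue } from 'recoil';
import store from '~/store';

// Sketch only: keys such as 'openAI' or 'bingAI' are assumptions, not confirmed by this diff.
export default function useIsEndpointAvailable(endpointName) {
  const endpointsFilter = useRecoilValue(store.endpointsFilter);
  return Boolean(endpointsFilter?.[endpointName]);
}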

View file

@@ -1,14 +1,13 @@
import React, { useEffect, useRef, useState } from 'react';
import { useRecoilState, useResetRecoilState, useSetRecoilState } from 'recoil';
import { useEffect } from 'react';
import { useRecoilValue, useResetRecoilState, useSetRecoilState } from 'recoil';
import { SSE } from '~/utils/sse';
import { useMessageHandler } from '../../utils/handleSubmit';
import createPayload from '~/utils/createPayload';
import store from '~/store';
export default function MessageHandler({ messages }) {
const [submission, setSubmission] = useRecoilState(store.submission);
const [isSubmitting, setIsSubmitting] = useRecoilState(store.isSubmitting);
export default function MessageHandler() {
const submission = useRecoilValue(store.submission);
const setIsSubmitting = useSetRecoilState(store.isSubmitting);
const setMessages = useSetRecoilState(store.messages);
const setConversation = useSetRecoilState(store.conversation);
const resetLatestMessage = useResetRecoilState(store.latestMessage);
@@ -105,10 +104,9 @@ export default function MessageHandler({ messages }) {
};
const finalHandler = (data, submission) => {
const { conversation, messages, message, initialResponse, isRegenerate = false } = submission;
const { messages, isRegenerate = false } = submission;
const { requestMessage, responseMessage } = data;
const { conversationId } = requestMessage;
const { requestMessage, responseMessage, conversation } = data;
// update the messages
if (isRegenerate) setMessages([...messages, responseMessage]);
@@ -127,66 +125,14 @@ export default function MessageHandler({ messages }) {
}, 5000);
}
const { model, chatGptLabel, promptPrefix } = conversation;
const isBing = model === 'bingai' || model === 'sydney';
if (!isBing) {
const { title } = data;
const { conversationId } = responseMessage;
setConversation(prevState => ({
...prevState,
title,
conversationId,
jailbreakConversationId: null,
conversationSignature: null,
clientId: null,
invocationId: null,
chatGptLabel,
promptPrefix,
latestMessage: null
}));
} else if (model === 'bingai') {
const { title } = data;
const { conversationSignature, clientId, conversationId, invocationId } = responseMessage;
setConversation(prevState => ({
...prevState,
title,
conversationId,
jailbreakConversationId: null,
conversationSignature,
clientId,
invocationId,
chatGptLabel,
promptPrefix,
latestMessage: null
}));
} else if (model === 'sydney') {
const { title } = data;
const {
jailbreakConversationId,
parentMessageId,
conversationSignature,
clientId,
conversationId,
invocationId
} = responseMessage;
setConversation(prevState => ({
...prevState,
title,
conversationId,
jailbreakConversationId,
conversationSignature,
clientId,
invocationId,
chatGptLabel,
promptPrefix,
latestMessage: null
}));
}
setConversation(prevState => ({
...prevState,
...conversation
}));
};
const errorHandler = (data, submission) => {
const { conversation, messages, message, initialResponse, isRegenerate = false } = submission;
const { messages, message } = submission;
console.log('Error:', data);
const errorResponse = {
@@ -203,7 +149,6 @@ export default function MessageHandler({ messages }) {
if (submission === null) return;
if (Object.keys(submission).length === 0) return;
const { messages, initialResponse, isRegenerate = false } = submission;
let { message } = submission;
const { server, payload } = createPayload(submission);
@@ -224,9 +169,6 @@ export default function MessageHandler({ messages }) {
if (data.created) {
message = {
...data.message,
model: message?.model,
chatGptLabel: message?.chatGptLabel,
promptPrefix: message?.promptPrefix,
overrideParentMessageId: message?.overrideParentMessageId
};
createdHandler(data, { ...submission, message });
@@ -245,7 +187,7 @@ export default function MessageHandler({ messages }) {
events.onopen = () => console.log('connection is opened');
events.oncancel = e => cancelHandler(latestResponseText, { ...submission, message });
events.oncancel = () => cancelHandler(latestResponseText, { ...submission, message });
events.onerror = function (e) {
console.log('error in opening conn.');
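
Because finalHandler now spreads data.conversation straight into Recoil state, the server is expected to send back the updated conversation object alongside the request and response messages, replacing the old per-model (bingai/sydney) reconstruction on the client. An illustrative shape of that final event, hedged since only requestMessage, responseMessage and conversation are visible in this diff:

// Example payload only; field names beyond the three destructured above are assumptions.
const exampleFinalEvent = {
  requestMessage: { messageId: 'msg-1', conversationId: 'convo-1', text: 'Hello' },
  responseMessage: { messageId: 'msg-2', conversationId: 'convo-1', text: 'Hi there' },
  conversation: { conversationId: 'convo-1', endpoint: 'openAI', title: 'New Chat' }
};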

View file

@@ -97,6 +97,8 @@ export default function Messages({ isSearchView = false }) {
if (model) _title += `: ${model}`;
} else if (endpoint === null) {
null;
} else {
null;
}
return _title;
}

View file

@@ -79,7 +79,7 @@ const useConversation = () => {
if (endpoint === null)
// get the default model
conversation = getDefaultConversation({ conversation, availableEndpoints, prevConversation });
console.log(conversation);
setConversation(conversation);
setMessages(messages);
resetLatestMessage();

View file

@@ -1,6 +1,7 @@
import conversation from './conversation';
import conversations from './conversations';
import models from './models';
import endpoints from './endpoints';
import user from './user';
import text from './text';
import submission from './submission';
@@ -10,6 +11,7 @@ export default {
...conversation,
...conversations,
...models,
...endpoints,
...user,
text,
...submission,
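
The store index now re-exports an endpoints module alongside the existing models one. The file itself is not part of this excerpt; a hypothetical minimal version, assuming only the endpointsFilter atom that the App component above consumes:

import { atom } from 'recoil';

// Hypothetical sketch of the new endpoints store module; only endpointsFilter is implied by this commit.
const endpointsFilter = atom({
  key: 'endpointsFilter',
  default: {}
});

export default { endpointsFilter };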

View file

@@ -1,55 +1,22 @@
export default function createPayload(submission) {
const { conversation, messages, message, initialResponse, isRegenerate = false } = submission;
const { conversation, message, endpointOption } = submission;
const { conversationId } = conversation;
const { endpoint } = endpointOption;
const endpoint = `/api/ask`;
const {
model,
chatGptLabel,
promptPrefix,
jailbreakConversationId,
conversationId,
conversationSignature,
clientId,
invocationId,
toneStyle
} = conversation;
const endpointUrlMap = {
azureOpenAI: '/api/ask/azureOpenAI',
openAI: '/api/ask/openAI',
bingAI: '/api/ask/bingAI',
chatGPTBrowser: '/api/ask/chatGPTBrowser'
};
const server = endpointUrlMap[endpoint];
let payload = {
...message,
...{
model,
chatGptLabel,
promptPrefix,
conversationId
}
...endpointOption,
conversationId
};
// if (!payload.conversationId)
// if (convo?.conversationId && convo?.parentMessageId) {
// payload = {
// ...payload,
// conversationId: convo.conversationId,
// parentMessageId: convo.parentMessageId || '00000000-0000-0000-0000-000000000000'
// };
// }
const isBing = model === 'bingai' || model === 'sydney';
if (isBing && !conversationId) {
payload.toneStyle = toneStyle || 'fast';
}
if (isBing && conversationId) {
payload = {
...payload,
jailbreakConversationId,
conversationSignature,
clientId,
invocationId
};
}
let server = endpoint;
server = model === 'bingai' ? server + '/bing' : server;
server = model === 'sydney' ? server + '/sydney' : server;
return { server, payload };
}
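
createPayload now routes by endpointOption.endpoint through endpointUrlMap and spreads the whole endpointOption into the payload, replacing the old model-based /bing and /sydney suffixes. An illustrative call under those assumptions (field values are examples, not taken from the commit):

import createPayload from '~/utils/createPayload';

const { server, payload } = createPayload({
  conversation: { conversationId: 'convo-1' },
  message: { text: 'Hello', parentMessageId: '00000000-0000-0000-0000-000000000000' },
  endpointOption: { endpoint: 'openAI', model: 'gpt-3.5-turbo', temperature: 0.8 }
});
// server  -> '/api/ask/openAI'
// payload -> { text: 'Hello', parentMessageId: '00000000-0000-0000-0000-000000000000',
//              endpoint: 'openAI', model: 'gpt-3.5-turbo', temperature: 0.8,
//              conversationId: 'convo-1' }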

View file

@@ -19,7 +19,7 @@ const buildDefaultConversation = ({ conversation, endpoint, lastConversationSetu
conversationSignature: lastConversationSetup?.conversationSignature || null,
clientId: lastConversationSetup?.clientId || null,
invocationId: lastConversationSetup?.invocationId || null,
toneStyle: lastConversationSetup?.toneStyle || null,
toneStyle: lastConversationSetup?.toneStyle || 'fast',
suggestions: lastConversationSetup?.suggestions || []
};
} else if (endpoint === 'chatGPTBrowser') {
@@ -33,6 +33,12 @@ const buildDefaultConversation = ({ conversation, endpoint, lastConversationSetu
...conversation,
endpoint
};
} else {
console.error(`Unknown endpoint ${endpoint}`);
conversation = {
...conversation,
endpoint: null
};
}
return conversation;
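
buildDefaultConversation now defaults toneStyle to 'fast' in the bingAI branch and handles an unrecognized endpoint by logging an error and resetting endpoint to null. A hedged usage sketch, calling the helper directly with illustrative arguments:

// Known endpoint: toneStyle now defaults to 'fast'.
const bingConvo = buildDefaultConversation({
  conversation: { conversationId: null },
  endpoint: 'bingAI',
  lastConversationSetup: {}
});
// bingConvo.toneStyle -> 'fast'

// Unrecognized endpoint ('claude' here is purely hypothetical): error is logged, endpoint reset to null.
const unknownConvo = buildDefaultConversation({
  conversation: { conversationId: null },
  endpoint: 'claude',
  lastConversationSetup: {}
});
// unknownConvo.endpoint -> null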

View file

@@ -27,10 +27,7 @@ const getIcon = props => {
else if (!isCreatedByUser) {
const { endpoint, error } = props;
let icon = <GPTIcon size={size * 0.7} />;
let bg = 'grey';
let name = 'UNKNOWN';
let icon, bg, name;
if (endpoint === 'azureOpenAI') {
const { chatGptLabel } = props;
@@ -59,6 +56,10 @@ const getIcon = props => {
icon = <GPTIcon size={size * 0.7} />;
bg = `grey`;
name = 'N/A';
} else {
icon = <GPTIcon size={size * 0.7} />;
bg = `grey`;
name = 'UNKNOWN';
}
return (

View file

@@ -1,29 +1,14 @@
// import resetConvo from './resetConvo';
// import { useSelector, useDispatch } from 'react-redux';
// import { setNewConvo } from '~/store/convoSlice';
// import { setMessages } from '~/store/messageSlice';
// import { setSubmitState, setSubmission } from '~/store/submitSlice';
// import { setText } from '~/store/textSlice';
// import { setError } from '~/store/convoSlice';
import { v4 } from 'uuid';
import { useRecoilState, useRecoilValue, useSetRecoilState } from 'recoil';
import store from '~/store';
const useMessageHandler = () => {
// const dispatch = useDispatch();
// const convo = useSelector((state) => state.convo);
// const { initial } = useSelector((state) => state.models);
// const { messages } = useSelector((state) => state.messages);
// const { model, chatGptLabel, promptPrefix, isSubmitting } = useSelector((state) => state.submit);
// const { latestMessage, error } = convo;
const [currentConversation, setCurrentConversation] = useRecoilState(store.conversation) || {};
const currentConversation = useRecoilValue(store.conversation) || {};
const setSubmission = useSetRecoilState(store.submission);
const isSubmitting = useRecoilValue(store.isSubmitting);
const latestMessage = useRecoilValue(store.latestMessage);
const { error } = currentConversation;
const [messages, setMessages] = useRecoilState(store.messages);
@@ -36,16 +21,53 @@ const useMessageHandler = () => {
}
// determine the model to be used
const { model = null, chatGptLabel = null, promptPrefix = null } = currentConversation;
const { endpoint } = currentConversation;
let endpointOption = {};
let responseSender = '';
if (endpoint === 'azureOpenAI' || endpoint === 'openAI') {
endpointOption = {
endpoint,
model: currentConversation?.model || 'gpt-3.5-turbo',
chatGptLabel: currentConversation?.chatGptLabel || null,
promptPrefix: currentConversation?.promptPrefix || null,
temperature: currentConversation?.temperature || 0.8,
top_p: currentConversation?.top_p || 1,
presence_penalty: currentConversation?.presence_penalty || 1
};
responseSender = endpointOption.chatGptLabel || 'ChatGPT';
} else if (endpoint === 'bingAI') {
endpointOption = {
endpoint,
jailbreak: currentConversation?.jailbreak || false,
jailbreakConversationId: currentConversation?.jailbreakConversationId || null,
conversationSignature: currentConversation?.conversationSignature || null,
clientId: currentConversation?.clientId || null,
invocationId: currentConversation?.invocationId || null,
toneStyle: currentConversation?.toneStyle || 'fast',
suggestions: currentConversation?.suggestions || []
};
responseSender = endpointOption.jailbreak ? 'Sydney' : 'BingAI';
} else if (endpoint === 'chatGPTBrowser') {
endpointOption = {
endpoint,
model: currentConversation?.model || 'text-davinci-002-render-sha'
};
responseSender = 'ChatGPT';
} else if (endpoint === null) {
console.error('No endpoint available');
return;
} else {
console.error(`Unknown endpoint ${endpoint}`);
return;
}
let currentMessages = messages;
// construct the query message
// this is not a real messageId, it is used as placeholder before real messageId returned
text = text.trim();
const fakeMessageId = v4();
// const isCustomModel = model === 'chatgptCustom' || !initial[model];
// const sender = model === 'chatgptCustom' ? chatGptLabel : model;
parentMessageId = parentMessageId || latestMessage?.messageId || '00000000-0000-0000-0000-000000000000';
let currentMessages = messages;
conversationId = conversationId || currentConversation?.conversationId;
if (conversationId == 'search') {
console.error('cannot send any message under search view!');
@@ -68,7 +90,7 @@ const useMessageHandler = () => {
// construct the placeholder response message
const initialResponse = {
sender: chatGptLabel || model,
sender: responseSender,
text: '<span className="result-streaming">█</span>',
parentMessageId: isRegenerate ? messageId : fakeMessageId,
messageId: (isRegenerate ? messageId : fakeMessageId) + '_',
@@ -79,16 +101,11 @@ const useMessageHandler = () => {
const submission = {
conversation: {
...currentConversation,
conversationId,
model,
chatGptLabel,
promptPrefix
conversationId
},
endpointOption,
message: {
...currentMsg,
model,
chatGptLabel,
promptPrefix,
overrideParentMessageId: isRegenerate ? messageId : null
},
messages: currentMessages,