feat: create conversation at the beginning then return the userMessage

Wentao Lyu 2023-03-13 13:11:53 +08:00
parent 5a409ccfa6
commit 8773878be2
6 changed files with 50 additions and 40 deletions
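All three ask routes move to the same pattern: if the request carries no conversationId, the server mints one immediately, persists the user message and the conversation before calling the model, and pushes the user message back over the SSE stream with created: true so the client can pick up the new conversationId right away. A minimal sketch of that flow, assuming an Express router and using trimmed stand-ins for the repo's saveMessage, saveConvo and sendMessage helpers (their real implementations live elsewhere in the project):

const express = require('express');
const crypto = require('crypto');
const router = express.Router();

// Stand-ins for the project's own persistence and SSE helpers.
const saveMessage = async (message) => { /* persist the message */ };
const saveConvo = async (convo) => { /* create or update the conversation */ };
const sendMessage = (res, data) => res.write(`data: ${JSON.stringify(data)}\n\n`);

router.post('/', async (req, res) => {
  const { model, text, parentMessageId, conversationId: oldConversationId } = req.body;
  if (!text || text.length === 0) {
    return res.status(400).json({ error: 'Prompt empty or too short' });
  }

  // Create the conversation id up front instead of waiting for the model reply.
  const conversationId = oldConversationId || crypto.randomUUID();
  const userMessage = {
    messageId: crypto.randomUUID(),
    parentMessageId: parentMessageId || '00000000-0000-0000-0000-000000000000',
    conversationId,
    sender: 'User',
    text
  };

  res.writeHead(200, {
    Connection: 'keep-alive',
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'X-Accel-Buffering': 'no'
  });

  await saveMessage(userMessage);
  await saveConvo({ ...userMessage, model });

  // Return the user message immediately so the client learns the conversationId.
  sendMessage(res, { message: userMessage, created: true });

  // ...stream the model response, send a final event, then res.end()...
});

module.exports = router;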

View file

@@ -17,11 +17,13 @@ router.use('/bing', askBing);
router.use('/sydney', askSydney);
router.post('/', async (req, res) => {
let { model, text, parentMessageId, conversationId, chatGptLabel, promptPrefix } = req.body;
let { model, text, parentMessageId, conversationId: oldConversationId , chatGptLabel, promptPrefix } = req.body;
if (text.length === 0) {
return handleError(res, 'Prompt empty or too short');
}
const conversationId = oldConversationId || crypto.randomUUID();
const userMessageId = crypto.randomUUID();
const userParentMessageId = parentMessageId || '00000000-0000-0000-0000-000000000000'
let userMessage = {
@@ -36,8 +38,6 @@ router.post('/', async (req, res) => {
console.log('ask log', {
model,
...userMessage,
parentMessageId: userParentMessageId,
conversationId,
chatGptLabel,
promptPrefix
});
@@ -61,12 +61,6 @@ router.post('/', async (req, res) => {
}
}
// if (messageId) {
// // existing conversation
// await saveMessage(userMessage);
// await deleteMessagesSince(userMessage);
// } else {}
res.writeHead(200, {
Connection: 'keep-alive',
'Content-Type': 'text/event-stream',
@@ -75,6 +69,10 @@ router.post('/', async (req, res) => {
'X-Accel-Buffering': 'no'
});
await saveMessage(userMessage);
await saveConvo({ ...userMessage, model, chatGptLabel, promptPrefix });
sendMessage(res, { message: userMessage, created: true });
try {
let i = 0;
let tokens = '';
@@ -165,7 +163,7 @@ router.post('/', async (req, res) => {
res.end();
} catch (error) {
console.log(error);
await deleteMessages({ messageId: userMessageId });
// await deleteMessages({ messageId: userMessageId });
handleError(res, error.message);
}
});
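sendMessage and handleError come from ./handlers and are not part of this diff. A plausible minimal shape for them, assuming sendMessage frames each payload as a single server-sent-events data: line and handleError has to cope with errors both before and after the stream has started (the repo's real helpers may differ):

// handlers.js (sketch)
const sendMessage = (res, data) => {
  // One SSE frame: a "data:" line followed by a blank line.
  res.write(`data: ${JSON.stringify(data)}\n\n`);
};

const handleError = (res, message) => {
  if (res.headersSent) {
    // Already streaming: report the error as an event and close the stream.
    res.write(`data: ${JSON.stringify({ error: true, message })}\n\n`);
    return res.end();
  }
  return res.status(500).json({ error: message });
};

module.exports = { handleError, sendMessage };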

View file

@@ -7,10 +7,12 @@ const { handleError, sendMessage } = require('./handlers');
const citationRegex = /\[\^\d+?\^]/g;
router.post('/', async (req, res) => {
const { model, text, parentMessageId, conversationId, ...convo } = req.body;
const { model, text, parentMessageId, conversationId: oldConversationId, ...convo } = req.body;
if (text.length === 0) {
return handleError(res, 'Prompt empty or too short');
}
const conversationId = oldConversationId || crypto.randomUUID();
const userMessageId = messageId;
const userParentMessageId = parentMessageId || '00000000-0000-0000-0000-000000000000'
@@ -26,17 +28,9 @@ router.post('/', async (req, res) => {
console.log('ask log', {
model,
...userMessage,
parentMessageId: userParentMessageId,
conversationId,
...convo
});
// if (messageId) {
// // existing conversation
// await saveMessage(userMessage);
// await deleteMessagesSince(userMessage);
// } else {}
res.writeHead(200, {
Connection: 'keep-alive',
'Content-Type': 'text/event-stream',
@@ -45,6 +39,10 @@ router.post('/', async (req, res) => {
'X-Accel-Buffering': 'no'
});
await saveMessage(userMessage);
await saveConvo({ ...userMessage, model, chatGptLabel, promptPrefix });
sendMessage(res, { message: userMessage, created: true });
try {
let tokens = '';
const progressCallback = async (partial) => {
@@ -107,7 +105,7 @@ router.post('/', async (req, res) => {
res.end();
} catch (error) {
console.log(error);
await deleteMessages({ messageId: userMessageId });
// await deleteMessages({ messageId: userMessageId });
handleError(res, error.message);
}
});
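Both the conversation id fallback and the nil-UUID parent fallback rely on crypto.randomUUID(), which is built into Node's crypto module from v14.17 onward, so no extra dependency is needed. A small standalone illustration of the id resolution the routes now perform:

const crypto = require('crypto');

const NIL_UUID = '00000000-0000-0000-0000-000000000000';

// Reuse client-supplied ids when present; otherwise mint a fresh UUID for the
// conversation and use the nil UUID as the sentinel parent of the first message.
function resolveIds({ conversationId, parentMessageId } = {}) {
  return {
    conversationId: conversationId || crypto.randomUUID(),
    userMessageId: crypto.randomUUID(),
    userParentMessageId: parentMessageId || NIL_UUID
  };
}

console.log(resolveIds());                              // brand-new conversation
console.log(resolveIds({ conversationId: 'abc-123' })); // continue an existing one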

View file

@@ -7,10 +7,12 @@ const { handleError, sendMessage } = require('./handlers');
const citationRegex = /\[\^\d+?\^]/g;
router.post('/', async (req, res) => {
const { model, text, parentMessageId, conversationId, ...convo } = req.body;
const { model, text, parentMessageId, conversationId: oldConversationId, ...convo } = req.body;
if (text.length === 0) {
return handleError(res, 'Prompt empty or too short');
}
const conversationId = oldConversationId || crypto.randomUUID();
const userMessageId = messageId;
const userParentMessageId = parentMessageId || '00000000-0000-0000-0000-000000000000'
@@ -27,17 +29,9 @@ router.post('/', async (req, res) => {
console.log('ask log', {
model,
...userMessage,
parentMessageId: userParentMessageId,
conversationId,
...convo
});
// if (messageId) {
// // existing conversation
// await saveMessage(userMessage);
// await deleteMessagesSince(userMessage);
// } else {}
res.writeHead(200, {
Connection: 'keep-alive',
'Content-Type': 'text/event-stream',
@@ -46,6 +40,10 @@ router.post('/', async (req, res) => {
'X-Accel-Buffering': 'no'
});
await saveMessage(userMessage);
await saveConvo({ ...userMessage, model, chatGptLabel, promptPrefix });
sendMessage(res, { message: userMessage, created: true });
try {
let tokens = '';
const progressCallback = async (partial) => {
@@ -117,7 +115,7 @@ router.post('/', async (req, res) => {
res.end();
} catch (error) {
console.log(error);
await deleteMessages({ messageId: userMessageId });
// await deleteMessages({ messageId: userMessageId });
handleError(res, error.message);
}
});
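saveConvo is now called with the user message plus the model settings before any tokens arrive, so it has to create the conversation record on the first message and simply refresh it on later ones. Its implementation is outside this diff; one common way to get that behaviour is an upsert keyed by conversationId. A sketch under that assumption, with a hypothetical Mongoose model whose fields only approximate the project's real schema:

const mongoose = require('mongoose');

// Hypothetical schema; the project's actual Conversation model may differ.
const Conversation = mongoose.model('Conversation', new mongoose.Schema({
  conversationId: { type: String, unique: true },
  parentMessageId: String,
  model: String,
  chatGptLabel: String,
  promptPrefix: String,
  title: { type: String, default: 'New conversation' }
}, { timestamps: true }));

// Create the conversation on first contact, update it on every later message.
async function saveConvo({ conversationId, ...fields }) {
  return Conversation.findOneAndUpdate(
    { conversationId },
    { $set: { conversationId, ...fields } },
    { new: true, upsert: true }
  );
}

module.exports = { saveConvo };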

View file

@@ -11,6 +11,7 @@ import ConvoIcon from '../svg/ConvoIcon';
export default function Conversation({
id,
model,
parentMessageId,
conversationId,
title = 'New conversation',
@@ -76,12 +77,12 @@ export default function Conversation({
if (chatGptLabel) {
dispatch(setModel('chatgptCustom'));
} else {
dispatch(setModel(data[1].sender));
dispatch(setModel(model));
}
if (modelMap[data[1].sender.toLowerCase()]) {
console.log('sender', data[1].sender);
dispatch(setCustomModel(data[1].sender.toLowerCase()));
if (modelMap[model.toLowerCase()]) {
console.log('sender', model);
dispatch(setCustomModel(model.toLowerCase()));
} else {
dispatch(setCustomModel(null));
}

View file

@@ -26,6 +26,7 @@ export default function Conversations({ conversations, conversationId, showMore
<Conversation
key={convo.conversationId}
id={convo.conversationId}
model={convo.model}
parentMessageId={convo.parentMessageId}
title={convo.title}
conversationId={conversationId}
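On the client, the conversation list now threads the persisted model down to each item, and the item dispatches that prop directly instead of inferring the model from data[1].sender. A compressed sketch of the new prop flow; setModel, setCustomModel and modelMap here are stand-ins for the project's real store actions and supported-model map:

import React from 'react';
import { useDispatch } from 'react-redux';

// Stand-ins for the project's store actions and supported-model map.
const setModel = (model) => ({ type: 'model/setModel', payload: model });
const setCustomModel = (model) => ({ type: 'model/setCustomModel', payload: model });
const modelMap = { chatgptcustom: true };

export default function Conversation({ model, chatGptLabel, title = 'New conversation' }) {
  const dispatch = useDispatch();

  // The model stored on the conversation record, not the sender of the second
  // message, now decides which model the UI switches to on click.
  const clickHandler = () => {
    if (chatGptLabel) {
      dispatch(setModel('chatgptCustom'));
    } else {
      dispatch(setModel(model));
    }
    if (modelMap[model.toLowerCase()]) {
      dispatch(setCustomModel(model.toLowerCase()));
    } else {
      dispatch(setCustomModel(null));
    }
  };

  return <button onClick={clickHandler}>{title}</button>;
}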

View file

@@ -28,14 +28,24 @@ export default function TextChat({ messages }) {
inputRef.current?.focus();
}, [convo?.conversationId, ])
const messageHandler = (data, currentState) => {
const { messages, currentMsg, message, sender } = currentState;
const messageHandler = (data, currentState, currentMsg) => {
const { messages, _currentMsg, message, sender } = currentState;
dispatch(setMessages([...messages, currentMsg, { sender, text: data }]));
};
const convoHandler = (data, currentState) => {
const createdHandler = (data, currentState, currentMsg) => {
const { conversationId } = currentMsg;
dispatch(
setConversation({
conversationId,
})
);
};
const convoHandler = (data, currentState, currentMsg) => {
const { requestMessage, responseMessage } = data;
const { messages, currentMsg, message, isCustomModel, sender } =
const { messages, _currentMsg, message, isCustomModel, sender } =
currentState;
const { model, chatGptLabel, promptPrefix } = message;
dispatch(
@@ -210,6 +220,7 @@ export default function TextChat({ messages }) {
}
const currentState = submission;
let currentMsg = currentState.currentMsg;
const { server, payload } = createPayload(submission);
const onMessage = (e) => {
if (stopStream) {
@@ -223,12 +234,15 @@ export default function TextChat({ messages }) {
// }
if (data.final) {
convoHandler(data, currentState);
convoHandler(data, currentState, currentMsg);
console.log('final', data);
} if (data.created) {
currentMsg = data.message;
createdHandler(data, currentState, currentMsg);
} else {
let text = data.text || data.response;
if (data.message) {
messageHandler(text, currentState);
messageHandler(text, currentState, currentMsg);
}
// console.log('dataStream', data);
}
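The rewritten onMessage distinguishes three kinds of events coming off the stream: a created event carrying the saved user message (and therefore the server-issued conversationId), incremental progress events with partial text, and a final event with the full request/response pair. A condensed sketch of that dispatch, assuming each SSE frame's data field is JSON; the handlers are simplified stand-ins for the component's createdHandler, messageHandler and convoHandler:

let currentMsg = null;

const createdHandler = (message) => {
  // Adopt the conversationId the server generated up front.
  currentMsg = message;
  console.log('conversation created', message.conversationId);
};

const messageHandler = (text) => {
  console.log('partial response', text);
};

const convoHandler = (data) => {
  console.log('final', data.requestMessage, data.responseMessage);
};

const onMessage = (e) => {
  const data = JSON.parse(e.data);
  if (data.final) {
    convoHandler(data);
  } else if (data.created) {
    createdHandler(data.message);
  } else if (data.message) {
    messageHandler(data.text || data.response);
  }
};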