feat: new endpoint-style submit

Wentao Lyu 2023-03-31 03:22:57 +08:00
parent 089ca5f120
commit adcc021c9e
22 changed files with 566 additions and 478 deletions

View file

@ -1,8 +1,22 @@
require('dotenv').config();
const { KeyvFile } = require('keyv-file');
const askBing = async ({ text, onProgress, convo }) => {
const askBing = async ({
text,
parentMessageId,
conversationId,
jailbreak,
jailbreakConversationId,
conversationSignature,
clientId,
invocationId,
toneStyle,
onProgress
}) => {
const { BingAIClient } = await import('@waylaidwanderer/chatgpt-api');
const store = {
store: new KeyvFile({ filename: './data/cache.json' })
};
const bingAIClient = new BingAIClient({
// "_U" cookie from bing.com
@ -10,23 +24,25 @@ const askBing = async ({ text, onProgress, convo }) => {
// If the above doesn't work, provide all your cookies as a string instead
// cookies: '',
debug: false,
cache: { store: new KeyvFile({ filename: './data/cache.json' }) },
cache: store,
proxy: process.env.PROXY || null
});
let options = { onProgress };
if (convo) {
options = { ...options, ...convo };
}
let options = {
jailbreakConversationId: jailbreakConversationId || jailbreak,
parentMessageId,
conversationId,
conversationSignature,
clientId,
invocationId,
toneStyle,
onProgress
};
if (options?.jailbreakConversationId == 'false') {
options.jailbreakConversationId = false;
}
if (convo.toneStyle) {
options.toneStyle = convo.toneStyle;
}
console.log('bing options', options);
const res = await bingAIClient.sendMessage(text, options);
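For reference, a minimal sketch of calling the reworked Bing client with its new flat argument style; the require path and the argument values are assumptions for illustration, only the field names come from this diff:
// sketch: invoking askBing with flat fields instead of a convo object (illustrative values)
const { askBing } = require('./app/clients/bingai'); // assumed path
(async () => {
  const res = await askBing({
    text: 'Hello Bing',
    parentMessageId: '00000000-0000-0000-0000-000000000000',
    conversationId: null,
    jailbreak: false,
    jailbreakConversationId: null,
    conversationSignature: null,
    clientId: null,
    invocationId: null,
    toneStyle: 'fast',
    onProgress: partial => process.stdout.write(partial)
  });
  console.log(res.response); // the reply text, as read by the askBingAI route later in this diff
})();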

View file

@ -1,40 +1,45 @@
require('dotenv').config();
const { KeyvFile } = require('keyv-file');
const set = new Set(["gpt-4", "text-davinci-002-render", "text-davinci-002-render-paid", "text-davinci-002-render-sha"]);
// const set = new Set([
// 'gpt-4',
// 'text-davinci-002-render',
// 'text-davinci-002-render-paid',
// 'text-davinci-002-render-sha'
// ]);
const clientOptions = {
// Warning: This will expose your access token to a third party. Consider the risks before using this.
reverseProxyUrl: 'https://bypass.duti.tech/api/conversation',
// Access token from https://chat.openai.com/api/auth/session
accessToken: process.env.CHATGPT_TOKEN,
// debug: true
proxy: process.env.PROXY || null,
};
// You can check which models you have access to by opening DevTools and going to the Network tab.
// Refresh the page and look at the response body for https://chat.openai.com/backend-api/models.
if (set.has(process.env.BROWSER_MODEL)) {
clientOptions.model = process.env.BROWSER_MODEL;
}
const browserClient = async ({ text, onProgress, convo, abortController }) => {
const browserClient = async ({
text,
parentMessageId,
conversationId,
model,
onProgress,
abortController
}) => {
const { ChatGPTBrowserClient } = await import('@waylaidwanderer/chatgpt-api');
const store = {
store: new KeyvFile({ filename: './data/cache.json' })
};
const clientOptions = {
// Warning: This will expose your access token to a third party. Consider the risks before using this.
reverseProxyUrl: 'https://bypass.duti.tech/api/conversation',
// Access token from https://chat.openai.com/api/auth/session
accessToken: process.env.CHATGPT_TOKEN,
model,
// debug: true
proxy: process.env.PROXY || null
};
const client = new ChatGPTBrowserClient(clientOptions, store);
let options = { onProgress, abortController };
if (!!convo.parentMessageId && !!convo.conversationId) {
options = { ...options, ...convo };
if (!!parentMessageId && !!conversationId) {
options = { ...options, parentMessageId, conversationId };
}
console.log('gptBrowser options', options, clientOptions);
// console.log('gptBrowser options', options, clientOptions);
/* will error if given a convoId at the start */
if (convo.parentMessageId.startsWith('0000')) {
if (parentMessageId === '00000000-0000-0000-0000-000000000000') {
delete options.conversationId;
}

View file

@ -1,30 +1,43 @@
require('dotenv').config();
const { KeyvFile } = require('keyv-file');
const set = new Set(['gpt-4', 'text-davinci-003', 'gpt-3.5-turbo', 'gpt-3.5-turbo-0301']);
// const set = new Set(['gpt-4', 'text-davinci-003', 'gpt-3.5-turbo', 'gpt-3.5-turbo-0301']);
const clientOptions = {
modelOptions: {
model: 'gpt-3.5-turbo'
},
proxy: process.env.PROXY || null,
debug: false
};
if (set.has(process.env.DEFAULT_API_GPT)) {
clientOptions.modelOptions.model = process.env.DEFAULT_API_GPT;
}
const askClient = async ({ text, onProgress, convo, abortController }) => {
const askClient = async ({
text,
parentMessageId,
conversationId,
model,
chatGptLabel,
promptPrefix,
temperature,
top_p,
presence_penalty,
onProgress,
abortController
}) => {
const ChatGPTClient = (await import('@waylaidwanderer/chatgpt-api')).default;
const store = {
store: new KeyvFile({ filename: './data/cache.json' })
};
const clientOptions = {
modelOptions: {
model: model,
temperature,
top_p,
presence_penalty
},
chatGptLabel,
promptPrefix,
proxy: process.env.PROXY || null,
debug: false
};
const client = new ChatGPTClient(process.env.OPENAI_KEY, clientOptions, store);
let options = { onProgress, abortController };
if (!!convo.parentMessageId && !!convo.conversationId) {
options = { ...options, ...convo };
if (!!parentMessageId && !!conversationId) {
options = { ...options, parentMessageId, conversationId };
}
const res = await client.sendMessage(text, options);
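Likewise, a minimal sketch of the refactored askClient call with the per-request model options the openAI endpoint now passes through; the require path and values are assumptions, and OPENAI_KEY must be set in the environment:
// sketch: calling the refactored askClient (illustrative values)
const { askClient } = require('./app/clients/chatgpt-client'); // assumed path
(async () => {
  const res = await askClient({
    text: 'Write a haiku about endpoints',
    parentMessageId: '00000000-0000-0000-0000-000000000000',
    conversationId: null,
    model: 'gpt-3.5-turbo',
    chatGptLabel: null,
    promptPrefix: null,
    temperature: 0.8,
    top_p: 1,
    presence_penalty: 1,
    onProgress: partial => process.stdout.write(partial),
    abortController: new AbortController()
  });
  console.log(res.response);
})();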

View file

@ -1,35 +0,0 @@
require('dotenv').config();
const { KeyvFile } = require('keyv-file');
const clientOptions = {
modelOptions: {
model: 'gpt-3.5-turbo'
},
proxy: process.env.PROXY || null,
debug: false
};
const customClient = async ({ text, onProgress, convo, promptPrefix, chatGptLabel, abortController }) => {
const ChatGPTClient = (await import('@waylaidwanderer/chatgpt-api')).default;
const store = {
store: new KeyvFile({ filename: './data/cache.json' })
};
clientOptions.chatGptLabel = chatGptLabel;
if (promptPrefix?.length > 0) {
clientOptions.promptPrefix = promptPrefix;
}
const client = new ChatGPTClient(process.env.OPENAI_KEY, clientOptions, store);
let options = { onProgress, abortController };
if (!!convo.parentMessageId && !!convo.conversationId) {
options = { ...options, ...convo };
}
const res = await client.sendMessage(text, options);
return res;
};
module.exports = customClient;

View file

@ -2,7 +2,6 @@ const { askClient } = require('./clients/chatgpt-client');
const { browserClient } = require('./clients/chatgpt-browser');
const { askBing } = require('./clients/bingai');
const { askSydney } = require('./clients/sydney');
const customClient = require('./clients/chatgpt-custom');
const titleConvo = require('./titleConvo');
const getCitations = require('../lib/parse/getCitations');
const citeText = require('../lib/parse/citeText');
@ -10,10 +9,9 @@ const citeText = require('../lib/parse/citeText');
module.exports = {
askClient,
browserClient,
customClient,
askBing,
askSydney,
titleConvo,
getCitations,
citeText,
citeText
};

View file

@ -16,7 +16,7 @@ const proxyEnvToAxiosProxy = proxyString => {
return proxyConfig;
};
const titleConvo = async ({ model, text, response }) => {
const titleConvo = async ({ endpoint, text, response }) => {
let title = 'New Chat';
const messages = [
{

View file

@ -27,11 +27,6 @@ module.exports = {
if (!update.jailbreakConversationId) {
update.jailbreakConversationId = null;
}
if (update.model !== 'chatgptCustom' && update.chatGptLabel && update.promptPrefix) {
console.log('Validation error: resetting chatgptCustom fields', update);
update.chatGptLabel = null;
update.promptPrefix = null;
}
return await Conversation.findOneAndUpdate(
{ conversationId: conversationId, user },
@ -149,10 +144,10 @@ module.exports = {
}
},
deleteConvos: async (user, filter) => {
let toRemove = await Conversation.find({...filter, user}).select('conversationId')
let toRemove = await Conversation.find({ ...filter, user }).select('conversationId');
const ids = toRemove.map(instance => instance.conversationId);
let deleteCount = await Conversation.deleteMany({...filter, user}).exec();
deleteCount.messages = await deleteMessages({conversationId: {$in: ids}});
let deleteCount = await Conversation.deleteMany({ ...filter, user }).exec();
deleteCount.messages = await deleteMessages({ conversationId: { $in: ids } });
return deleteCount;
}
};
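A small usage sketch of the cleaned-up deleteConvos helper; the require path, user value, and filter are made up for illustration:
// sketch: deleting a user's matching conversations plus their messages (illustrative values)
const { deleteConvos } = require('./models/Conversation'); // assumed path
(async () => {
  const result = await deleteConvos('someUser', { conversationId: 'abc-123' });
  // result is the deleteMany result (including deletedCount), with a messages field
  // added from deleteMessages for the matching conversationIds
  console.log(result);
})();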

View file

@ -60,12 +60,13 @@ const projectPath = path.join(__dirname, '..', '..', 'client');
app.use('/api/prompts', routes.authenticatedOr401, routes.prompts);
app.use('/auth', routes.auth);
app.get('/api/models', function (req, res) {
const hasOpenAI = !!process.env.OPENAI_KEY;
const hasChatGpt = !!process.env.CHATGPT_TOKEN;
const hasBing = !!process.env.BING_TOKEN;
app.get('/api/endpoints', function (req, res) {
const azureOpenAI = !!process.env.AZURE_OPENAI_KEY;
const openAI = !!process.env.OPENAI_KEY;
const bingAI = !!process.env.BING_TOKEN;
const chatGPTBrowser = !!process.env.CHATGPT_TOKEN;
res.send(JSON.stringify({ hasOpenAI, hasChatGpt, hasBing }));
res.send(JSON.stringify({ azureOpenAI, openAI, bingAI, chatGPTBrowser }));
});
app.get('/*', routes.authenticatedOrRedirect, function (req, res) {
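The client can now discover which backends are configured in one call; a sketch mirroring the client-side fetch later in this diff (the flag values are illustrative — each one only reflects whether the matching env var is set):
// sketch: querying the new endpoint-availability route (illustrative values)
import axios from 'axios';
axios.get('/api/endpoints', { withCredentials: true }).then(({ data }) => {
  // e.g. { azureOpenAI: false, openAI: true, bingAI: true, chatGPTBrowser: false }
  console.log(data);
});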

View file

@ -1,148 +1,13 @@
const express = require('express');
const crypto = require('crypto');
const router = express.Router();
const askBing = require('./askBing');
const askSydney = require('./askSydney');
const { titleConvo, askClient, browserClient, customClient } = require('../../app/');
const { saveMessage, getConvoTitle, saveConvo, updateConvo } = require('../../models');
const { handleError, sendMessage, createOnProgress, handleText } = require('./handlers');
// const askAzureOpenAI = require('./askAzureOpenAI');
const askOpenAI = require('./askOpenAI');
const askBingAI = require('./askBingAI');
const askChatGPTBrowser = require('./askChatGPTBrowser');
router.use('/bing', askBing);
router.use('/sydney', askSydney);
router.post('/', async (req, res) => {
const {
model,
text,
overrideParentMessageId = null,
parentMessageId,
conversationId: oldConversationId,
...convo
} = req.body;
if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
const conversationId = oldConversationId || crypto.randomUUID();
const userMessageId = crypto.randomUUID();
const userParentMessageId = parentMessageId || '00000000-0000-0000-0000-000000000000';
const userMessage = {
messageId: userMessageId,
sender: 'User',
text,
parentMessageId: userParentMessageId,
conversationId,
isCreatedByUser: true
};
console.log('ask log', {
model,
...userMessage,
...convo
});
if (!overrideParentMessageId) {
await saveMessage(userMessage);
await saveConvo(req?.session?.user?.username, { ...userMessage, model, ...convo });
}
return await ask({ userMessage, model, convo, preSendRequest: true, overrideParentMessageId, req, res });
});
const ask = async ({
userMessage,
overrideParentMessageId = null,
model,
convo,
preSendRequest = true,
req,
res
}) => {
const {
text,
parentMessageId: userParentMessageId,
conversationId,
messageId: userMessageId
} = userMessage;
const client = model === 'chatgpt' ? askClient : model === 'chatgptCustom' ? customClient : browserClient;
res.writeHead(200, {
Connection: 'keep-alive',
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache, no-transform',
'Access-Control-Allow-Origin': '*',
'X-Accel-Buffering': 'no'
});
if (preSendRequest) sendMessage(res, { message: userMessage, created: true });
try {
const progressCallback = createOnProgress();
const abortController = new AbortController();
res.on('close', () => abortController.abort());
let gptResponse = await client({
text,
onProgress: progressCallback.call(null, model, { res, text }),
convo: { parentMessageId: userParentMessageId, conversationId, ...convo },
...convo,
abortController
});
gptResponse.text = gptResponse.response;
console.log('CLIENT RESPONSE', gptResponse);
if (!gptResponse.parentMessageId) {
gptResponse.parentMessageId = overrideParentMessageId || userMessageId;
delete gptResponse.response;
}
gptResponse.sender = model === 'chatgptCustom' ? convo.chatGptLabel : model;
gptResponse.model = model;
gptResponse.text = await handleText(gptResponse);
if (convo.chatGptLabel?.length > 0 && model === 'chatgptCustom') {
gptResponse.chatGptLabel = convo.chatGptLabel;
}
if (convo.promptPrefix?.length > 0 && model === 'chatgptCustom') {
gptResponse.promptPrefix = convo.promptPrefix;
}
gptResponse.parentMessageId = overrideParentMessageId || userMessageId;
if (model === 'chatgptBrowser' && userParentMessageId.startsWith('000')) {
await saveMessage({ ...userMessage, conversationId: gptResponse.conversationId });
}
await saveMessage(gptResponse);
await updateConvo(req?.session?.user?.username, {
...gptResponse,
oldConvoId: model === 'chatgptBrowser' && conversationId
});
sendMessage(res, {
title: await getConvoTitle(req?.session?.user?.username, conversationId),
final: true,
requestMessage: userMessage,
responseMessage: gptResponse
});
res.end();
if (userParentMessageId == '00000000-0000-0000-0000-000000000000') {
const title = await titleConvo({ model, text, response: gptResponse });
await updateConvo(req?.session?.user?.username, {
conversationId: model === 'chatgptBrowser' ? gptResponse.conversationId : conversationId,
title
});
}
} catch (error) {
const errorMessage = {
messageId: crypto.randomUUID(),
sender: model,
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
error: true,
text: error.message
};
await saveMessage(errorMessage);
handleError(res, errorMessage);
}
};
// router.use('/azureOpenAI', askAzureOpenAI);
router.use('/openAI', askOpenAI);
router.use('/bingAI', askBingAI);
router.use('/chatGPTBrowser', askChatGPTBrowser);
module.exports = router;
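The monolithic /api/ask handler is replaced by one sub-router per backend. A sketch of how the router is presumably mounted (the mounting line, path, and port are assumptions outside this diff); the resulting URLs match the endpointUrlMap in the client's createPayload later in this diff:
// sketch: assumed mounting of the new ask router
const express = require('express');
const app = express();
app.use(express.json());
app.use('/api/ask', require('./routes/api/ask')); // assumed path
// resulting routes: POST /api/ask/openAI, /api/ask/bingAI, /api/ask/chatGPTBrowser
// (/api/ask/azureOpenAI is still commented out above)
app.listen(3080); // port is an assumption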

View file

@ -1,27 +1,26 @@
const express = require('express');
const crypto = require('crypto');
const router = express.Router();
const { titleConvo, askBing } = require('../../app/');
const { saveBingMessage, getConvoTitle, saveConvo } = require('../../models');
const { titleConvo, askBing } = require('../../app');
const { saveBingMessage, getConvoTitle, saveConvo, getConvo } = require('../../models');
const { handleError, sendMessage, createOnProgress, handleText } = require('./handlers');
router.post('/', async (req, res) => {
const {
model,
endpoint,
text,
overrideParentMessageId=null,
messageId,
overrideParentMessageId = null,
parentMessageId,
conversationId: oldConversationId,
...convo
conversationId: oldConversationId
} = req.body;
if (text.length === 0) {
return handleError(res, { text: 'Prompt empty or too short' });
}
if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
if (endpoint !== 'bingAI') return handleError(res, { text: 'Illegal request' });
// build user message
const conversationId = oldConversationId || crypto.randomUUID();
const isNewConversation = !oldConversationId;
const userMessageId = convo.messageId;
const userMessageId = messageId;
const userParentMessageId = parentMessageId || '00000000-0000-0000-0000-000000000000';
let userMessage = {
messageId: userMessageId,
@ -32,22 +31,33 @@ router.post('/', async (req, res) => {
isCreatedByUser: true
};
// build endpoint option
const endpointOption = {
jailbreak: req.body?.jailbreak || false,
jailbreakConversationId: req.body?.jailbreakConversationId || null,
conversationSignature: req.body?.conversationSignature || null,
clientId: req.body?.clientId || null,
invocationId: req.body?.invocationId || null,
toneStyle: req.body?.toneStyle || 'fast',
suggestions: req.body?.suggestions || []
};
console.log('ask log', {
model,
...convo,
...userMessage
userMessage,
endpointOption,
conversationId
});
if (!overrideParentMessageId) {
await saveBingMessage(userMessage);
await saveConvo(req?.session?.user?.username, { model, ...convo, ...userMessage });
await saveConvo(req?.session?.user?.username, { ...userMessage, ...endpointOption, conversationId });
}
return await ask({
isNewConversation,
userMessage,
model,
convo,
endpointOption,
conversationId,
preSendRequest: true,
overrideParentMessageId,
req,
@ -57,20 +67,15 @@ router.post('/', async (req, res) => {
const ask = async ({
isNewConversation,
overrideParentMessageId = null,
userMessage,
model,
convo,
endpointOption,
conversationId,
preSendRequest = true,
overrideParentMessageId = null,
req,
res
}) => {
let {
text,
parentMessageId: userParentMessageId,
conversationId,
messageId: userMessageId
} = userMessage;
let { text, parentMessageId: userParentMessageId, messageId: userMessageId } = userMessage;
res.writeHead(200, {
Connection: 'keep-alive',
@ -84,99 +89,82 @@ const ask = async ({
try {
const progressCallback = createOnProgress();
const abortController = new AbortController();
res.on('close', () => {
console.log('The client has disconnected.');
// perform other operations
abortController.abort();
})
res.on('close', () => abortController.abort());
let response = await askBing({
text,
onProgress: progressCallback.call(null, model, {
parentMessageId: userParentMessageId,
conversationId,
...endpointOption,
onProgress: progressCallback.call(null, {
res,
text,
parentMessageId: overrideParentMessageId || userMessageId
}),
convo: {
...convo,
parentMessageId: userParentMessageId,
conversationId
},
abortController
});
console.log('BING RESPONSE', response);
// console.dir(response, { depth: null });
userMessage.conversationSignature =
convo.conversationSignature || response.conversationSignature;
endpointOption.conversationSignature || response.conversationSignature;
userMessage.conversationId = response.conversationId || conversationId;
userMessage.invocationId = response.invocationId;
userMessage.invocationId = endpointOption.invocationId;
userMessage.messageId = response.details.requestId || userMessageId;
if (!overrideParentMessageId)
await saveBingMessage({ oldMessageId: userMessageId, ...userMessage });
if (!overrideParentMessageId) await saveBingMessage({ oldMessageId: userMessageId, ...userMessage });
// The Bing API will not use our conversationId on the first request,
// so replace the placeholder conversationId with the real one.
// Note: the API will also create a new conversationId when given an invalid userMessage.parentMessageId,
// but in that case, keep the conversationId unchanged and create a new convo instead.
if (conversationId != userMessage.conversationId && isNewConversation)
await saveConvo(
req?.session?.user?.username,
{
conversationId: conversationId,
newConversationId: userMessage.conversationId
}
);
await saveConvo(req?.session?.user?.username, {
conversationId: conversationId,
newConversationId: userMessage.conversationId
});
conversationId = userMessage.conversationId;
response.text = response.response || response.details.spokenText || '**Bing refused to answer.**';
// delete response.response;
// response.id = response.details.messageId;
response.suggestions =
response.details.suggestedResponses &&
response.details.suggestedResponses.map((s) => s.text);
response.sender = model;
response.details.suggestedResponses && response.details.suggestedResponses.map(s => s.text);
response.sender = endpointOption?.jailbreak ? 'Sydney' : 'BingAI';
// response.final = true;
response.messageId = response.details.messageId;
// override the parentMessageId for regeneration.
response.parentMessageId =
overrideParentMessageId || response.details.requestId || userMessageId;
response.parentMessageId = overrideParentMessageId || response.details.requestId || userMessageId;
response.text = await handleText(response, true);
await saveBingMessage(response);
await saveConvo(req?.session?.user?.username, { model, chatGptLabel: null, promptPrefix: null, ...convo, ...response });
await saveConvo(req?.session?.user?.username, {
...endpointOption,
...response
});
sendMessage(res, {
title: await getConvoTitle(req?.session?.user?.username, conversationId),
final: true,
conversation: await getConvo(req?.session?.user?.username, conversationId),
requestMessage: userMessage,
responseMessage: response
});
res.end();
if (userParentMessageId == '00000000-0000-0000-0000-000000000000') {
const title = await titleConvo({ model, text, response });
const title = await titleConvo({ endpoint: endpointOption?.endpoint, text, response });
await saveConvo(
req?.session?.user?.username,
{
...convo,
...response,
conversationId,
title
}
);
await saveConvo(req?.session?.user?.username, {
conversationId: conversationId,
title
});
}
} catch (error) {
console.log(error);
// await deleteMessages({ messageId: userMessageId });
const errorMessage = {
messageId: crypto.randomUUID(),
sender: model,
sender: endpointOption?.jailbreak ? 'Sydney' : 'BingAI',
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
error: true,
@ -187,4 +175,4 @@ const ask = async ({
}
};
module.exports = router;
module.exports = router;
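For reference, a sketch of the request body this route now expects; only endpoint, text, and messageId are taken from the client as-is, while the rest fall back to the defaults shown in endpointOption (all values here are illustrative):
// sketch: example POST body for /api/ask/bingAI (illustrative values)
const bingAskBody = {
  endpoint: 'bingAI',          // any other value is rejected as an illegal request
  text: 'What can you do?',
  messageId: 'client-generated-uuid',
  parentMessageId: null,       // defaults to the all-zero placeholder
  conversationId: null,        // a new one is generated when omitted
  jailbreak: false,
  jailbreakConversationId: null,
  conversationSignature: null,
  clientId: null,
  invocationId: null,
  toneStyle: 'fast',
  suggestions: []
};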

View file

@ -0,0 +1,156 @@
const express = require('express');
const crypto = require('crypto');
const router = express.Router();
const { titleConvo, browserClient } = require('../../app/');
const { saveMessage, getConvoTitle, saveConvo, updateConvo, getConvo } = require('../../models');
const { handleError, sendMessage, createOnProgress, handleText } = require('./handlers');
router.post('/', async (req, res) => {
const {
endpoint,
text,
overrideParentMessageId = null,
parentMessageId,
conversationId: oldConversationId
} = req.body;
if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
if (endpoint !== 'chatGPTBrowser') return handleError(res, { text: 'Illegal request' });
// build user message
const conversationId = oldConversationId || crypto.randomUUID();
const userMessageId = crypto.randomUUID();
const userParentMessageId = parentMessageId || '00000000-0000-0000-0000-000000000000';
const userMessage = {
messageId: userMessageId,
sender: 'User',
text,
parentMessageId: userParentMessageId,
conversationId,
isCreatedByUser: true
};
// build endpoint option
const endpointOption = {
model: req.body?.model || 'text-davinci-002-render-sha'
};
console.log('ask log', {
userMessage,
endpointOption,
conversationId
});
if (!overrideParentMessageId) {
await saveMessage(userMessage);
await saveConvo(req?.session?.user?.username, { ...userMessage, ...endpointOption, conversationId });
}
return await ask({
userMessage,
endpointOption,
conversationId,
preSendRequest: true,
overrideParentMessageId,
req,
res
});
});
const ask = async ({
userMessage,
endpointOption,
conversationId,
preSendRequest = true,
overrideParentMessageId = null,
req,
res
}) => {
const { text, parentMessageId: userParentMessageId, messageId: userMessageId } = userMessage;
const client = browserClient;
res.writeHead(200, {
Connection: 'keep-alive',
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache, no-transform',
'Access-Control-Allow-Origin': '*',
'X-Accel-Buffering': 'no'
});
if (preSendRequest) sendMessage(res, { message: userMessage, created: true });
try {
const progressCallback = createOnProgress();
const abortController = new AbortController();
res.on('close', () => abortController.abort());
let gptResponse = await client({
text,
parentMessageId: userParentMessageId,
conversationId,
...endpointOption,
onProgress: progressCallback.call(null, { res, text }),
abortController
});
gptResponse.text = gptResponse.response;
console.log('CLIENT RESPONSE', gptResponse);
if (!gptResponse.parentMessageId) {
gptResponse.parentMessageId = overrideParentMessageId || userMessageId;
delete gptResponse.response;
}
gptResponse.sender = 'ChatGPT';
// gptResponse.model = model;
gptResponse.text = await handleText(gptResponse);
// if (convo.chatGptLabel?.length > 0 && model === 'chatgptCustom') {
// gptResponse.chatGptLabel = convo.chatGptLabel;
// }
// if (convo.promptPrefix?.length > 0 && model === 'chatgptCustom') {
// gptResponse.promptPrefix = convo.promptPrefix;
// }
gptResponse.parentMessageId = overrideParentMessageId || userMessageId;
if (userParentMessageId.startsWith('000')) {
await saveMessage({ ...userMessage, conversationId: gptResponse.conversationId });
}
await saveMessage(gptResponse);
await updateConvo(req?.session?.user?.username, {
...gptResponse,
oldConvoId: conversationId
});
sendMessage(res, {
title: await getConvoTitle(req?.session?.user?.username, conversationId),
final: true,
conversation: await getConvo(req?.session?.user?.username, conversationId),
requestMessage: userMessage,
responseMessage: gptResponse
});
res.end();
if (userParentMessageId == '00000000-0000-0000-0000-000000000000') {
const title = await titleConvo({ endpoint: endpointOption?.endpoint, text, response: gptResponse });
await updateConvo(req?.session?.user?.username, {
conversationId: gptResponse.conversationId,
title
});
}
} catch (error) {
const errorMessage = {
messageId: crypto.randomUUID(),
sender: 'ChatGPT',
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
error: true,
text: error.message
};
await saveMessage(errorMessage);
handleError(res, errorMessage);
}
};
module.exports = router;
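For context, a sketch of the server-sent events a client receives from these ask routes: first the echoed user message flagged as created, then streamed progress from onProgress, then a final payload whose shape follows the sendMessage calls above (all field values are illustrative):
// sketch: SSE payloads emitted by the new ask routes (illustrative values)
const createdEvent = {
  created: true,
  message: {
    messageId: 'uuid', sender: 'User', text: 'hi',
    parentMessageId: '00000000-0000-0000-0000-000000000000',
    conversationId: 'uuid', isCreatedByUser: true
  }
};
const finalEvent = {
  final: true,
  title: 'New Chat',
  conversation: { /* result of getConvo for this conversation */ },
  requestMessage: { /* the saved user message */ },
  responseMessage: { /* the saved model response */ }
};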

View file

@ -0,0 +1,157 @@
const express = require('express');
const crypto = require('crypto');
const router = express.Router();
const { titleConvo, askClient } = require('../../app/');
const { saveMessage, getConvoTitle, saveConvo, updateConvo, getConvo } = require('../../models');
const { handleError, sendMessage, createOnProgress, handleText } = require('./handlers');
router.post('/', async (req, res) => {
const {
endpoint,
text,
overrideParentMessageId = null,
parentMessageId,
conversationId: oldConversationId
} = req.body;
if (text.length === 0) return handleError(res, { text: 'Prompt empty or too short' });
if (endpoint !== 'openAI') return handleError(res, { text: 'Illegal request' });
// build user message
const conversationId = oldConversationId || crypto.randomUUID();
const userMessageId = crypto.randomUUID();
const userParentMessageId = parentMessageId || '00000000-0000-0000-0000-000000000000';
const userMessage = {
messageId: userMessageId,
sender: 'User',
text,
parentMessageId: userParentMessageId,
conversationId,
isCreatedByUser: true
};
// build endpoint option
const endpointOption = {
model: req.body?.model || 'gpt-3.5-turbo',
chatGptLabel: req.body?.chatGptLabel || null,
promptPrefix: req.body?.promptPrefix || null,
temperature: req.body?.temperature || 0.8,
top_p: req.body?.top_p || 1,
presence_penalty: req.body?.presence_penalty || 1
};
console.log('ask log', {
userMessage,
endpointOption,
conversationId
});
if (!overrideParentMessageId) {
await saveMessage(userMessage);
await saveConvo(req?.session?.user?.username, { ...userMessage, ...endpointOption, conversationId });
}
return await ask({
userMessage,
endpointOption,
conversationId,
preSendRequest: true,
overrideParentMessageId,
req,
res
});
});
const ask = async ({
userMessage,
endpointOption,
conversationId,
preSendRequest = true,
overrideParentMessageId = null,
req,
res
}) => {
const { text, parentMessageId: userParentMessageId, messageId: userMessageId } = userMessage;
const client = askClient;
res.writeHead(200, {
Connection: 'keep-alive',
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache, no-transform',
'Access-Control-Allow-Origin': '*',
'X-Accel-Buffering': 'no'
});
if (preSendRequest) sendMessage(res, { message: userMessage, created: true });
try {
const progressCallback = createOnProgress();
const abortController = new AbortController();
res.on('close', () => abortController.abort());
let gptResponse = await client({
text,
parentMessageId: userParentMessageId,
conversationId,
...endpointOption,
onProgress: progressCallback.call(null, { res, text }),
abortController
});
gptResponse.text = gptResponse.response;
console.log('CLIENT RESPONSE', gptResponse);
if (!gptResponse.parentMessageId) {
gptResponse.parentMessageId = overrideParentMessageId || userMessageId;
delete gptResponse.response;
}
gptResponse.sender = endpointOption?.chatGptLabel || 'ChatGPT';
// gptResponse.model = model;
gptResponse.text = await handleText(gptResponse);
// if (convo.chatGptLabel?.length > 0 && model === 'chatgptCustom') {
// gptResponse.chatGptLabel = convo.chatGptLabel;
// }
// if (convo.promptPrefix?.length > 0 && model === 'chatgptCustom') {
// gptResponse.promptPrefix = convo.promptPrefix;
// }
gptResponse.parentMessageId = overrideParentMessageId || userMessageId;
await saveMessage(gptResponse);
await updateConvo(req?.session?.user?.username, {
...gptResponse,
oldConvoId: conversationId
});
sendMessage(res, {
title: await getConvoTitle(req?.session?.user?.username, conversationId),
final: true,
conversation: await getConvo(req?.session?.user?.username, conversationId),
requestMessage: userMessage,
responseMessage: gptResponse
});
res.end();
if (userParentMessageId == '00000000-0000-0000-0000-000000000000') {
const title = await titleConvo({ endpoint: endpointOption?.endpoint, text, response: gptResponse });
await updateConvo(req?.session?.user?.username, {
conversationId: conversationId,
title
});
}
} catch (error) {
const errorMessage = {
messageId: crypto.randomUUID(),
sender: endpointOption?.chatGptLabel || 'ChatGPT',
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
error: true,
text: error.message
};
await saveMessage(errorMessage);
handleError(res, errorMessage);
}
};
module.exports = router;
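A sketch of the minimal body /api/ask/openAI accepts; missing fields pick up the server-side defaults from endpointOption above (model 'gpt-3.5-turbo', null chatGptLabel/promptPrefix, temperature 0.8, top_p 1, presence_penalty 1). The real client streams the response through its SSE helper rather than a plain HTTP call:
// sketch: minimal request body for the openAI ask route (illustrative values)
const minimalOpenAIBody = {
  endpoint: 'openAI',   // any other value is rejected as an illegal request
  text: 'Summarize this commit'
};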

View file

@ -68,9 +68,8 @@ const createOnProgress = () => {
i++;
};
const onProgress = (model, opts) => {
const bingModels = new Set(['bingai', 'sydney']);
return _.partialRight(progressCallback, { ...opts, bing: bingModels.has(model) });
const onProgress = opts => {
return _.partialRight(progressCallback, opts);
};
return onProgress;

View file

@ -38,7 +38,7 @@ const router = createBrowserRouter([
const App = () => {
const [user, setUser] = useRecoilState(store.user);
const setIsSearchEnabled = useSetRecoilState(store.isSearchEnabled);
const setModelsFilter = useSetRecoilState(store.modelsFilter);
const setEndpointsFilter = useSetRecoilState(store.endpointsFilter);
useEffect(() => {
// fetch if search enabled
@ -58,19 +58,12 @@ const App = () => {
// fetch models
axios
.get('/api/models', {
.get('/api/endpoints', {
timeout: 1000,
withCredentials: true
})
.then(({ data }) => {
const filter = {
chatgpt: data?.hasOpenAI,
chatgptCustom: data?.hasOpenAI,
bingai: data?.hasBing,
sydney: data?.hasBing,
chatgptBrowser: data?.hasChatGpt
};
setModelsFilter(filter);
setEndpointsFilter(data);
})
.catch(error => {
console.error(error);

View file

@ -1,14 +1,13 @@
import React, { useEffect, useRef, useState } from 'react';
import { useRecoilState, useResetRecoilState, useSetRecoilState } from 'recoil';
import { useEffect } from 'react';
import { useRecoilValue, useResetRecoilState, useSetRecoilState } from 'recoil';
import { SSE } from '~/utils/sse';
import { useMessageHandler } from '../../utils/handleSubmit';
import createPayload from '~/utils/createPayload';
import store from '~/store';
export default function MessageHandler({ messages }) {
const [submission, setSubmission] = useRecoilState(store.submission);
const [isSubmitting, setIsSubmitting] = useRecoilState(store.isSubmitting);
export default function MessageHandler() {
const submission = useRecoilValue(store.submission);
const setIsSubmitting = useSetRecoilState(store.isSubmitting);
const setMessages = useSetRecoilState(store.messages);
const setConversation = useSetRecoilState(store.conversation);
const resetLatestMessage = useResetRecoilState(store.latestMessage);
@ -105,10 +104,9 @@ export default function MessageHandler({ messages }) {
};
const finalHandler = (data, submission) => {
const { conversation, messages, message, initialResponse, isRegenerate = false } = submission;
const { messages, isRegenerate = false } = submission;
const { requestMessage, responseMessage } = data;
const { conversationId } = requestMessage;
const { requestMessage, responseMessage, conversation } = data;
// update the messages
if (isRegenerate) setMessages([...messages, responseMessage]);
@ -127,66 +125,14 @@ export default function MessageHandler({ messages }) {
}, 5000);
}
const { model, chatGptLabel, promptPrefix } = conversation;
const isBing = model === 'bingai' || model === 'sydney';
if (!isBing) {
const { title } = data;
const { conversationId } = responseMessage;
setConversation(prevState => ({
...prevState,
title,
conversationId,
jailbreakConversationId: null,
conversationSignature: null,
clientId: null,
invocationId: null,
chatGptLabel,
promptPrefix,
latestMessage: null
}));
} else if (model === 'bingai') {
const { title } = data;
const { conversationSignature, clientId, conversationId, invocationId } = responseMessage;
setConversation(prevState => ({
...prevState,
title,
conversationId,
jailbreakConversationId: null,
conversationSignature,
clientId,
invocationId,
chatGptLabel,
promptPrefix,
latestMessage: null
}));
} else if (model === 'sydney') {
const { title } = data;
const {
jailbreakConversationId,
parentMessageId,
conversationSignature,
clientId,
conversationId,
invocationId
} = responseMessage;
setConversation(prevState => ({
...prevState,
title,
conversationId,
jailbreakConversationId,
conversationSignature,
clientId,
invocationId,
chatGptLabel,
promptPrefix,
latestMessage: null
}));
}
setConversation(prevState => ({
...prevState,
...conversation
}));
};
const errorHandler = (data, submission) => {
const { conversation, messages, message, initialResponse, isRegenerate = false } = submission;
const { messages, message } = submission;
console.log('Error:', data);
const errorResponse = {
@ -203,7 +149,6 @@ export default function MessageHandler({ messages }) {
if (submission === null) return;
if (Object.keys(submission).length === 0) return;
const { messages, initialResponse, isRegenerate = false } = submission;
let { message } = submission;
const { server, payload } = createPayload(submission);
@ -224,9 +169,6 @@ export default function MessageHandler({ messages }) {
if (data.created) {
message = {
...data.message,
model: message?.model,
chatGptLabel: message?.chatGptLabel,
promptPrefix: message?.promptPrefix,
overrideParentMessageId: message?.overrideParentMessageId
};
createdHandler(data, { ...submission, message });
@ -245,7 +187,7 @@ export default function MessageHandler({ messages }) {
events.onopen = () => console.log('connection is opened');
events.oncancel = e => cancelHandler(latestResponseText, { ...submission, message });
events.oncancel = () => cancelHandler(latestResponseText, { ...submission, message });
events.onerror = function (e) {
console.log('error in opening conn.');

View file

@ -97,6 +97,8 @@ export default function Messages({ isSearchView = false }) {
if (model) _title += `: ${model}`;
} else if (endpoint === null) {
null;
} else {
null;
}
return _title;
}

View file

@ -79,7 +79,7 @@ const useConversation = () => {
if (endpoint === null)
// get the default model
conversation = getDefaultConversation({ conversation, availableEndpoints, prevConversation });
console.log(conversation);
setConversation(conversation);
setMessages(messages);
resetLatestMessage();

View file

@ -1,6 +1,7 @@
import conversation from './conversation';
import conversations from './conversations';
import models from './models';
import endpoints from './endpoints';
import user from './user';
import text from './text';
import submission from './submission';
@ -10,6 +11,7 @@ export default {
...conversation,
...conversations,
...models,
...endpoints,
...user,
text,
...submission,

View file

@ -1,55 +1,22 @@
export default function createPayload(submission) {
const { conversation, messages, message, initialResponse, isRegenerate = false } = submission;
const { conversation, message, endpointOption } = submission;
const { conversationId } = conversation;
const { endpoint } = endpointOption;
const endpoint = `/api/ask`;
const {
model,
chatGptLabel,
promptPrefix,
jailbreakConversationId,
conversationId,
conversationSignature,
clientId,
invocationId,
toneStyle
} = conversation;
const endpointUrlMap = {
azureOpenAI: '/api/ask/azureOpenAI',
openAI: '/api/ask/openAI',
bingAI: '/api/ask/bingAI',
chatGPTBrowser: '/api/ask/chatGPTBrowser'
};
const server = endpointUrlMap[endpoint];
let payload = {
...message,
...{
model,
chatGptLabel,
promptPrefix,
conversationId
}
...endpointOption,
conversationId
};
// if (!payload.conversationId)
// if (convo?.conversationId && convo?.parentMessageId) {
// payload = {
// ...payload,
// conversationId: convo.conversationId,
// parentMessageId: convo.parentMessageId || '00000000-0000-0000-0000-000000000000'
// };
// }
const isBing = model === 'bingai' || model === 'sydney';
if (isBing && !conversationId) {
payload.toneStyle = toneStyle || 'fast';
}
if (isBing && conversationId) {
payload = {
...payload,
jailbreakConversationId,
conversationSignature,
clientId,
invocationId
};
}
let server = endpoint;
server = model === 'bingai' ? server + '/bing' : server;
server = model === 'sydney' ? server + '/sydney' : server;
return { server, payload };
}
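A sketch of what the simplified createPayload produces for an endpoint-style submission; the submission object below is hand-built for illustration:
// sketch: createPayload input/output with the new submission shape (illustrative values)
import createPayload from '~/utils/createPayload';
const submission = {
  conversation: { conversationId: 'abc-123' },
  message: { text: 'hi', messageId: 'msg-1', parentMessageId: '00000000-0000-0000-0000-000000000000' },
  endpointOption: { endpoint: 'openAI', model: 'gpt-3.5-turbo', temperature: 0.8 }
};
const { server, payload } = createPayload(submission);
// server  -> '/api/ask/openAI'
// payload -> { ...message, ...endpointOption, conversationId: 'abc-123' }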

View file

@ -19,7 +19,7 @@ const buildDefaultConversation = ({ conversation, endpoint, lastConversationSetu
conversationSignature: lastConversationSetup?.conversationSignature || null,
clientId: lastConversationSetup?.clientId || null,
invocationId: lastConversationSetup?.invocationId || null,
toneStyle: lastConversationSetup?.toneStyle || null,
toneStyle: lastConversationSetup?.toneStyle || 'fast',
suggestions: lastConversationSetup?.suggestions || []
};
} else if (endpoint === 'chatGPTBrowser') {
@ -33,6 +33,12 @@ const buildDefaultConversation = ({ conversation, endpoint, lastConversationSetu
...conversation,
endpoint
};
} else {
console.error(`Unknown endpoint ${endpoint}`);
conversation = {
...conversation,
endpoint: null
};
}
return conversation;
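For illustration, the default conversation this yields when switching to the bingAI endpoint with no previous setup; this is a sketch, and the fields not visible in this hunk are assumptions:
// sketch: default conversation for endpoint 'bingAI' with no lastConversationSetup (partly assumed)
const defaultBingConversation = {
  // ...existing conversation fields are preserved via the spread...
  endpoint: 'bingAI',
  jailbreak: false,                // assumed default; not visible in this hunk
  jailbreakConversationId: null,
  conversationSignature: null,
  clientId: null,
  invocationId: null,
  toneStyle: 'fast',               // changed in this commit from null to 'fast'
  suggestions: []
};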

View file

@ -27,10 +27,7 @@ const getIcon = props => {
else if (!isCreatedByUser) {
const { endpoint, error } = props;
let icon = <GPTIcon size={size * 0.7} />;
let bg = 'grey';
let name = 'UNKNOWN';
let icon, bg, name;
if (endpoint === 'azureOpenAI') {
const { chatGptLabel } = props;
@ -59,6 +56,10 @@ const getIcon = props => {
icon = <GPTIcon size={size * 0.7} />;
bg = `grey`;
name = 'N/A';
} else {
icon = <GPTIcon size={size * 0.7} />;
bg = `grey`;
name = 'UNKNOWN';
}
return (

View file

@ -1,29 +1,14 @@
// import resetConvo from './resetConvo';
// import { useSelector, useDispatch } from 'react-redux';
// import { setNewConvo } from '~/store/convoSlice';
// import { setMessages } from '~/store/messageSlice';
// import { setSubmitState, setSubmission } from '~/store/submitSlice';
// import { setText } from '~/store/textSlice';
// import { setError } from '~/store/convoSlice';
import { v4 } from 'uuid';
import { useRecoilState, useRecoilValue, useSetRecoilState } from 'recoil';
import store from '~/store';
const useMessageHandler = () => {
// const dispatch = useDispatch();
// const convo = useSelector((state) => state.convo);
// const { initial } = useSelector((state) => state.models);
// const { messages } = useSelector((state) => state.messages);
// const { model, chatGptLabel, promptPrefix, isSubmitting } = useSelector((state) => state.submit);
// const { latestMessage, error } = convo;
const [currentConversation, setCurrentConversation] = useRecoilState(store.conversation) || {};
const currentConversation = useRecoilValue(store.conversation) || {};
const setSubmission = useSetRecoilState(store.submission);
const isSubmitting = useRecoilValue(store.isSubmitting);
const latestMessage = useRecoilValue(store.latestMessage);
const { error } = currentConversation;
const [messages, setMessages] = useRecoilState(store.messages);
@ -36,16 +21,53 @@ const useMessageHandler = () => {
}
// determine the endpoint to be used
const { model = null, chatGptLabel = null, promptPrefix = null } = currentConversation;
const { endpoint } = currentConversation;
let endpointOption = {};
let responseSender = '';
if (endpoint === 'azureOpenAI' || endpoint === 'openAI') {
endpointOption = {
endpoint,
model: currentConversation?.model || 'gpt-3.5-turbo',
chatGptLabel: currentConversation?.chatGptLabel || null,
promptPrefix: currentConversation?.promptPrefix || null,
temperature: currentConversation?.temperature || 0.8,
top_p: currentConversation?.top_p || 1,
presence_penalty: currentConversation?.presence_penalty || 1
};
responseSender = endpointOption.chatGptLabel || 'ChatGPT';
} else if (endpoint === 'bingAI') {
endpointOption = {
endpoint,
jailbreak: currentConversation?.jailbreak || false,
jailbreakConversationId: currentConversation?.jailbreakConversationId || null,
conversationSignature: currentConversation?.conversationSignature || null,
clientId: currentConversation?.clientId || null,
invocationId: currentConversation?.invocationId || null,
toneStyle: currentConversation?.toneStyle || 'fast',
suggestions: currentConversation?.suggestions || []
};
responseSender = endpointOption.jailbreak ? 'Sydney' : 'BingAI';
} else if (endpoint === 'chatGPTBrowser') {
endpointOption = {
endpoint,
model: currentConversation?.model || 'text-davinci-002-render-sha'
};
responseSender = 'ChatGPT';
} else if (endpoint === null) {
console.error('No endpoint available');
return;
} else {
console.error(`Unknown endpoint ${endpoint}`);
return;
}
let currentMessages = messages;
// construct the query message
// this is not a real messageId; it is used as a placeholder until the real messageId is returned
text = text.trim();
const fakeMessageId = v4();
// const isCustomModel = model === 'chatgptCustom' || !initial[model];
// const sender = model === 'chatgptCustom' ? chatGptLabel : model;
parentMessageId = parentMessageId || latestMessage?.messageId || '00000000-0000-0000-0000-000000000000';
let currentMessages = messages;
conversationId = conversationId || currentConversation?.conversationId;
if (conversationId == 'search') {
console.error('cannot send any message under search view!');
@ -68,7 +90,7 @@ const useMessageHandler = () => {
// construct the placeholder response message
const initialResponse = {
sender: chatGptLabel || model,
sender: responseSender,
text: '<span className="result-streaming">█</span>',
parentMessageId: isRegenerate ? messageId : fakeMessageId,
messageId: (isRegenerate ? messageId : fakeMessageId) + '_',
@ -79,16 +101,11 @@ const useMessageHandler = () => {
const submission = {
conversation: {
...currentConversation,
conversationId,
model,
chatGptLabel,
promptPrefix
conversationId
},
endpointOption,
message: {
...currentMsg,
model,
chatGptLabel,
promptPrefix,
overrideParentMessageId: isRegenerate ? messageId : null
},
messages: currentMessages,