From 16932b37c04b9b40ac66f4d27bbfe94c88555ebc Mon Sep 17 00:00:00 2001 From: Daniel Avila Date: Tue, 21 Feb 2023 21:30:56 -0500 Subject: [PATCH] fix bingai bugs and add chatgptbrowser client --- app/bingai.js | 13 +------------ app/chatgpt-client.js | 33 +++++++++++++++++++++----------- app/chatgpt.js | 4 ++-- server/routes/ask.js | 24 ++++++++++------------- server/routes/convos.js | 2 +- src/components/main/TextChat.jsx | 4 ++-- src/utils/handleSubmit.js | 8 ++++---- 7 files changed, 42 insertions(+), 46 deletions(-) diff --git a/app/bingai.js b/app/bingai.js index 86e88f11fb..2ce2712099 100644 --- a/app/bingai.js +++ b/app/bingai.js @@ -1,4 +1,3 @@ -// import { BingAIClient } from '@waylaidwanderer/chatgpt-api'; require('dotenv').config(); const { KeyvFile } = require('keyv-file'); @@ -18,21 +17,11 @@ const askBing = async ({ text, progressCallback, convo }) => { onProgress: async (partialRes) => await progressCallback(partialRes), }; - if (!!convo) { + if (convo) { options = { ...options, ...convo }; } const res = await bingAIClient.sendMessage(text, options - // Options for reference - // { - // conversationSignature: response.conversationSignature, - // conversationId: response.conversationId, - // clientId: response.clientId, - // invocationId: response.invocationId, - // onProgress: (token) => { - // process.stdout.write(token); - // }, - // } ); return res; diff --git a/app/chatgpt-client.js b/app/chatgpt-client.js index 3bc629b1d8..4bc3cf4363 100644 --- a/app/chatgpt-client.js +++ b/app/chatgpt-client.js @@ -1,13 +1,11 @@ require('dotenv').config(); -const Keyv = require('keyv'); const { KeyvFile } = require('keyv-file'); const proxyOptions = { - reverseProxyUrl: 'https://chatgpt.pawan.krd/api/completions', - modelOptions: { - model: 'text-davinci-002-render' - }, - debug: false + // Warning: This will expose your access token to a third party. Consider the risks before using this. + reverseProxyUrl: 'https://chatgpt.duti.tech/api/conversation', + // Access token from https://chat.openai.com/api/auth/session + accessToken: process.env.CHATGPT_TOKEN }; const davinciOptions = { @@ -18,14 +16,27 @@ const davinciOptions = { }; const askClient = async ({ model, text, progressCallback, convo }) => { - // const clientOptions = model === 'chatgpt' ? proxyOptions : davinciOptions; - const ChatGPTClient = (await import('@waylaidwanderer/chatgpt-api')).default; - const client = new ChatGPTClient(process.env.OPENAI_KEY, davinciOptions, { + const davinciClient = (await import('@waylaidwanderer/chatgpt-api')).default; + const { ChatGPTBrowserClient } = await import('@waylaidwanderer/chatgpt-api'); + const clientOptions = model === 'chatgpt' ? proxyOptions : davinciOptions; + const modelClient = model === 'chatgpt' ? ChatGPTBrowserClient : davinciClient; + const store = { store: new KeyvFile({ filename: 'cache.json' }) - }); + }; + + const params = + model === 'chatgpt' + ? 
[clientOptions, store] + : [ + process.env.OPENAI_KEY, + clientOptions, + store + ]; + + const client = new modelClient(...params); + let options = { onProgress: async (partialRes) => await progressCallback(partialRes) - // onProgress: progressCallback }; if (!!convo.parentMessageId && !!convo.conversationId) { diff --git a/app/chatgpt.js b/app/chatgpt.js index bd27976288..18edcfca83 100644 --- a/app/chatgpt.js +++ b/app/chatgpt.js @@ -22,14 +22,14 @@ const ask = async (question, progressCallback, convo) => { return res; }; -const titleConvo = async (message, response) => { +const titleConvo = async (message, response, model) => { const configuration = new Configuration({ apiKey: process.env.OPENAI_KEY }); const openai = new OpenAIApi(configuration); const completion = await openai.createCompletion({ model: 'text-davinci-002', - prompt: `Write a short title in title case, ideally in 5 words or less, and do not refer to the user or GPT, that summarizes this conversation:\nUser:"${message}"\nGPT:"${response}"\nTitle: ` + prompt: `Write a short title in title case, ideally in 5 words or less, and do not refer to the user or ${model}, that summarizes this conversation:\nUser:"${message}"\n${model}:"${response}"\nTitle: ` }); return completion.data.choices[0].text.replace(/\n/g, ''); diff --git a/server/routes/ask.js b/server/routes/ask.js index 02133748c3..b19e632640 100644 --- a/server/routes/ask.js +++ b/server/routes/ask.js @@ -1,7 +1,7 @@ const express = require('express'); const crypto = require('crypto'); const router = express.Router(); -const { ask, titleConvo } = require('../../app/chatgpt'); +const { titleConvo } = require('../../app/chatgpt'); const { askClient } = require('../../app/chatgpt-client'); const { askBing } = require('../../app/bingai'); const { saveMessage, deleteMessages } = require('../../models/Message'); @@ -36,7 +36,6 @@ router.post('/bing', async (req, res) => { }); try { - let i = 0; let tokens = ''; const progressCallback = async (partial) => { tokens += partial; @@ -58,21 +57,15 @@ router.post('/bing', async (req, res) => { userMessage.invocationId = response.invocationId; await saveMessage(userMessage); - // if ( - // (response.text.includes('2023') && !response.text.trim().includes(' ')) || - // response.text.toLowerCase().includes('no response') || - // response.text.toLowerCase().includes('no answer') - // ) { - // return handleError(res, 'Prompt empty or too short'); - // } - if (!convo.conversationSignature) { - response.title = await titleConvo(text, response.response); + response.title = await titleConvo(text, response.response, model); } response.text = response.response; response.id = response.details.messageId; - response.suggestions = response.details.suggestedResponses && response.details.suggestedResponses.map((s) => s.text); + response.suggestions = + response.details.suggestedResponses && + response.details.suggestedResponses.map((s) => s.text); response.sender = model; response.final = true; await saveMessage(response); @@ -121,6 +114,9 @@ router.post('/', async (req, res) => { sendMessage(res, { ...partial, message: true }); } else { tokens += partial; + if (tokens.includes('[DONE]')) { + tokens = tokens.replace('[DONE]', ''); + } sendMessage(res, { text: tokens, message: true }); } }; @@ -158,7 +154,7 @@ router.post('/', async (req, res) => { } if (!parentMessageId) { - gptResponse.title = await titleConvo(text, gptResponse.text); + gptResponse.title = await titleConvo(text, gptResponse.text, model); } gptResponse.sender = model; 
gptResponse.final = true; @@ -173,4 +169,4 @@ router.post('/', async (req, res) => { } }); -module.exports = router; \ No newline at end of file +module.exports = router; diff --git a/server/routes/convos.js b/server/routes/convos.js index 6c1538be63..31749fa048 100644 --- a/server/routes/convos.js +++ b/server/routes/convos.js @@ -9,7 +9,7 @@ router.get('/', async (req, res) => { router.post('/clear', async (req, res) => { let filter = {}; const { conversationId } = req.body.arg; - if (!!conversationId) { + if (conversationId) { filter = { conversationId }; } diff --git a/src/components/main/TextChat.jsx b/src/components/main/TextChat.jsx index 53ed0e2f29..b2df78e6a5 100644 --- a/src/components/main/TextChat.jsx +++ b/src/components/main/TextChat.jsx @@ -38,7 +38,7 @@ export default function TextChat({ messages }) { }; const convoHandler = (data) => { console.log('in convo handler'); - if (model !== 'bingai' && convo.parentMessageId === null) { + if (model !== 'bingai' && convo.conversationId && convo.parentMessageId === null) { const { title, conversationId, id } = data; console.log('parentMessageId is null'); console.log('title, convoId, id', title, conversationId, id); @@ -52,7 +52,7 @@ export default function TextChat({ messages }) { invocationId: null }) ); - } else if (convo.invocationId === null) { + } else if (model === 'bingai' && convo.invocationId === null) { const { title, conversationSignature, clientId, conversationId, invocationId } = data; console.log('convoSig is null'); console.log( diff --git a/src/utils/handleSubmit.js b/src/utils/handleSubmit.js index a892c4c588..d5e9d110da 100644 --- a/src/utils/handleSubmit.js +++ b/src/utils/handleSubmit.js @@ -41,9 +41,9 @@ export default function handleSubmit({ events.onmessage = function (e) { const data = JSON.parse(e.data); const text = data.text || data.response; - if (!!data.message) { - messageHandler(text.replace(/^\n/, '')); - } else if (!!data.final) { + if (data.message) { + messageHandler(text); + } else if (data.final) { console.log(data); convoHandler(data); } else { @@ -58,4 +58,4 @@ export default function handleSubmit({ }; events.stream(); -} +} \ No newline at end of file
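
Below is a minimal usage sketch of the reworked askClient from app/chatgpt-client.js. It is illustrative only and not part of this patch: it assumes the script is run from the repository root, that .env provides CHATGPT_TOKEN (used by the 'chatgpt' browser client via proxyOptions) or OPENAI_KEY (used by the davinci client), and that progressCallback receives streamed partial text the way server/routes/ask.js consumes it.

    // usage-sketch.js — illustrative example, not included in this commit
    // Assumes .env provides CHATGPT_TOKEN and/or OPENAI_KEY as referenced in app/chatgpt-client.js.
    const { askClient } = require('./app/chatgpt-client');

    (async () => {
      let streamed = '';
      const response = await askClient({
        // 'chatgpt' selects ChatGPTBrowserClient with proxyOptions; any other value falls back to the davinci client
        model: 'chatgpt',
        text: 'Write a haiku about version control.',
        // invoked with partial results as they stream in; ask.js accumulates them into the displayed text
        progressCallback: async (partial) => {
          streamed += typeof partial === 'string' ? partial : '';
        },
        // pass { conversationId, parentMessageId } here to continue an existing conversation
        convo: {}
      });
      console.log(response);
    })();

The model check in the sketch mirrors the `model === 'chatgpt' ? ChatGPTBrowserClient : davinciClient` selection introduced by this commit; the shape of the streamed partials and of the final response comes from the upstream @waylaidwanderer/chatgpt-api clients and is treated here as opaque.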