Fix BingAI bugs and add ChatGPTBrowser client

This commit is contained in:
Daniel Avila 2023-02-21 21:30:56 -05:00
parent 168d5e8075
commit 16932b37c0
7 changed files with 42 additions and 46 deletions

View file

@ -1,4 +1,3 @@
// import { BingAIClient } from '@waylaidwanderer/chatgpt-api';
require('dotenv').config();
const { KeyvFile } = require('keyv-file');
@ -18,21 +17,11 @@ const askBing = async ({ text, progressCallback, convo }) => {
onProgress: async (partialRes) => await progressCallback(partialRes),
};
if (!!convo) {
if (convo) {
options = { ...options, ...convo };
}
const res = await bingAIClient.sendMessage(text, options
// Options for reference
// {
// conversationSignature: response.conversationSignature,
// conversationId: response.conversationId,
// clientId: response.clientId,
// invocationId: response.invocationId,
// onProgress: (token) => {
// process.stdout.write(token);
// },
// }
);
return res;

View file

@ -1,13 +1,11 @@
require('dotenv').config();
const Keyv = require('keyv');
const { KeyvFile } = require('keyv-file');
const proxyOptions = {
reverseProxyUrl: 'https://chatgpt.pawan.krd/api/completions',
modelOptions: {
model: 'text-davinci-002-render'
},
debug: false
// Warning: This will expose your access token to a third party. Consider the risks before using this.
reverseProxyUrl: 'https://chatgpt.duti.tech/api/conversation',
// Access token from https://chat.openai.com/api/auth/session
accessToken: process.env.CHATGPT_TOKEN
};
const davinciOptions = {
@ -18,14 +16,27 @@ const davinciOptions = {
};
const askClient = async ({ model, text, progressCallback, convo }) => {
// const clientOptions = model === 'chatgpt' ? proxyOptions : davinciOptions;
const ChatGPTClient = (await import('@waylaidwanderer/chatgpt-api')).default;
const client = new ChatGPTClient(process.env.OPENAI_KEY, davinciOptions, {
const davinciClient = (await import('@waylaidwanderer/chatgpt-api')).default;
const { ChatGPTBrowserClient } = await import('@waylaidwanderer/chatgpt-api');
const clientOptions = model === 'chatgpt' ? proxyOptions : davinciOptions;
const modelClient = model === 'chatgpt' ? ChatGPTBrowserClient : davinciClient;
const store = {
store: new KeyvFile({ filename: 'cache.json' })
});
};
const params =
model === 'chatgpt'
? [clientOptions, store]
: [
process.env.OPENAI_KEY,
clientOptions,
store
];
const client = new modelClient(...params);
let options = {
onProgress: async (partialRes) => await progressCallback(partialRes)
// onProgress: progressCallback
};
if (!!convo.parentMessageId && !!convo.conversationId) {

View file

@ -22,14 +22,14 @@ const ask = async (question, progressCallback, convo) => {
return res;
};
const titleConvo = async (message, response) => {
const titleConvo = async (message, response, model) => {
const configuration = new Configuration({
apiKey: process.env.OPENAI_KEY
});
const openai = new OpenAIApi(configuration);
const completion = await openai.createCompletion({
model: 'text-davinci-002',
prompt: `Write a short title in title case, ideally in 5 words or less, and do not refer to the user or GPT, that summarizes this conversation:\nUser:"${message}"\nGPT:"${response}"\nTitle: `
prompt: `Write a short title in title case, ideally in 5 words or less, and do not refer to the user or ${model}, that summarizes this conversation:\nUser:"${message}"\n${model}:"${response}"\nTitle: `
});
return completion.data.choices[0].text.replace(/\n/g, '');

View file

@ -1,7 +1,7 @@
const express = require('express');
const crypto = require('crypto');
const router = express.Router();
const { ask, titleConvo } = require('../../app/chatgpt');
const { titleConvo } = require('../../app/chatgpt');
const { askClient } = require('../../app/chatgpt-client');
const { askBing } = require('../../app/bingai');
const { saveMessage, deleteMessages } = require('../../models/Message');
@ -36,7 +36,6 @@ router.post('/bing', async (req, res) => {
});
try {
let i = 0;
let tokens = '';
const progressCallback = async (partial) => {
tokens += partial;
@ -58,21 +57,15 @@ router.post('/bing', async (req, res) => {
userMessage.invocationId = response.invocationId;
await saveMessage(userMessage);
// if (
// (response.text.includes('2023') && !response.text.trim().includes(' ')) ||
// response.text.toLowerCase().includes('no response') ||
// response.text.toLowerCase().includes('no answer')
// ) {
// return handleError(res, 'Prompt empty or too short');
// }
if (!convo.conversationSignature) {
response.title = await titleConvo(text, response.response);
response.title = await titleConvo(text, response.response, model);
}
response.text = response.response;
response.id = response.details.messageId;
response.suggestions = response.details.suggestedResponses && response.details.suggestedResponses.map((s) => s.text);
response.suggestions =
response.details.suggestedResponses &&
response.details.suggestedResponses.map((s) => s.text);
response.sender = model;
response.final = true;
await saveMessage(response);
@ -121,6 +114,9 @@ router.post('/', async (req, res) => {
sendMessage(res, { ...partial, message: true });
} else {
tokens += partial;
if (tokens.includes('[DONE]')) {
tokens = tokens.replace('[DONE]', '');
}
sendMessage(res, { text: tokens, message: true });
}
};
@ -158,7 +154,7 @@ router.post('/', async (req, res) => {
}
if (!parentMessageId) {
gptResponse.title = await titleConvo(text, gptResponse.text);
gptResponse.title = await titleConvo(text, gptResponse.text, model);
}
gptResponse.sender = model;
gptResponse.final = true;
@ -173,4 +169,4 @@ router.post('/', async (req, res) => {
}
});
module.exports = router;
module.exports = router;

View file

@ -9,7 +9,7 @@ router.get('/', async (req, res) => {
router.post('/clear', async (req, res) => {
let filter = {};
const { conversationId } = req.body.arg;
if (!!conversationId) {
if (conversationId) {
filter = { conversationId };
}

View file

@ -38,7 +38,7 @@ export default function TextChat({ messages }) {
};
const convoHandler = (data) => {
console.log('in convo handler');
if (model !== 'bingai' && convo.parentMessageId === null) {
if (model !== 'bingai' && convo.conversationId && convo.parentMessageId === null) {
const { title, conversationId, id } = data;
console.log('parentMessageId is null');
console.log('title, convoId, id', title, conversationId, id);
@ -52,7 +52,7 @@ export default function TextChat({ messages }) {
invocationId: null
})
);
} else if (convo.invocationId === null) {
} else if (model === 'bingai' && convo.invocationId === null) {
const { title, conversationSignature, clientId, conversationId, invocationId } = data;
console.log('convoSig is null');
console.log(

View file

@ -41,9 +41,9 @@ export default function handleSubmit({
events.onmessage = function (e) {
const data = JSON.parse(e.data);
const text = data.text || data.response;
if (!!data.message) {
messageHandler(text.replace(/^\n/, ''));
} else if (!!data.final) {
if (data.message) {
messageHandler(text);
} else if (data.final) {
console.log(data);
convoHandler(data);
} else {
@ -58,4 +58,4 @@ export default function handleSubmit({
};
events.stream();
}
}