Adds functionality for the waylaidwanderer ChatGPT API client

This commit is contained in:
Daniel Avila 2023-02-12 22:24:36 -05:00
parent 5b8c4452cc
commit 500b6c9fa8
7 changed files with 2541 additions and 177 deletions

43
app/chatgpt-client.js Normal file
View file

@@ -0,0 +1,43 @@
// Load environment variables (e.g. CHATGPT_TOKEN) from the project's .env file.
require('dotenv').config();
// const store = new Keyv(process.env.MONGODB_URI);
// NOTE(review): `Keyv` is only needed by the commented-out MongoDB store
// above; it is currently unused but kept in place for easy re-enabling.
const Keyv = require('keyv');
const { KeyvFile } = require('keyv-file');
// Options passed to every ChatGPTClient instance created in askClient below.
const clientOptions = {
// (Optional) Support for a reverse proxy for the completions endpoint (private API server).
// Warning: This will expose your `openaiApiKey` to a third-party. Consider the risks before using this.
reverseProxyUrl: 'https://chatgpt.pawan.krd/api/completions',
// (Optional) Parameters as described in https://platform.openai.com/docs/api-reference/completions
modelOptions: {
// You can override the model name and any other parameters here.
model: 'text-davinci-002-render'
},
// (Optional) Set custom instructions instead of "You are ChatGPT...".
// promptPrefix: 'You are Bob, a cowboy in Western times...',
// (Optional) Set a custom name for the user
// userLabel: 'User',
// (Optional) Set a custom name for ChatGPT
// chatGptLabel: 'ChatGPT',
// (Optional) Set to true to enable `console.debug()` logging
debug: false
};
/**
 * Sends a question to ChatGPT via the @waylaidwanderer/chatgpt-api client,
 * streaming partial results to the caller as they arrive.
 *
 * @param {string} question - The user's prompt text.
 * @param {(partial: string|object) => Promise<void>} progressCallback -
 *   Awaited for each partial response emitted while tokens stream in.
 * @param {{ parentMessageId?: string, conversationId?: string }} [convo] -
 *   Identifiers of an existing conversation. Only when BOTH ids are present
 *   is the message threaded onto that conversation; otherwise a new one starts.
 * @returns {Promise<object>} The client's final response object.
 */
const askClient = async (question, progressCallback, convo = {}) => {
  // The package is ESM-only, so it must be loaded with a dynamic import()
  // from this CommonJS module.
  const ChatGPTClient = (await import('@waylaidwanderer/chatgpt-api')).default;
  const client = new ChatGPTClient(process.env.CHATGPT_TOKEN, clientOptions, {
    // Persist conversation state to disk so follow-up messages can be threaded.
    store: new KeyvFile({ filename: 'cache.json' })
  });
  const options = {
    onProgress: async (partialRes) => await progressCallback(partialRes)
  };
  // Fix: default `convo = {}` prevents a TypeError when the caller omits the
  // argument; the truthiness check needs no `!!` inside an `if` condition.
  if (convo.parentMessageId && convo.conversationId) {
    Object.assign(options, convo);
  }
  const res = await client.sendMessage(question, options);
  return res;
};
module.exports = { askClient };

1
cache.json Normal file

File diff suppressed because one or more lines are too long

5
nodemon.json Normal file
View file

@@ -0,0 +1,5 @@
{
"ignore": [
"cache.json"
]
}

2623
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@@ -23,11 +23,13 @@
"dependencies": {
"@keyv/mongo": "^2.1.8",
"@reduxjs/toolkit": "^1.9.2",
"@waylaidwanderer/chatgpt-api": "^1.15.1",
"chatgpt": "^4.2.0",
"cors": "^2.8.5",
"crypto-browserify": "^3.12.0",
"dotenv": "^16.0.3",
"keyv": "^4.5.2",
"keyv-file": "^0.2.0",
"mongoose": "^6.9.0",
"openai": "^3.1.0",
"react": "^18.2.0",

View file

@@ -2,6 +2,7 @@ const express = require('express');
const crypto = require('crypto');
const router = express.Router();
const { ask, titleConvo } = require('../../app/chatgpt');
const { askClient } = require('../../app/chatgpt-client');
const { saveMessage, deleteMessages } = require('../../models/Message');
const { saveConvo } = require('../../models/Conversation');
@@ -28,8 +29,9 @@ router.post('/', async (req, res) => {
try {
let i = 0;
let tokens = '';
const progressCallback = async (partial) => {
if (i === 0) {
if (i === 0 && typeof partial === 'object') {
userMessage.parentMessageId = parentMessageId ? parentMessageId : partial.id;
userMessage.conversationId = conversationId ? conversationId : partial.conversationId;
await saveMessage(userMessage);
@@ -38,21 +40,45 @@ router.post('/', async (req, res) => {
);
i++;
}
if (typeof partial === 'object') {
const data = JSON.stringify({ ...partial, message: true });
res.write(`event: message\ndata: ${data}\n\n`);
} else {
tokens += partial;
res.write(`event: message\ndata: ${JSON.stringify({ text: tokens, message: true })}\n\n`);
}
};
let gptResponse = await ask(text, progressCallback, { parentMessageId, conversationId });
let gptResponse = await askClient(text, progressCallback, { parentMessageId, conversationId });
console.log('CLIENT RESPONSE', gptResponse);
if (!!parentMessageId) {
gptResponse = { ...gptResponse, parentMessageId };
} else {
gptResponse.title = await titleConvo(text, gptResponse.text);
}
if (!gptResponse.parentMessageId && !parentMessageId) {
userMessage.parentMessageId = gptResponse.messageId;
gptResponse.parentMessageId = gptResponse.messageId;
userMessage.conversationId = gptResponse.conversationId;
}
const response = gptResponse.text || gptResponse.response;
if (gptResponse.response) {
await saveMessage(userMessage);
gptResponse.text = gptResponse.response;
gptResponse.id = gptResponse.messageId;
delete gptResponse.response;
}
if (
(gptResponse.text.includes('2023') && !gptResponse.text.trim().includes(' ')) ||
gptResponse.text.toLowerCase().includes('no response') ||
gptResponse.text.toLowerCase().includes('no answer')
(response.includes('2023') && !response.trim().includes(' ')) ||
response.toLowerCase().includes('no response') ||
response.toLowerCase().includes('no answer')
) {
res.status(500).write('event: error\ndata: Prompt empty or too short');
res.end();
@@ -60,6 +86,9 @@ router.post('/', async (req, res) => {
}
gptResponse.sender = 'GPT';
console.log('gptResponse', gptResponse);
await saveMessage(gptResponse);
await saveConvo(gptResponse);

View file

@@ -28,6 +28,7 @@ export default function handleSubmit({
events.onmessage = function (e) {
const data = JSON.parse(e.data);
if (!!data.message) {
console.log('messageHandler data object', data);
messageHandler(data.text.replace(/^\n/, ''));
} else if (!data.initial) {
console.log(data);