Mirror of https://github.com/danny-avila/LibreChat.git
Commit 16932b37c0 (parent 168d5e8075): fix bingai bugs and add chatgptbrowser client

7 changed files with 42 additions and 46 deletions

File 1 of 7 — BingAI client module (askBing):

@@ -1,4 +1,3 @@
-// import { BingAIClient } from '@waylaidwanderer/chatgpt-api';
 require('dotenv').config();
 const { KeyvFile } = require('keyv-file');
 
@@ -18,21 +17,11 @@ const askBing = async ({ text, progressCallback, convo }) => {
     onProgress: async (partialRes) => await progressCallback(partialRes),
   };
 
-  if (!!convo) {
+  if (convo) {
    options = { ...options, ...convo };
  }
 
  const res = await bingAIClient.sendMessage(text, options
-    // Options for reference
-    // {
-    //   conversationSignature: response.conversationSignature,
-    //   conversationId: response.conversationId,
-    //   clientId: response.clientId,
-    //   invocationId: response.invocationId,
-    //   onProgress: (token) => {
-    //     process.stdout.write(token);
-    //   },
-    // }
  );
 
  return res;
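
Read as a whole, the remaining askBing body is now a plain pass-through: the conversation-continuation fields that the deleted reference comment enumerated (conversationSignature, conversationId, clientId, invocationId) arrive via convo and are spread into the sendMessage options. The sketch below restates that flow in isolation; it assumes a BingAIClient instance and a progressCallback already exist (client construction is outside this hunk), and the function name continueBingChat is illustrative, not part of the commit.

// Sketch only: mirrors `options = { ...options, ...convo }` from the hunk above.
// `bingAIClient` and `progressCallback` are assumed to be provided by the caller.
async function continueBingChat(bingAIClient, text, convo, progressCallback) {
  let options = {
    // stream partial responses back to the caller as they arrive
    onProgress: async (partialRes) => await progressCallback(partialRes),
  };

  if (convo) {
    // convo may carry: conversationSignature, conversationId, clientId, invocationId
    options = { ...options, ...convo };
  }

  return await bingAIClient.sendMessage(text, options);
}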

File 2 of 7 — ChatGPT client module (askClient):

@@ -1,13 +1,11 @@
 require('dotenv').config();
-const Keyv = require('keyv');
 const { KeyvFile } = require('keyv-file');
 
 const proxyOptions = {
-  reverseProxyUrl: 'https://chatgpt.pawan.krd/api/completions',
-  modelOptions: {
-    model: 'text-davinci-002-render'
-  },
-  debug: false
+  // Warning: This will expose your access token to a third party. Consider the risks before using this.
+  reverseProxyUrl: 'https://chatgpt.duti.tech/api/conversation',
+  // Access token from https://chat.openai.com/api/auth/session
+  accessToken: process.env.CHATGPT_TOKEN
 };
 
 const davinciOptions = {
@@ -18,14 +16,27 @@ const davinciOptions = {
 };
 
 const askClient = async ({ model, text, progressCallback, convo }) => {
-  // const clientOptions = model === 'chatgpt' ? proxyOptions : davinciOptions;
-  const ChatGPTClient = (await import('@waylaidwanderer/chatgpt-api')).default;
-  const client = new ChatGPTClient(process.env.OPENAI_KEY, davinciOptions, {
+  const davinciClient = (await import('@waylaidwanderer/chatgpt-api')).default;
+  const { ChatGPTBrowserClient } = await import('@waylaidwanderer/chatgpt-api');
+  const clientOptions = model === 'chatgpt' ? proxyOptions : davinciOptions;
+  const modelClient = model === 'chatgpt' ? ChatGPTBrowserClient : davinciClient;
+  const store = {
     store: new KeyvFile({ filename: 'cache.json' })
-  });
+  };
+
+  const params =
+    model === 'chatgpt'
+      ? [clientOptions, store]
+      : [
+          process.env.OPENAI_KEY,
+          clientOptions,
+          store
+        ];
+
+  const client = new modelClient(...params);
 
  let options = {
    onProgress: async (partialRes) => await progressCallback(partialRes)
-    // onProgress: progressCallback
  };
 
  if (!!convo.parentMessageId && !!convo.conversationId) {
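
Taken together, the new askClient hunk boils down to picking a client class and an argument list per model. The condensed sketch below shows that selection under the same assumptions the hunk makes (the package's default export is the API-key client and ChatGPTBrowserClient is a named export, as the import lines imply); the helper name buildClient and its signature are illustrative and not part of the commit.

// Condensed sketch of the client selection introduced in the hunk above.
require('dotenv').config();
const { KeyvFile } = require('keyv-file');

async function buildClient(model, proxyOptions, davinciOptions) {
  const api = await import('@waylaidwanderer/chatgpt-api');
  const davinciClient = api.default;        // API-key based client
  const { ChatGPTBrowserClient } = api;     // access-token based browser client

  const clientOptions = model === 'chatgpt' ? proxyOptions : davinciOptions;
  const ModelClient = model === 'chatgpt' ? ChatGPTBrowserClient : davinciClient;
  const store = { store: new KeyvFile({ filename: 'cache.json' }) };

  // As the params arrays in the hunk imply: the browser client is constructed
  // from (options, cacheOptions), while the davinci client additionally takes
  // the OpenAI API key as its first argument.
  const params =
    model === 'chatgpt'
      ? [clientOptions, store]
      : [process.env.OPENAI_KEY, clientOptions, store];

  return new ModelClient(...params);
}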

File 3 of 7 — ChatGPT helper module (titleConvo):

@@ -22,14 +22,14 @@ const ask = async (question, progressCallback, convo) => {
   return res;
 };
 
-const titleConvo = async (message, response) => {
+const titleConvo = async (message, response, model) => {
   const configuration = new Configuration({
     apiKey: process.env.OPENAI_KEY
   });
   const openai = new OpenAIApi(configuration);
   const completion = await openai.createCompletion({
     model: 'text-davinci-002',
-    prompt: `Write a short title in title case, ideally in 5 words or less, and do not refer to the user or GPT, that summarizes this conversation:\nUser:"${message}"\nGPT:"${response}"\nTitle: `
+    prompt: `Write a short title in title case, ideally in 5 words or less, and do not refer to the user or ${model}, that summarizes this conversation:\nUser:"${message}"\n${model}:"${response}"\nTitle: `
   });
 
  return completion.data.choices[0].text.replace(/\n/g, '');
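
The new model argument is threaded through from the route handlers below, so the title prompt quotes the actual responder name ('chatgpt', 'bingai', ...) instead of a hard-coded "GPT". A brief usage sketch under that assumption; the wrapper function addTitle is illustrative only, while the relative require path matches the one used by the routes in this commit.

// Illustrative call site: pass the active model name along with the
// user text and the model's reply, as the updated routes do.
const { titleConvo } = require('../../app/chatgpt');

async function addTitle(text, responseText, model, isNewConversation) {
  // Mirrors the route logic: only brand-new conversations get a title.
  if (!isNewConversation) return null;
  // e.g. model === 'bingai' makes the prompt quote `bingai:"..."` instead of `GPT:"..."`
  return await titleConvo(text, responseText, model);
}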

File 4 of 7 — ask route (router.post('/bing') and router.post('/')):

@@ -1,7 +1,7 @@
 const express = require('express');
 const crypto = require('crypto');
 const router = express.Router();
-const { ask, titleConvo } = require('../../app/chatgpt');
+const { titleConvo } = require('../../app/chatgpt');
 const { askClient } = require('../../app/chatgpt-client');
 const { askBing } = require('../../app/bingai');
 const { saveMessage, deleteMessages } = require('../../models/Message');
@@ -36,7 +36,6 @@ router.post('/bing', async (req, res) => {
   });
 
   try {
-    let i = 0;
     let tokens = '';
     const progressCallback = async (partial) => {
       tokens += partial;
@@ -58,21 +57,15 @@ router.post('/bing', async (req, res) => {
     userMessage.invocationId = response.invocationId;
     await saveMessage(userMessage);
 
-    // if (
-    //   (response.text.includes('2023') && !response.text.trim().includes(' ')) ||
-    //   response.text.toLowerCase().includes('no response') ||
-    //   response.text.toLowerCase().includes('no answer')
-    // ) {
-    //   return handleError(res, 'Prompt empty or too short');
-    // }
-
     if (!convo.conversationSignature) {
-      response.title = await titleConvo(text, response.response);
+      response.title = await titleConvo(text, response.response, model);
     }
 
     response.text = response.response;
     response.id = response.details.messageId;
-    response.suggestions = response.details.suggestedResponses && response.details.suggestedResponses.map((s) => s.text);
+    response.suggestions =
+      response.details.suggestedResponses &&
+      response.details.suggestedResponses.map((s) => s.text);
    response.sender = model;
    response.final = true;
    await saveMessage(response);
@@ -121,6 +114,9 @@ router.post('/', async (req, res) => {
         sendMessage(res, { ...partial, message: true });
       } else {
         tokens += partial;
+        if (tokens.includes('[DONE]')) {
+          tokens = tokens.replace('[DONE]', '');
+        }
        sendMessage(res, { text: tokens, message: true });
      }
    };
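
The new guard exists because an OpenAI-style stream can emit a literal '[DONE]' sentinel inside a partial chunk; stripping it keeps the marker from leaking into the rendered message. Below is a standalone sketch of the accumulator pattern used in the hunk above; send stands in for sendMessage(res, ...), the branch that forwards object-shaped partials untouched is outside the sketch, and the factory name createTokenStream is illustrative.

// Sketch of the token accumulation added above: partial chunks are
// concatenated, the '[DONE]' sentinel is removed, and the running text
// is forwarded to the client on every update.
function createTokenStream(send) {
  let tokens = '';
  return function onPartial(partial) {
    tokens += partial;
    if (tokens.includes('[DONE]')) {
      tokens = tokens.replace('[DONE]', '');
    }
    send({ text: tokens, message: true });
  };
}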
@@ -158,7 +154,7 @@ router.post('/', async (req, res) => {
     }
 
     if (!parentMessageId) {
-      gptResponse.title = await titleConvo(text, gptResponse.text);
+      gptResponse.title = await titleConvo(text, gptResponse.text, model);
     }
     gptResponse.sender = model;
     gptResponse.final = true;
@@ -173,4 +169,4 @@ router.post('/', async (req, res) => {
   }
 });
 
 module.exports = router;

File 5 of 7 — messages route (router.post('/clear')):

@@ -9,7 +9,7 @@ router.get('/', async (req, res) => {
 router.post('/clear', async (req, res) => {
   let filter = {};
   const { conversationId } = req.body.arg;
-  if (!!conversationId) {
+  if (conversationId) {
    filter = { conversationId };
  }
 

File 6 of 7 — client TextChat component (convoHandler):

@@ -38,7 +38,7 @@ export default function TextChat({ messages }) {
   };
   const convoHandler = (data) => {
     console.log('in convo handler');
-    if (model !== 'bingai' && convo.parentMessageId === null) {
+    if (model !== 'bingai' && convo.conversationId && convo.parentMessageId === null) {
       const { title, conversationId, id } = data;
       console.log('parentMessageId is null');
       console.log('title, convoId, id', title, conversationId, id);
@@ -52,7 +52,7 @@ export default function TextChat({ messages }) {
           invocationId: null
         })
       );
-    } else if (convo.invocationId === null) {
+    } else if (model === 'bingai' && convo.invocationId === null) {
      const { title, conversationSignature, clientId, conversationId, invocationId } = data;
      console.log('convoSig is null');
      console.log(

File 7 of 7 — client handleSubmit (server-sent events handler):

@@ -41,9 +41,9 @@ export default function handleSubmit({
   events.onmessage = function (e) {
     const data = JSON.parse(e.data);
     const text = data.text || data.response;
-    if (!!data.message) {
-      messageHandler(text.replace(/^\n/, ''));
-    } else if (!!data.final) {
+    if (data.message) {
+      messageHandler(text);
+    } else if (data.final) {
       console.log(data);
       convoHandler(data);
     } else {
@@ -58,4 +58,4 @@ export default function handleSubmit({
   };
 
   events.stream();
 }
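
For reference, the payload shapes the handler above distinguishes — intermediate { message: true, text } frames followed by a single { final: true, ... } frame — could also be consumed with a bare EventSource. The sketch below is not the app's actual wiring (the app uses a streaming helper with events.stream(), and its endpoints are POST routes); the function name listen and the GET-capable URL are assumptions made purely to illustrate the frame protocol.

// Minimal consumer of the stream format used above (illustrative only).
function listen(url, { onToken, onDone }) {
  const events = new EventSource(url); // assumes a GET-capable SSE endpoint
  events.onmessage = (e) => {
    const data = JSON.parse(e.data);
    const text = data.text || data.response;
    if (data.message) {
      onToken(text);   // server sends the accumulated text so far
    } else if (data.final) {
      onDone(data);    // full response object (ids, title, sender, ...)
      events.close();
    }
  };
  return events;
}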