chatgpt now titles conversations

Danny Avila 2023-03-02 13:52:41 -05:00
parent 897c384ac9
commit c4d0787b49
4 changed files with 31 additions and 8 deletions


@@ -1,4 +1,4 @@
-const { titleConvo } = require('./chatgpt');
+const titleConvo = require('./titleConvo');
 const { askClient } = require('./chatgpt-client');
 const { browserClient } = require('./chatgpt-browser');
 const { askBing } = require('./bingai');

app/titleConvo.js Normal file

@@ -0,0 +1,23 @@
const { Configuration, OpenAIApi } = require('openai');

const titleConvo = async ({ message, response, model }) => {
  const configuration = new Configuration({
    apiKey: process.env.OPENAI_KEY
  });
  const openai = new OpenAIApi(configuration);
  const completion = await openai.createChatCompletion({
    model: 'gpt-3.5-turbo',
    messages: [
      {
        role: 'system',
        content:
          'You are a helpful title-generator with one job: titling in title case the conversation provided by a user. You do not reply with anything but a succinct title that summarizes the conversation in title case, ideally around 5 words or less. You do not refer to the participants of the conversation by name. Do not include punctuation or quotation marks.'
      },
      { role: 'user', content: `Please title this conversation: User:"${message}" ${model}:"${response}"` },
    ]
  });

  return completion.data.choices[0].message.content;
};

module.exports = titleConvo;
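
For context, a minimal sketch of how the new helper might be exercised on its own. The sample strings, the sketch.js file name, and the console.log are illustrative and not part of the commit; it assumes OPENAI_KEY is set in the environment and that the file sits next to app/titleConvo.js.

// sketch.js: illustrative usage of the new helper, not part of this commit
const titleConvo = require('./titleConvo');

(async () => {
  const title = await titleConvo({
    model: 'chatgpt',                          // sender label interpolated into the prompt
    message: 'How do I center a div in CSS?',  // the user's side of the exchange
    response: 'Use flexbox: set display to flex and center with justify-content and align-items.'
  });
  console.log(title); // e.g. "Centering a Div With Flexbox" (actual output depends on the model)
})();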

package-lock.json generated

@@ -10688,9 +10688,9 @@
       }
     },
     "node_modules/openai": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/openai/-/openai-3.1.0.tgz",
-      "integrity": "sha512-v5kKFH5o+8ld+t0arudj833Mgm3GcgBnbyN9946bj6u7bvel4Yg6YFz2A4HLIYDzmMjIo0s6vSG9x73kOwvdCg==",
+      "version": "3.2.1",
+      "resolved": "https://registry.npmjs.org/openai/-/openai-3.2.1.tgz",
+      "integrity": "sha512-762C9BNlJPbjjlWZi4WYK9iM2tAVAv0uUp1UmI34vb0CN5T2mjB/qM6RYBmNKMh/dN9fC+bxqPwWJZUTWW052A==",
       "dependencies": {
         "axios": "^0.26.0",
         "form-data": "^4.0.0"
@@ -23008,9 +23008,9 @@
       }
     },
     "openai": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/openai/-/openai-3.1.0.tgz",
-      "integrity": "sha512-v5kKFH5o+8ld+t0arudj833Mgm3GcgBnbyN9946bj6u7bvel4Yg6YFz2A4HLIYDzmMjIo0s6vSG9x73kOwvdCg==",
+      "version": "3.2.1",
+      "resolved": "https://registry.npmjs.org/openai/-/openai-3.2.1.tgz",
+      "integrity": "sha512-762C9BNlJPbjjlWZi4WYK9iM2tAVAv0uUp1UmI34vb0CN5T2mjB/qM6RYBmNKMh/dN9fC+bxqPwWJZUTWW052A==",
       "requires": {
         "axios": "^0.26.0",
         "form-data": "^4.0.0"


@@ -87,7 +87,7 @@ router.post('/', async (req, res) => {
       }
       if (!parentMessageId) {
-        gptResponse.title = await titleConvo(text, JSON.stringify(gptResponse.text), model);
+        gptResponse.title = await titleConvo({ model, message: text, response: JSON.stringify(gptResponse.text) });
       }
       gptResponse.sender = model;
       gptResponse.final = true;
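
Read together with the helper above, the route now generates a title only for the first message of a conversation and passes named fields instead of positional arguments. A condensed restatement of the changed branch, with the fields annotated (variable names come from the route handler shown in the hunk; error handling is omitted here, as in the diff):

// Only the first exchange of a conversation (no parentMessageId) triggers title generation.
if (!parentMessageId) {
  gptResponse.title = await titleConvo({
    model,                                       // sender label, e.g. 'chatgpt'
    message: text,                               // the user's prompt text
    response: JSON.stringify(gptResponse.text)   // the model's reply, stringified for the prompt
  });
}
gptResponse.sender = model;
gptResponse.final = true;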