diff --git a/api/.env.example b/api/.env.example index 4fa5a432a1..83da008446 100644 --- a/api/.env.example +++ b/api/.env.example @@ -25,8 +25,9 @@ MONGO_URI="mongodb://127.0.0.1:27017/chatgpt-clone" OPENAI_KEY= # Identify the available models, sperate by comma, and not space in it +# The first will be default # Leave it blank to use internal settings. -# OPENAI_MODELS=gpt-4,text-davinci-003,gpt-3.5-turbo,gpt-3.5-turbo-0301 +OPENAI_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-0301,text-davinci-003,gpt-4 # Reverse proxy setting for OpenAI # https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy @@ -39,6 +40,8 @@ OPENAI_KEY= # BingAI Tokens: the "_U" cookies value from bing.com # Leave it and BINGAI_USER_TOKEN blank to disable this endpoint. +# Set to "user_provide" to allow user provided token. +# BINGAI_TOKEN="user_provide" BINGAI_TOKEN= # BingAI Host: @@ -46,12 +49,6 @@ BINGAI_TOKEN= # Leave it blank to use default server. # BINGAI_HOST="https://cn.bing.com" -# BingAI User defined Token -# Allow user to set their own token by client -# Uncomment this to enable this feature. -# (Not implemented yet.) -# BINGAI_USER_TOKEN=1 - ############################# # Endpoint chatGPT: @@ -61,11 +58,14 @@ BINGAI_TOKEN= # Access token from https://chat.openai.com/api/auth/session # Exposes your access token to CHATGPT_REVERSE_PROXY # Leave it blank to disable this endpoint +# Set to "user_provide" to allow user provided token. +# CHATGPT_TOKEN="user_provide" CHATGPT_TOKEN= # Identify the available models, sperate by comma, and not space in it +# The first will be default # Leave it blank to use internal settings. 
-# CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4 +CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4 # Reverse proxy setting for OpenAI # https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy diff --git a/api/.prettierrc b/api/.prettierrc index 34e12e2f49..1c37ff0e5b 100644 --- a/api/.prettierrc +++ b/api/.prettierrc @@ -1,5 +1,5 @@ { - "arrowParens": "avoid", + "arrowParens": "always", "bracketSpacing": true, "endOfLine": "lf", "htmlWhitespaceSensitivity": "css", diff --git a/api/app/clients/bingai.js b/api/app/clients/bingai.js index ca790ec43d..700c7b72a9 100644 --- a/api/app/clients/bingai.js +++ b/api/app/clients/bingai.js @@ -13,6 +13,7 @@ const askBing = async ({ clientId, invocationId, toneStyle, + token, onProgress }) => { const { BingAIClient } = await import('@waylaidwanderer/chatgpt-api'); @@ -22,7 +23,7 @@ const askBing = async ({ const bingAIClient = new BingAIClient({ // "_U" cookie from bing.com - userToken: process.env.BINGAI_TOKEN, + userToken: process.env.BINGAI_TOKEN == 'user_provide' ? token : process.env.BINGAI_TOKEN ?? null, // If the above doesn't work, provide all your cookies as a string instead // cookies: '', debug: false, diff --git a/api/app/clients/chatgpt-browser.js b/api/app/clients/chatgpt-browser.js index 4eb50a1476..0844e5c173 100644 --- a/api/app/clients/chatgpt-browser.js +++ b/api/app/clients/chatgpt-browser.js @@ -6,6 +6,7 @@ const browserClient = async ({ parentMessageId, conversationId, model, + token, onProgress, abortController }) => { @@ -18,7 +19,7 @@ const browserClient = async ({ // Warning: This will expose your access token to a third party. Consider the risks before using this. 
reverseProxyUrl: process.env.CHATGPT_REVERSE_PROXY || 'https://bypass.churchless.tech/api/conversation', // Access token from https://chat.openai.com/api/auth/session - accessToken: process.env.CHATGPT_TOKEN, + accessToken: process.env.CHATGPT_TOKEN == 'user_provide' ? token : process.env.CHATGPT_TOKEN ?? null, model: model, // debug: true proxy: process.env.PROXY || null diff --git a/api/models/Conversation.js b/api/models/Conversation.js index 6e7e1ceaab..580a07f6e1 100644 --- a/api/models/Conversation.js +++ b/api/models/Conversation.js @@ -13,45 +13,21 @@ const getConvo = async (user, conversationId) => { module.exports = { Conversation, - saveConvo: async (user, { conversationId, newConversationId, title, ...convo }) => { + saveConvo: async (user, { conversationId, newConversationId, ...convo }) => { try { const messages = await getMessages({ conversationId }); - const update = { ...convo, messages }; - if (title) { - update.title = title; - update.user = user; - } + const update = { ...convo, messages, user }; if (newConversationId) { update.conversationId = newConversationId; } - if (!update.jailbreakConversationId) { - update.jailbreakConversationId = null; - } - return await Conversation.findOneAndUpdate( - { conversationId: conversationId, user }, - { $set: update }, - { new: true, upsert: true } - ).exec(); - } catch (error) { - console.log(error); - return { message: 'Error saving conversation' }; - } - }, - updateConvo: async (user, { conversationId, oldConvoId, ...update }) => { - try { - let convoId = conversationId; - if (oldConvoId) { - convoId = oldConvoId; - update.conversationId = conversationId; - } - - return await Conversation.findOneAndUpdate({ conversationId: convoId, user }, update, { - new: true + return await Conversation.findOneAndUpdate({ conversationId: conversationId, user }, update, { + new: true, + upsert: true }).exec(); } catch (error) { console.log(error); - return { message: 'Error updating conversation' }; + return { message: 
'Error saving conversation' }; } }, getConvosByPage: async (user, pageNumber = 1, pageSize = 12) => { @@ -82,7 +58,7 @@ module.exports = { // will handle a syncing solution soon const deletedConvoIds = []; - convoIds.forEach(convo => + convoIds.forEach((convo) => promises.push( Conversation.findOne({ user, @@ -145,7 +121,7 @@ module.exports = { }, deleteConvos: async (user, filter) => { let toRemove = await Conversation.find({ ...filter, user }).select('conversationId'); - const ids = toRemove.map(instance => instance.conversationId); + const ids = toRemove.map((instance) => instance.conversationId); let deleteCount = await Conversation.deleteMany({ ...filter, user }).exec(); deleteCount.messages = await deleteMessages({ conversationId: { $in: ids } }); return deleteCount; diff --git a/api/models/Message.js b/api/models/Message.js index c0b280a85b..89dde7b128 100644 --- a/api/models/Message.js +++ b/api/models/Message.js @@ -9,9 +9,12 @@ module.exports = { sender, text, isCreatedByUser = false, - error + error, + unfinished, + cancelled }) => { try { + // may also need to update the conversation here await Message.findOneAndUpdate( { messageId }, { @@ -21,7 +24,9 @@ module.exports = { sender, text, isCreatedByUser, - error + error, + unfinished, + cancelled }, { upsert: true, new: true } ); @@ -44,7 +49,7 @@ module.exports = { return { message: 'Error deleting messages' }; } }, - getMessages: async filter => { + getMessages: async (filter) => { try { return await Message.find(filter).sort({ createdAt: 1 }).exec(); } catch (error) { @@ -52,7 +57,7 @@ module.exports = { return { message: 'Error getting messages' }; } }, - deleteMessages: async filter => { + deleteMessages: async (filter) => { try { return await Message.deleteMany(filter).exec(); } catch (error) { diff --git a/api/models/index.js b/api/models/index.js index 8ae2eba854..bda6239d7b 100644 --- a/api/models/index.js +++ b/api/models/index.js @@ -1,5 +1,5 @@ const { getMessages, saveMessage, 
deleteMessagesSince, deleteMessages } = require('./Message'); -const { getConvoTitle, getConvo, saveConvo, updateConvo } = require('./Conversation'); +const { getConvoTitle, getConvo, saveConvo } = require('./Conversation'); const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset'); module.exports = { @@ -11,7 +11,6 @@ module.exports = { getConvoTitle, getConvo, saveConvo, - updateConvo, getPreset, getPresets, diff --git a/api/models/schema/messageSchema.js b/api/models/schema/messageSchema.js index 9d21dd5b92..a8de751278 100644 --- a/api/models/schema/messageSchema.js +++ b/api/models/schema/messageSchema.js @@ -43,6 +43,14 @@ const messageSchema = mongoose.Schema( required: true, default: false }, + unfinished: { + type: Boolean, + default: false + }, + cancelled: { + type: Boolean, + default: false + }, error: { type: Boolean, default: false diff --git a/api/package-lock.json b/api/package-lock.json index 67bfce5947..f455638f09 100644 --- a/api/package-lock.json +++ b/api/package-lock.json @@ -1,6 +1,6 @@ { "name": "chatgpt-clone", - "version": "0.3.2", + "version": "0.3.3", "lockfileVersion": 2, "requires": true, "packages": { diff --git a/api/package.json b/api/package.json index afc581cacf..28e1790cfa 100644 --- a/api/package.json +++ b/api/package.json @@ -1,6 +1,6 @@ { "name": "chatgpt-clone", - "version": "0.3.2", + "version": "0.3.3", "description": "", "main": "server/index.js", "scripts": { diff --git a/api/server/routes/ask/addToCache.js b/api/server/routes/ask/addToCache.js new file mode 100644 index 0000000000..e2214ba178 --- /dev/null +++ b/api/server/routes/ask/addToCache.js @@ -0,0 +1,65 @@ +const Keyv = require('keyv'); +const { KeyvFile } = require('keyv-file'); +const { saveMessage } = require('../../../models'); + +const addToCache = async ({ endpoint, endpointOption, userMessage, responseMessage }) => { + try { + const conversationsCache = new Keyv({ + store: new KeyvFile({ filename: './data/cache.json' }), + namespace: 
'chatgpt' // should be 'bing' for bing/sydney + }); + + const { + conversationId, + messageId: userMessageId, + parentMessageId: userParentMessageId, + text: userText + } = userMessage; + const { + messageId: responseMessageId, + parentMessageId: responseParentMessageId, + text: responseText + } = responseMessage; + + let conversation = await conversationsCache.get(conversationId); + // used to generate a title for the conversation if none exists + // let isNewConversation = false; + if (!conversation) { + conversation = { + messages: [], + createdAt: Date.now() + }; + // isNewConversation = true; + } + + const roles = (options) => { + if (endpoint === 'openAI') { + return options?.chatGptLabel || 'ChatGPT'; + } else if (endpoint === 'bingAI') { + return options?.jailbreak ? 'Sydney' : 'BingAI'; + } + }; + + let _userMessage = { + id: userMessageId, + parentMessageId: userParentMessageId, + role: 'User', + message: userText + }; + + let _responseMessage = { + id: responseMessageId, + parentMessageId: responseParentMessageId, + role: roles(endpointOption), + message: responseText + }; + + conversation.messages.push(_userMessage, _responseMessage); + + await conversationsCache.set(conversationId, conversation); + } catch (error) { + console.error('Trouble adding to cache', error); + } +}; + +module.exports = addToCache; diff --git a/api/server/routes/ask/askBingAI.js b/api/server/routes/ask/askBingAI.js index 05c2220285..5d21ec4d27 100644 --- a/api/server/routes/ask/askBingAI.js +++ b/api/server/routes/ask/askBingAI.js @@ -2,7 +2,7 @@ const express = require('express'); const crypto = require('crypto'); const router = express.Router(); const { titleConvo, askBing } = require('../../../app'); -const { saveMessage, getConvoTitle, saveConvo, updateConvo, getConvo } = require('../../../models'); +const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models'); const { handleError, sendMessage, createOnProgress, handleText } = require('./handlers'); 
router.post('/', async (req, res) => { @@ -39,7 +39,8 @@ router.post('/', async (req, res) => { jailbreakConversationId: req.body?.jailbreakConversationId ?? null, systemMessage: req.body?.systemMessage ?? null, context: req.body?.context ?? null, - toneStyle: req.body?.toneStyle ?? 'fast' + toneStyle: req.body?.toneStyle ?? 'fast', + token: req.body?.token ?? null }; else endpointOption = { @@ -49,7 +50,8 @@ router.post('/', async (req, res) => { conversationSignature: req.body?.conversationSignature ?? null, clientId: req.body?.clientId ?? null, invocationId: req.body?.invocationId ?? null, - toneStyle: req.body?.toneStyle ?? 'fast' + toneStyle: req.body?.toneStyle ?? 'fast', + token: req.body?.token ?? null }; console.log('ask log', { @@ -93,6 +95,8 @@ const ask = async ({ }) => { let { text, parentMessageId: userParentMessageId, messageId: userMessageId } = userMessage; + let responseMessageId = crypto.randomUUID(); + res.writeHead(200, { Connection: 'keep-alive', 'Content-Type': 'text/event-stream', @@ -104,9 +108,26 @@ const ask = async ({ if (preSendRequest) sendMessage(res, { message: userMessage, created: true }); try { - const progressCallback = createOnProgress(); + let lastSavedTimestamp = 0; + const { onProgress: progressCallback, getPartialText } = createOnProgress({ + onProgress: ({ text }) => { + const currentTimestamp = Date.now(); + if (currentTimestamp - lastSavedTimestamp > 500) { + lastSavedTimestamp = currentTimestamp; + saveMessage({ + messageId: responseMessageId, + sender: endpointOption?.jailbreak ? 
'Sydney' : 'BingAI', + conversationId, + parentMessageId: overrideParentMessageId || userMessageId, + text: text, + unfinished: true, + cancelled: false, + error: false + }); + } + } + }); const abortController = new AbortController(); - res.on('close', () => abortController.abort()); let response = await askBing({ text, parentMessageId: userParentMessageId, @@ -133,14 +154,20 @@ const ask = async ({ let responseMessage = { conversationId: newConversationId, - messageId: newResponseMessageId, + messageId: responseMessageId, + newMessageId: newResponseMessageId, parentMessageId: overrideParentMessageId || newUserMassageId, sender: endpointOption?.jailbreak ? 'Sydney' : 'BingAI', text: await handleText(response, true), - suggestions: response.details.suggestedResponses && response.details.suggestedResponses.map(s => s.text) + suggestions: + response.details.suggestedResponses && response.details.suggestedResponses.map((s) => s.text), + unfinished: false, + cancelled: false, + error: false }; await saveMessage(responseMessage); + responseMessage.messageId = newResponseMessageId; // STEP2 update the convosation. @@ -202,7 +229,7 @@ const ask = async ({ if (userParentMessageId == '00000000-0000-0000-0000-000000000000') { const title = await titleConvo({ endpoint: endpointOption?.endpoint, text, response: responseMessage }); - await updateConvo(req?.session?.user?.username, { + await saveConvo(req?.session?.user?.username, { conversationId: conversationId, title }); @@ -210,10 +237,12 @@ const ask = async ({ } catch (error) { console.log(error); const errorMessage = { - messageId: crypto.randomUUID(), + messageId: responseMessageId, sender: endpointOption?.jailbreak ? 
'Sydney' : 'BingAI', conversationId, parentMessageId: overrideParentMessageId || userMessageId, + unfinished: false, + cancelled: false, error: true, text: error.message }; diff --git a/api/server/routes/ask/askChatGPTBrowser.js b/api/server/routes/ask/askChatGPTBrowser.js index 4592ab98b4..25b6f6b737 100644 --- a/api/server/routes/ask/askChatGPTBrowser.js +++ b/api/server/routes/ask/askChatGPTBrowser.js @@ -3,7 +3,7 @@ const crypto = require('crypto'); const router = express.Router(); const { getChatGPTBrowserModels } = require('../endpoints'); const { browserClient } = require('../../../app/'); -const { saveMessage, getConvoTitle, saveConvo, updateConvo, getConvo } = require('../../../models'); +const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models'); const { handleError, sendMessage, createOnProgress, handleText } = require('./handlers'); router.post('/', async (req, res) => { @@ -33,11 +33,12 @@ router.post('/', async (req, res) => { // build endpoint option const endpointOption = { - model: req.body?.model ?? 'text-davinci-002-render-sha' + model: req.body?.model ?? 'text-davinci-002-render-sha', + token: req.body?.token ?? 
null }; const availableModels = getChatGPTBrowserModels(); - if (availableModels.find(model => model === endpointOption.model) === undefined) + if (availableModels.find((model) => model === endpointOption.model) === undefined) return handleError(res, { text: 'Illegal request: model' }); console.log('ask log', { @@ -91,10 +92,29 @@ const ask = async ({ if (preSendRequest) sendMessage(res, { message: userMessage, created: true }); + let responseMessageId = crypto.randomUUID(); + try { - const progressCallback = createOnProgress(); + let lastSavedTimestamp = 0; + const { onProgress: progressCallback, getPartialText } = createOnProgress({ + onProgress: ({ text }) => { + const currentTimestamp = Date.now(); + if (currentTimestamp - lastSavedTimestamp > 500) { + lastSavedTimestamp = currentTimestamp; + saveMessage({ + messageId: responseMessageId, + sender: endpointOption?.jailbreak ? 'Sydney' : 'BingAI', + conversationId, + parentMessageId: overrideParentMessageId || userMessageId, + text: text, + unfinished: true, + cancelled: false, + error: false + }); + } + } + }); const abortController = new AbortController(); - res.on('close', () => abortController.abort()); let response = await browserClient({ text, parentMessageId: userParentMessageId, @@ -115,13 +135,18 @@ const ask = async ({ let responseMessage = { conversationId: newConversationId, - messageId: newResponseMessageId, + messageId: responseMessageId, + newMessageId: newResponseMessageId, parentMessageId: overrideParentMessageId || newUserMassageId, text: await handleText(response), - sender: endpointOption?.chatGptLabel || 'ChatGPT' + sender: endpointOption?.chatGptLabel || 'ChatGPT', + unfinished: false, + cancelled: false, + error: false }; await saveMessage(responseMessage); + responseMessage.messageId = newResponseMessageId; // STEP2 update the conversation @@ -167,17 +192,19 @@ const ask = async ({ if (userParentMessageId == '00000000-0000-0000-0000-000000000000') { // const title = await titleConvo({ 
endpoint: endpointOption?.endpoint, text, response: responseMessage }); const title = await response.details.title; - await updateConvo(req?.session?.user?.username, { + await saveConvo(req?.session?.user?.username, { conversationId: conversationId, title }); } } catch (error) { const errorMessage = { - messageId: crypto.randomUUID(), + messageId: responseMessageId, sender: 'ChatGPT', conversationId, parentMessageId: overrideParentMessageId || userMessageId, + unfinished: false, + cancelled: false, error: true, text: error.message }; diff --git a/api/server/routes/ask/askOpenAI.js b/api/server/routes/ask/askOpenAI.js index 9c5d121639..550518c2c0 100644 --- a/api/server/routes/ask/askOpenAI.js +++ b/api/server/routes/ask/askOpenAI.js @@ -1,11 +1,31 @@ const express = require('express'); const crypto = require('crypto'); const router = express.Router(); +const addToCache = require('./addToCache'); const { getOpenAIModels } = require('../endpoints'); const { titleConvo, askClient } = require('../../../app/'); -const { saveMessage, getConvoTitle, saveConvo, updateConvo, getConvo } = require('../../../models'); +const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models'); const { handleError, sendMessage, createOnProgress, handleText } = require('./handlers'); +const abortControllers = new Map(); + +router.post('/abort', async (req, res) => { + const { abortKey } = req.body; + console.log(`req.body`, req.body); + if (!abortControllers.has(abortKey)) { + return res.status(404).send('Request not found'); + } + + const { abortController } = abortControllers.get(abortKey); + + abortControllers.delete(abortKey); + const ret = await abortController.abortAsk(); + console.log('Aborted request', abortKey); + console.log('Aborted message:', ret); + + res.send(JSON.stringify(ret)); +}); + router.post('/', async (req, res) => { const { endpoint, @@ -43,7 +63,7 @@ router.post('/', async (req, res) => { }; const availableModels = getOpenAIModels(); - if 
(availableModels.find(model => model === endpointOption.model) === undefined) + if (availableModels.find((model) => model === endpointOption.model) === undefined) return handleError(res, { text: 'Illegal request: model' }); console.log('ask log', { @@ -87,6 +107,8 @@ const ask = async ({ }) => { let { text, parentMessageId: userParentMessageId, messageId: userMessageId } = userMessage; + let responseMessageId = crypto.randomUUID(); + res.writeHead(200, { Connection: 'keep-alive', 'Content-Type': 'text/event-stream', @@ -98,9 +120,55 @@ const ask = async ({ if (preSendRequest) sendMessage(res, { message: userMessage, created: true }); try { - const progressCallback = createOnProgress(); - const abortController = new AbortController(); - res.on('close', () => abortController.abort()); + let lastSavedTimestamp = 0; + const { onProgress: progressCallback, getPartialText } = createOnProgress({ + onProgress: ({ text }) => { + const currentTimestamp = Date.now(); + if (currentTimestamp - lastSavedTimestamp > 500) { + lastSavedTimestamp = currentTimestamp; + saveMessage({ + messageId: responseMessageId, + sender: endpointOption?.chatGptLabel || 'ChatGPT', + conversationId, + parentMessageId: overrideParentMessageId || userMessageId, + text: text, + unfinished: true, + cancelled: false, + error: false + }); + } + } + }); + + let abortController = new AbortController(); + abortController.abortAsk = async function () { + this.abort(); + + const responseMessage = { + messageId: responseMessageId, + sender: endpointOption?.chatGptLabel || 'ChatGPT', + conversationId, + parentMessageId: overrideParentMessageId || userMessageId, + text: getPartialText(), + unfinished: false, + cancelled: true, + error: false + }; + + saveMessage(responseMessage); + await addToCache({ endpoint: 'openAI', endpointOption, userMessage, responseMessage }); + + return { + title: await getConvoTitle(req?.session?.user?.username, conversationId), + final: true, + conversation: await 
getConvo(req?.session?.user?.username, conversationId), + requestMessage: userMessage, + responseMessage: responseMessage + }; + }; + const abortKey = conversationId; + abortControllers.set(abortKey, { abortController, ...endpointOption }); + let response = await askClient({ text, parentMessageId: userParentMessageId, @@ -114,6 +182,7 @@ const ask = async ({ abortController }); + abortControllers.delete(abortKey); console.log('CLIENT RESPONSE', response); const newConversationId = response.conversationId || conversationId; @@ -125,13 +194,18 @@ const ask = async ({ let responseMessage = { conversationId: newConversationId, - messageId: newResponseMessageId, + messageId: responseMessageId, + newMessageId: newResponseMessageId, parentMessageId: overrideParentMessageId || newUserMassageId, text: await handleText(response), - sender: endpointOption?.chatGptLabel || 'ChatGPT' + sender: endpointOption?.chatGptLabel || 'ChatGPT', + unfinished: false, + cancelled: false, + error: false }; await saveMessage(responseMessage); + responseMessage.messageId = newResponseMessageId; // STEP2 update the conversation let conversationUpdate = { conversationId: newConversationId, endpoint: 'openAI' }; @@ -174,7 +248,7 @@ const ask = async ({ if (userParentMessageId == '00000000-0000-0000-0000-000000000000') { const title = await titleConvo({ endpoint: endpointOption?.endpoint, text, response: responseMessage }); - await updateConvo(req?.session?.user?.username, { + await saveConvo(req?.session?.user?.username, { conversationId: conversationId, title }); @@ -182,10 +256,12 @@ const ask = async ({ } catch (error) { console.error(error); const errorMessage = { - messageId: crypto.randomUUID(), + messageId: responseMessageId, sender: endpointOption?.chatGptLabel || 'ChatGPT', conversationId, parentMessageId: overrideParentMessageId || userMessageId, + unfinished: false, + cancelled: false, error: true, text: error.message }; diff --git a/api/server/routes/ask/handlers.js 
b/api/server/routes/ask/handlers.js index 9efcd292c9..08b9e8008e 100644 --- a/api/server/routes/ask/handlers.js +++ b/api/server/routes/ask/handlers.js @@ -17,7 +17,7 @@ const sendMessage = (res, message) => { res.write(`event: message\ndata: ${JSON.stringify(message)}\n\n`); }; -const createOnProgress = () => { +const createOnProgress = ({ onProgress: _onProgress }) => { let i = 0; let code = ''; let tokens = ''; @@ -65,14 +65,21 @@ const createOnProgress = () => { } sendMessage(res, { text: tokens + cursor, message: true, initial: i === 0, ...rest }); + + _onProgress && _onProgress({ text: tokens, message: true, initial: i === 0, ...rest }); + i++; }; - const onProgress = opts => { + const onProgress = (opts) => { return _.partialRight(progressCallback, opts); }; - return onProgress; + const getPartialText = () => { + return tokens; + }; + + return { onProgress, getPartialText }; }; const handleText = async (response, bing = false) => { diff --git a/api/server/routes/convos.js b/api/server/routes/convos.js index ed3f6db15c..03ba85a1a6 100644 --- a/api/server/routes/convos.js +++ b/api/server/routes/convos.js @@ -2,7 +2,7 @@ const express = require('express'); const router = express.Router(); const { titleConvo } = require('../../app/'); const { getConvo, saveConvo, getConvoTitle } = require('../../models'); -const { getConvosByPage, deleteConvos, updateConvo } = require('../../models/Conversation'); +const { getConvosByPage, deleteConvos } = require('../../models/Conversation'); const { getMessages } = require('../../models/Message'); router.get('/', async (req, res) => { @@ -44,7 +44,7 @@ router.post('/update', async (req, res) => { const update = req.body.arg; try { - const dbResponse = await updateConvo(req?.session?.user?.username, update); + const dbResponse = await saveConvo(req?.session?.user?.username, update); res.status(201).send(dbResponse); } catch (error) { console.error(error); diff --git a/api/server/routes/endpoints.js 
b/api/server/routes/endpoints.js index bcdd051e2f..bd23d52a4a 100644 --- a/api/server/routes/endpoints.js +++ b/api/server/routes/endpoints.js @@ -18,8 +18,15 @@ const getChatGPTBrowserModels = () => { router.get('/', function (req, res) { const azureOpenAI = !!process.env.AZURE_OPENAI_KEY; const openAI = process.env.OPENAI_KEY ? { availableModels: getOpenAIModels() } : false; - const bingAI = !!process.env.BINGAI_TOKEN; - const chatGPTBrowser = process.env.CHATGPT_TOKEN ? { availableModels: getChatGPTBrowserModels() } : false; + const bingAI = process.env.BINGAI_TOKEN + ? { userProvide: process.env.BINGAI_TOKEN == 'user_provide' } + : false; + const chatGPTBrowser = process.env.CHATGPT_TOKEN + ? { + userProvide: process.env.CHATGPT_TOKEN == 'user_provide', + availableModels: getChatGPTBrowserModels() + } + : false; res.send(JSON.stringify({ azureOpenAI, openAI, bingAI, chatGPTBrowser })); }); diff --git a/api/server/routes/tokenizer.js b/api/server/routes/tokenizer.js index 9e3007cbad..a5b95530dc 100644 --- a/api/server/routes/tokenizer.js +++ b/api/server/routes/tokenizer.js @@ -6,13 +6,20 @@ const registry = require('@dqbd/tiktoken/registry.json'); const models = require('@dqbd/tiktoken/model_to_encoding.json'); router.post('/', async (req, res) => { - const { arg } = req.body; - // console.log(typeof req.body === 'object' ? { ...req.body, ...req.query } : req.query); - const model = await load(registry[models['gpt-3.5-turbo']]); - const encoder = new Tiktoken(model.bpe_ranks, model.special_tokens, model.pat_str); - const tokens = encoder.encode(arg.text); - encoder.free(); - res.send({ count: tokens.length }); + try { + const { arg } = req.body; + + // console.log('context:', arg, req.body); + // console.log(typeof req.body === 'object' ? 
{ ...req.body, ...req.query } : req.query); + const model = await load(registry[models['gpt-3.5-turbo']]); + const encoder = new Tiktoken(model.bpe_ranks, model.special_tokens, model.pat_str); + const tokens = encoder.encode(arg.text); + encoder.free(); + res.send({ count: tokens.length }); + } catch (e) { + console.error(e); + res.status(500).send(e.message); + } }); module.exports = router; diff --git a/client/package-lock.json b/client/package-lock.json index f589d24776..27846da768 100644 --- a/client/package-lock.json +++ b/client/package-lock.json @@ -1,6 +1,6 @@ { "name": "chatgpt-clone", - "version": "0.3.2", + "version": "0.3.3", "lockfileVersion": 2, "requires": true, "packages": { diff --git a/client/package.json b/client/package.json index e83470cc1e..09b83341f2 100644 --- a/client/package.json +++ b/client/package.json @@ -1,6 +1,6 @@ { "name": "chatgpt-clone", - "version": "0.3.2", + "version": "0.3.3", "description": "", "type": "module", "scripts": { diff --git a/client/src/components/Endpoints/EditPresetDialog.jsx b/client/src/components/Endpoints/EditPresetDialog.jsx index e264920772..c9d618dbd2 100644 --- a/client/src/components/Endpoints/EditPresetDialog.jsx +++ b/client/src/components/Endpoints/EditPresetDialog.jsx @@ -21,7 +21,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => { const setPresets = useSetRecoilState(store.presets); const availableEndpoints = useRecoilValue(store.availableEndpoints); - const endpointsFilter = useRecoilValue(store.endpointsFilter); + const endpointsConfig = useRecoilValue(store.endpointsConfig); const setOption = param => newValue => { let update = {}; @@ -32,7 +32,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => { ...prevState, ...update }, - endpointsFilter + endpointsConfig }) ); }; @@ -44,7 +44,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => { axios({ method: 'post', url: '/api/presets', - data: cleanupPreset({ 
preset, endpointsFilter }), + data: cleanupPreset({ preset, endpointsConfig }), withCredentials: true }).then(res => { setPresets(res?.data); @@ -54,7 +54,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => { const exportPreset = () => { const fileName = filenamify(preset?.title || 'preset'); exportFromJSON({ - data: cleanupPreset({ preset, endpointsFilter }), + data: cleanupPreset({ preset, endpointsConfig }), fileName, exportType: exportFromJSON.types.json }); diff --git a/client/src/components/Endpoints/EndpointOptionsDialog.jsx b/client/src/components/Endpoints/EndpointOptionsDialog.jsx index dc62499584..17b9528f64 100644 --- a/client/src/components/Endpoints/EndpointOptionsDialog.jsx +++ b/client/src/components/Endpoints/EndpointOptionsDialog.jsx @@ -16,7 +16,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) = const [preset, setPreset] = useState(_preset); const [saveAsDialogShow, setSaveAsDialogShow] = useState(false); - const endpointsFilter = useRecoilValue(store.endpointsFilter); + const endpointsConfig = useRecoilValue(store.endpointsConfig); const setOption = param => newValue => { let update = {}; @@ -33,7 +33,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) = const exportPreset = () => { exportFromJSON({ - data: cleanupPreset({ preset, endpointsFilter }), + data: cleanupPreset({ preset, endpointsConfig }), fileName: `${preset?.title}.json`, exportType: exportFromJSON.types.json }); diff --git a/client/src/components/Endpoints/SaveAsPresetDialog.jsx b/client/src/components/Endpoints/SaveAsPresetDialog.jsx index 9fd51153e9..9e85b568cd 100644 --- a/client/src/components/Endpoints/SaveAsPresetDialog.jsx +++ b/client/src/components/Endpoints/SaveAsPresetDialog.jsx @@ -1,4 +1,4 @@ -import { useEffect, useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { useRecoilValue } from 'recoil'; import DialogTemplate from 
'../ui/DialogTemplate'; import { Dialog } from '../ui/Dialog.tsx'; @@ -11,7 +11,7 @@ import store from '~/store'; const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => { const [title, setTitle] = useState(preset?.title || 'My Preset'); - const endpointsFilter = useRecoilValue(store.endpointsFilter); + const endpointsConfig = useRecoilValue(store.endpointsConfig); const createPresetMutation = useCreatePresetMutation(); const defaultTextProps = @@ -23,7 +23,7 @@ const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => { ...preset, title }, - endpointsFilter + endpointsConfig }); createPresetMutation.mutate(_preset); }; diff --git a/client/src/components/Input/NewConversationMenu/EndpointItem.jsx b/client/src/components/Input/NewConversationMenu/EndpointItem.jsx index a783750227..690e120e82 100644 --- a/client/src/components/Input/NewConversationMenu/EndpointItem.jsx +++ b/client/src/components/Input/NewConversationMenu/EndpointItem.jsx @@ -1,8 +1,16 @@ -import React from 'react'; +import React, { useState } from 'react'; import { DropdownMenuRadioItem } from '../../ui/DropdownMenu.tsx'; +import { Settings } from 'lucide-react'; import getIcon from '~/utils/getIcon'; +import { useRecoilValue } from 'recoil'; +import SetTokenDialog from '../SetTokenDialog'; + +import store from '../../../store'; export default function ModelItem({ endpoint, value, onSelect }) { + const [setTokenDialogOpen, setSetTokenDialogOpen] = useState(false); + const endpointsConfig = useRecoilValue(store.endpointsConfig); + const icon = getIcon({ size: 20, endpoint, @@ -10,15 +18,37 @@ export default function ModelItem({ endpoint, value, onSelect }) { className: 'mr-2' }); + const isuserProvide = endpointsConfig?.[endpoint]?.userProvide; + // regular model return ( - - {icon} - {endpoint} - {!!['azureOpenAI', 'openAI'].find(e => e === endpoint) && $} - + <> + + {icon} + {endpoint} + {!!['azureOpenAI', 'openAI'].find(e => e === endpoint) && $} +
+ {isuserProvide ? ( + + ) : null} + + + ); } diff --git a/client/src/components/Input/NewConversationMenu/FileUpload.jsx b/client/src/components/Input/NewConversationMenu/FileUpload.jsx index 69f2bf1dba..15b8e1a743 100644 --- a/client/src/components/Input/NewConversationMenu/FileUpload.jsx +++ b/client/src/components/Input/NewConversationMenu/FileUpload.jsx @@ -7,7 +7,7 @@ import store from '~/store'; const FileUpload = ({ onFileSelected }) => { // const setPresets = useSetRecoilState(store.presets); - const endpointsFilter = useRecoilValue(store.endpointsFilter); + const endpointsConfig = useRecoilValue(store.endpointsConfig); const handleFileChange = event => { const file = event.target.files[0]; @@ -16,7 +16,7 @@ const FileUpload = ({ onFileSelected }) => { const reader = new FileReader(); reader.onload = e => { const jsonData = JSON.parse(e.target.result); - onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsFilter }), presetId: null }); + onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsConfig }), presetId: null }); }; reader.readAsText(file); }; @@ -24,10 +24,10 @@ const FileUpload = ({ onFileSelected }) => { return ( { + const [token, setToken] = useState(''); + const { getToken, saveToken } = store.useToken(endpoint); + + const defaultTextProps = + 'rounded-md border border-gray-300 bg-transparent text-sm shadow-[0_0_10px_rgba(0,0,0,0.10)] outline-none placeholder:text-gray-400 focus:outline-none focus:ring-gray-400 focus:ring-opacity-20 focus:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 dark:border-gray-400 dark:bg-gray-700 dark:text-gray-50 dark:shadow-[0_0_15px_rgba(0,0,0,0.10)] dark:focus:border-gray-400 dark:focus:outline-none dark:focus:ring-0 dark:focus:ring-gray-400 dark:focus:ring-offset-0'; + + const submit = () => { + saveToken(token); + onOpenChange(false); + }; + + useEffect(() => { + setToken(getToken() ?? 
''); + }, [open]); + + const helpText = { + bingAI: ( + + The Bing Access Token is the "_U" cookie from bing.com. Use dev tools or an extension while logged + into the site to view it. + + ), + chatGPTBrowser: ( + + To get your Access token For ChatGPT 'Free Version', login to{' '} + + https://chat.openai.com + + , then visit{' '} + + https://chat.openai.com/api/auth/session + + . Copy access token. + + ) + }; + + return ( + + + + setToken(e.target.value || '')} + placeholder="Set the token." + className={cn( + defaultTextProps, + 'flex h-10 max-h-10 w-full resize-none px-3 py-2 focus:outline-none focus:ring-0 focus:ring-opacity-0 focus:ring-offset-0' + )} + /> + + Your token will be sent to the server, but not saved. + + {helpText?.[endpoint]} +
+ } + selection={{ + selectHandler: submit, + selectClasses: 'bg-green-600 hover:bg-green-700 dark:hover:bg-green-800 text-white', + selectText: 'Submit' + }} + /> + + ); +}; + +export default SetTokenDialog; diff --git a/client/src/components/Input/SubmitButton.jsx b/client/src/components/Input/SubmitButton.jsx index 1198a4eee2..0773208d36 100644 --- a/client/src/components/Input/SubmitButton.jsx +++ b/client/src/components/Input/SubmitButton.jsx @@ -1,12 +1,31 @@ -import React from 'react'; +import React, { useState } from 'react'; import StopGeneratingIcon from '../svg/StopGeneratingIcon'; +import { Settings } from 'lucide-react'; +import SetTokenDialog from './SetTokenDialog'; +import store from '../../store'; + +export default function SubmitButton({ + endpoint, + submitMessage, + handleStopGenerating, + disabled, + isSubmitting, + endpointsConfig +}) { + const [setTokenDialogOpen, setSetTokenDialogOpen] = useState(false); + const { getToken } = store.useToken(endpoint); + + const isTokenProvided = endpointsConfig?.[endpoint]?.userProvide ? !!getToken() : true; -export default function SubmitButton({ submitMessage, handleStopGenerating, disabled, isSubmitting }) { const clickHandler = e => { e.preventDefault(); submitMessage(); }; + const setToken = () => { + setSetTokenDialogOpen(true); + }; + if (isSubmitting) return ( // ); - else + else if (!isTokenProvided) { + return ( + <> + + + + ); + } else return (