Mirror of https://github.com/danny-avila/LibreChat.git
Synced 2025-12-26 21:28:50 +01:00

Merge pull request #177 from danny-avila/user-providered-key
User provided key and unfinished messages.

Commit 163388b8a9
44 changed files with 748 additions and 247 deletions
@@ -25,8 +25,9 @@ MONGO_URI="mongodb://127.0.0.1:27017/chatgpt-clone"
OPENAI_KEY=

# Identify the available models, sperate by comma, and not space in it
# The first will be default
# Leave it blank to use internal settings.
# OPENAI_MODELS=gpt-4,text-davinci-003,gpt-3.5-turbo,gpt-3.5-turbo-0301
OPENAI_MODELS=gpt-3.5-turbo,gpt-3.5-turbo-0301,text-davinci-003,gpt-4

# Reverse proxy setting for OpenAI
# https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy

@@ -39,6 +40,8 @@ OPENAI_KEY=

# BingAI Tokens: the "_U" cookies value from bing.com
# Leave it and BINGAI_USER_TOKEN blank to disable this endpoint.
# Set to "user_providered" to allow user provided token.
# BINGAI_TOKEN="user_providered"
BINGAI_TOKEN=

# BingAI Host:

@@ -46,12 +49,6 @@ BINGAI_TOKEN=
# Leave it blank to use default server.
# BINGAI_HOST="https://cn.bing.com"

# BingAI User defined Token
# Allow user to set their own token by client
# Uncomment this to enable this feature.
# (Not implemented yet.)
# BINGAI_USER_TOKEN=1

#############################
# Endpoint chatGPT:

@@ -61,11 +58,14 @@ BINGAI_TOKEN=
# Access token from https://chat.openai.com/api/auth/session
# Exposes your access token to CHATGPT_REVERSE_PROXY
# Leave it blank to disable this endpoint
# Set to "user_provide" to allow user provided token.
# CHATGPT_TOKEN="user_provide"
CHATGPT_TOKEN=

# Identify the available models, sperate by comma, and not space in it
# The first will be default
# Leave it blank to use internal settings.
# CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4
CHATGPT_MODELS=text-davinci-002-render-sha,text-davinci-002-render-paid,gpt-4

# Reverse proxy setting for OpenAI
# https://github.com/waylaidwanderer/node-chatgpt-api#using-a-reverse-proxy
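Note on the new "user_provide" setting: when BINGAI_TOKEN or CHATGPT_TOKEN is set to that literal string, the server no longer reads a credential from the environment and instead expects each request to carry a token supplied by the client. A minimal sketch of the resolution pattern applied in the client modules further down in this diff (the helper name is illustrative, not from the repo):

// Resolve the credential for an endpoint: prefer a request-supplied token
// when the env var holds the "user_provide" sentinel, otherwise fall back to the env value.
const resolveToken = (envValue, requestToken) =>
  envValue === 'user_provide' ? requestToken : envValue ?? null;

// e.g. resolveToken(process.env.BINGAI_TOKEN, req.body?.token)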
@@ -1,5 +1,5 @@
{
"arrowParens": "avoid",
"arrowParens": "always",
"bracketSpacing": true,
"endOfLine": "lf",
"htmlWhitespaceSensitivity": "css",

@@ -13,6 +13,7 @@ const askBing = async ({
clientId,
invocationId,
toneStyle,
token,
onProgress
}) => {
const { BingAIClient } = await import('@waylaidwanderer/chatgpt-api');

@@ -22,7 +23,7 @@ const askBing = async ({
const bingAIClient = new BingAIClient({
// "_U" cookie from bing.com
userToken: process.env.BINGAI_TOKEN,
userToken: process.env.BINGAI_TOKEN == 'user_provide' ? token : process.env.BINGAI_TOKEN ?? null,
// If the above doesn't work, provide all your cookies as a string instead
// cookies: '',
debug: false,

@@ -6,6 +6,7 @@ const browserClient = async ({
parentMessageId,
conversationId,
model,
token,
onProgress,
abortController
}) => {

@@ -18,7 +19,7 @@ const browserClient = async ({
// Warning: This will expose your access token to a third party. Consider the risks before using this.
reverseProxyUrl: process.env.CHATGPT_REVERSE_PROXY || 'https://bypass.churchless.tech/api/conversation',
// Access token from https://chat.openai.com/api/auth/session
accessToken: process.env.CHATGPT_TOKEN,
accessToken: process.env.CHATGPT_TOKEN == 'user_provide' ? token : process.env.CHATGPT_TOKEN ?? null,
model: model,
// debug: true
proxy: process.env.PROXY || null

@@ -13,45 +13,21 @@ const getConvo = async (user, conversationId) => {

module.exports = {
Conversation,
saveConvo: async (user, { conversationId, newConversationId, title, ...convo }) => {
saveConvo: async (user, { conversationId, newConversationId, ...convo }) => {
try {
const messages = await getMessages({ conversationId });
const update = { ...convo, messages };
if (title) {
update.title = title;
update.user = user;
}
const update = { ...convo, messages, user };
if (newConversationId) {
update.conversationId = newConversationId;
}
if (!update.jailbreakConversationId) {
update.jailbreakConversationId = null;
}

return await Conversation.findOneAndUpdate(
{ conversationId: conversationId, user },
{ $set: update },
{ new: true, upsert: true }
).exec();
} catch (error) {
console.log(error);
return { message: 'Error saving conversation' };
}
},
updateConvo: async (user, { conversationId, oldConvoId, ...update }) => {
try {
let convoId = conversationId;
if (oldConvoId) {
convoId = oldConvoId;
update.conversationId = conversationId;
}

return await Conversation.findOneAndUpdate({ conversationId: convoId, user }, update, {
new: true
return await Conversation.findOneAndUpdate({ conversationId: conversationId, user }, update, {
new: true,
upsert: true
}).exec();
} catch (error) {
console.log(error);
return { message: 'Error updating conversation' };
return { message: 'Error saving conversation' };
}
},
getConvosByPage: async (user, pageNumber = 1, pageSize = 12) => {

@@ -82,7 +58,7 @@ module.exports = {
// will handle a syncing solution soon
const deletedConvoIds = [];

convoIds.forEach(convo =>
convoIds.forEach((convo) =>
promises.push(
Conversation.findOne({
user,

@@ -145,7 +121,7 @@ module.exports = {
},
deleteConvos: async (user, filter) => {
let toRemove = await Conversation.find({ ...filter, user }).select('conversationId');
const ids = toRemove.map(instance => instance.conversationId);
const ids = toRemove.map((instance) => instance.conversationId);
let deleteCount = await Conversation.deleteMany({ ...filter, user }).exec();
deleteCount.messages = await deleteMessages({ conversationId: { $in: ids } });
return deleteCount;
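With updateConvo removed, saveConvo becomes the single write path: it always upserts by { conversationId, user } and always attaches the user, so callers that previously went through updateConvo (for example to set a title) now call saveConvo directly. A minimal usage sketch under that assumption (require path and values illustrative):

// Rename a conversation; the upsert creates the document if it does not exist yet.
const { saveConvo } = require('../models');

await saveConvo('some-username', {
  conversationId: 'example-conversation-id',
  title: 'New chat title'
});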
@@ -9,9 +9,12 @@ module.exports = {
sender,
text,
isCreatedByUser = false,
error
error,
unfinished,
cancelled
}) => {
try {
// may also need to update the conversation here
await Message.findOneAndUpdate(
{ messageId },
{

@@ -21,7 +24,9 @@ module.exports = {
sender,
text,
isCreatedByUser,
error
error,
unfinished,
cancelled
},
{ upsert: true, new: true }
);

@@ -44,7 +49,7 @@ module.exports = {
return { message: 'Error deleting messages' };
}
},
getMessages: async filter => {
getMessages: async (filter) => {
try {
return await Message.find(filter).sort({ createdAt: 1 }).exec();
} catch (error) {

@@ -52,7 +57,7 @@ module.exports = {
return { message: 'Error getting messages' };
}
},
deleteMessages: async filter => {
deleteMessages: async (filter) => {
try {
return await Message.deleteMany(filter).exec();
} catch (error) {

@@ -1,5 +1,5 @@
const { getMessages, saveMessage, deleteMessagesSince, deleteMessages } = require('./Message');
const { getConvoTitle, getConvo, saveConvo, updateConvo } = require('./Conversation');
const { getConvoTitle, getConvo, saveConvo } = require('./Conversation');
const { getPreset, getPresets, savePreset, deletePresets } = require('./Preset');

module.exports = {

@@ -11,7 +11,6 @@ module.exports = {
getConvoTitle,
getConvo,
saveConvo,
updateConvo,

getPreset,
getPresets,

@@ -43,6 +43,14 @@ const messageSchema = mongoose.Schema(
required: true,
default: false
},
unfinished: {
type: Boolean,
default: false
},
cancelled: {
type: Boolean,
default: false
},
error: {
type: Boolean,
default: false
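The two new schema flags make a message's lifecycle queryable: unfinished marks a response whose streaming never completed, cancelled marks one the user aborted. A small illustrative query, not part of this commit:

// Find responses that were interrupted mid-stream for a given conversation.
const stalled = await Message.find({ conversationId, unfinished: true }).exec();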
api/package-lock.json (generated, 2 changes)

@@ -1,6 +1,6 @@
{
"name": "chatgpt-clone",
"version": "0.3.2",
"version": "0.3.3",
"lockfileVersion": 2,
"requires": true,
"packages": {

@@ -1,6 +1,6 @@
{
"name": "chatgpt-clone",
"version": "0.3.2",
"version": "0.3.3",
"description": "",
"main": "server/index.js",
"scripts": {

api/server/routes/ask/addToCache.js (new file, 65 lines)

@@ -0,0 +1,65 @@
const Keyv = require('keyv');
const { KeyvFile } = require('keyv-file');
const { saveMessage } = require('../../../models');

const addToCache = async ({ endpoint, endpointOption, userMessage, responseMessage }) => {
try {
const conversationsCache = new Keyv({
store: new KeyvFile({ filename: './data/cache.json' }),
namespace: 'chatgpt' // should be 'bing' for bing/sydney
});

const {
conversationId,
messageId: userMessageId,
parentMessageId: userParentMessageId,
text: userText
} = userMessage;
const {
messageId: responseMessageId,
parentMessageId: responseParentMessageId,
text: responseText
} = responseMessage;

let conversation = await conversationsCache.get(conversationId);
// used to generate a title for the conversation if none exists
// let isNewConversation = false;
if (!conversation) {
conversation = {
messages: [],
createdAt: Date.now()
};
// isNewConversation = true;
}

const roles = (options) => {
if (endpoint === 'openAI') {
return options?.chatGptLabel || 'ChatGPT';
} else if (endpoint === 'bingAI') {
return options?.jailbreak ? 'Sydney' : 'BingAI';
}
};

let _userMessage = {
id: userMessageId,
parentMessageId: userParentMessageId,
role: 'User',
message: userText
};

let _responseMessage = {
id: responseMessageId,
parentMessageId: responseParentMessageId,
role: roles(endpointOption),
message: responseText
};

conversation.messages.push(_userMessage, _responseMessage);

await conversationsCache.set(conversationId, conversation);
} catch (error) {
console.error('Trouble adding to cache', error);
}
};

module.exports = addToCache;
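addToCache writes a finished or aborted exchange into the same keyv-file store (./data/cache.json) that the underlying chatgpt-api client appears to read, so an aborted response is presumably still visible to follow-up requests. A hedged usage sketch with illustrative field values:

const addToCache = require('./addToCache');

await addToCache({
  endpoint: 'openAI',
  endpointOption: { chatGptLabel: null },
  userMessage: { conversationId, messageId: userMessageId, parentMessageId, text: 'Hello' },
  responseMessage: { messageId: responseMessageId, parentMessageId: userMessageId, text: 'Partial reply…' }
});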
@@ -2,7 +2,7 @@ const express = require('express');
const crypto = require('crypto');
const router = express.Router();
const { titleConvo, askBing } = require('../../../app');
const { saveMessage, getConvoTitle, saveConvo, updateConvo, getConvo } = require('../../../models');
const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models');
const { handleError, sendMessage, createOnProgress, handleText } = require('./handlers');

router.post('/', async (req, res) => {

@@ -39,7 +39,8 @@ router.post('/', async (req, res) => {
jailbreakConversationId: req.body?.jailbreakConversationId ?? null,
systemMessage: req.body?.systemMessage ?? null,
context: req.body?.context ?? null,
toneStyle: req.body?.toneStyle ?? 'fast'
toneStyle: req.body?.toneStyle ?? 'fast',
token: req.body?.token ?? null
};
else
endpointOption = {

@@ -49,7 +50,8 @@ router.post('/', async (req, res) => {
conversationSignature: req.body?.conversationSignature ?? null,
clientId: req.body?.clientId ?? null,
invocationId: req.body?.invocationId ?? null,
toneStyle: req.body?.toneStyle ?? 'fast'
toneStyle: req.body?.toneStyle ?? 'fast',
token: req.body?.token ?? null
};

console.log('ask log', {

@@ -93,6 +95,8 @@ const ask = async ({
}) => {
let { text, parentMessageId: userParentMessageId, messageId: userMessageId } = userMessage;

let responseMessageId = crypto.randomUUID();

res.writeHead(200, {
Connection: 'keep-alive',
'Content-Type': 'text/event-stream',

@@ -104,9 +108,26 @@ const ask = async ({
if (preSendRequest) sendMessage(res, { message: userMessage, created: true });

try {
const progressCallback = createOnProgress();
let lastSavedTimestamp = 0;
const { onProgress: progressCallback, getPartialText } = createOnProgress({
onProgress: ({ text }) => {
const currentTimestamp = Date.now();
if (currentTimestamp - lastSavedTimestamp > 500) {
lastSavedTimestamp = currentTimestamp;
saveMessage({
messageId: responseMessageId,
sender: endpointOption?.jailbreak ? 'Sydney' : 'BingAI',
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
text: text,
unfinished: true,
cancelled: false,
error: false
});
}
}
});
const abortController = new AbortController();
res.on('close', () => abortController.abort());
let response = await askBing({
text,
parentMessageId: userParentMessageId,

@@ -133,14 +154,20 @@ const ask = async ({

let responseMessage = {
conversationId: newConversationId,
messageId: newResponseMessageId,
messageId: responseMessageId,
newMessageId: newResponseMessageId,
parentMessageId: overrideParentMessageId || newUserMassageId,
sender: endpointOption?.jailbreak ? 'Sydney' : 'BingAI',
text: await handleText(response, true),
suggestions: response.details.suggestedResponses && response.details.suggestedResponses.map(s => s.text)
suggestions:
response.details.suggestedResponses && response.details.suggestedResponses.map((s) => s.text),
unfinished: false,
cancelled: false,
error: false
};

await saveMessage(responseMessage);
responseMessage.messageId = newResponseMessageId;

// STEP2 update the convosation.

@@ -202,7 +229,7 @@ const ask = async ({
if (userParentMessageId == '00000000-0000-0000-0000-000000000000') {
const title = await titleConvo({ endpoint: endpointOption?.endpoint, text, response: responseMessage });

await updateConvo(req?.session?.user?.username, {
await saveConvo(req?.session?.user?.username, {
conversationId: conversationId,
title
});

@@ -210,10 +237,12 @@ const ask = async ({
} catch (error) {
console.log(error);
const errorMessage = {
messageId: crypto.randomUUID(),
messageId: responseMessageId,
sender: endpointOption?.jailbreak ? 'Sydney' : 'BingAI',
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
unfinished: false,
cancelled: false,
error: true,
text: error.message
};
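The pattern added to each ask route: while tokens stream in, the partial text is persisted at most once every 500 ms with unfinished: true, and the same responseMessageId is reused for the final save so the placeholder row is overwritten rather than duplicated. Extracted here as a standalone sketch (helper name illustrative, not from the repo):

// Throttle partial saves to roughly one write per 500 ms.
const makePartialSaver = ({ responseMessageId, conversationId, parentMessageId, sender }) => {
  let lastSavedTimestamp = 0;
  return ({ text }) => {
    const now = Date.now();
    if (now - lastSavedTimestamp <= 500) return;
    lastSavedTimestamp = now;
    saveMessage({
      messageId: responseMessageId,
      sender,
      conversationId,
      parentMessageId,
      text,
      unfinished: true,
      cancelled: false,
      error: false
    });
  };
};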
@@ -3,7 +3,7 @@ const crypto = require('crypto');
const router = express.Router();
const { getChatGPTBrowserModels } = require('../endpoints');
const { browserClient } = require('../../../app/');
const { saveMessage, getConvoTitle, saveConvo, updateConvo, getConvo } = require('../../../models');
const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models');
const { handleError, sendMessage, createOnProgress, handleText } = require('./handlers');

router.post('/', async (req, res) => {

@@ -33,11 +33,12 @@ router.post('/', async (req, res) => {

// build endpoint option
const endpointOption = {
model: req.body?.model ?? 'text-davinci-002-render-sha'
model: req.body?.model ?? 'text-davinci-002-render-sha',
token: req.body?.token ?? null
};

const availableModels = getChatGPTBrowserModels();
if (availableModels.find(model => model === endpointOption.model) === undefined)
if (availableModels.find((model) => model === endpointOption.model) === undefined)
return handleError(res, { text: 'Illegal request: model' });

console.log('ask log', {

@@ -91,10 +92,29 @@ const ask = async ({

if (preSendRequest) sendMessage(res, { message: userMessage, created: true });

let responseMessageId = crypto.randomUUID();

try {
const progressCallback = createOnProgress();
let lastSavedTimestamp = 0;
const { onProgress: progressCallback, getPartialText } = createOnProgress({
onProgress: ({ text }) => {
const currentTimestamp = Date.now();
if (currentTimestamp - lastSavedTimestamp > 500) {
lastSavedTimestamp = currentTimestamp;
saveMessage({
messageId: responseMessageId,
sender: endpointOption?.jailbreak ? 'Sydney' : 'BingAI',
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
text: text,
unfinished: true,
cancelled: false,
error: false
});
}
}
});
const abortController = new AbortController();
res.on('close', () => abortController.abort());
let response = await browserClient({
text,
parentMessageId: userParentMessageId,

@@ -115,13 +135,18 @@ const ask = async ({

let responseMessage = {
conversationId: newConversationId,
messageId: newResponseMessageId,
messageId: responseMessageId,
newMessageId: newResponseMessageId,
parentMessageId: overrideParentMessageId || newUserMassageId,
text: await handleText(response),
sender: endpointOption?.chatGptLabel || 'ChatGPT'
sender: endpointOption?.chatGptLabel || 'ChatGPT',
unfinished: false,
cancelled: false,
error: false
};

await saveMessage(responseMessage);
responseMessage.messageId = newResponseMessageId;

// STEP2 update the conversation

@@ -167,17 +192,19 @@ const ask = async ({
if (userParentMessageId == '00000000-0000-0000-0000-000000000000') {
// const title = await titleConvo({ endpoint: endpointOption?.endpoint, text, response: responseMessage });
const title = await response.details.title;
await updateConvo(req?.session?.user?.username, {
await saveConvo(req?.session?.user?.username, {
conversationId: conversationId,
title
});
}
} catch (error) {
const errorMessage = {
messageId: crypto.randomUUID(),
messageId: responseMessageId,
sender: 'ChatGPT',
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
unfinished: false,
cancelled: false,
error: true,
text: error.message
};

@@ -1,11 +1,31 @@
const express = require('express');
const crypto = require('crypto');
const router = express.Router();
const addToCache = require('./addToCache');
const { getOpenAIModels } = require('../endpoints');
const { titleConvo, askClient } = require('../../../app/');
const { saveMessage, getConvoTitle, saveConvo, updateConvo, getConvo } = require('../../../models');
const { saveMessage, getConvoTitle, saveConvo, getConvo } = require('../../../models');
const { handleError, sendMessage, createOnProgress, handleText } = require('./handlers');

const abortControllers = new Map();

router.post('/abort', async (req, res) => {
const { abortKey } = req.body;
console.log(`req.body`, req.body);
if (!abortControllers.has(abortKey)) {
return res.status(404).send('Request not found');
}

const { abortController } = abortControllers.get(abortKey);

abortControllers.delete(abortKey);
const ret = await abortController.abortAsk();
console.log('Aborted request', abortKey);
console.log('Aborted message:', ret);

res.send(JSON.stringify(ret));
});

router.post('/', async (req, res) => {
const {
endpoint,

@@ -43,7 +63,7 @@ router.post('/', async (req, res) => {
};

const availableModels = getOpenAIModels();
if (availableModels.find(model => model === endpointOption.model) === undefined)
if (availableModels.find((model) => model === endpointOption.model) === undefined)
return handleError(res, { text: 'Illegal request: model' });

console.log('ask log', {

@@ -87,6 +107,8 @@ const ask = async ({
}) => {
let { text, parentMessageId: userParentMessageId, messageId: userMessageId } = userMessage;

let responseMessageId = crypto.randomUUID();

res.writeHead(200, {
Connection: 'keep-alive',
'Content-Type': 'text/event-stream',

@@ -98,9 +120,55 @@ const ask = async ({
if (preSendRequest) sendMessage(res, { message: userMessage, created: true });

try {
const progressCallback = createOnProgress();
const abortController = new AbortController();
res.on('close', () => abortController.abort());
let lastSavedTimestamp = 0;
const { onProgress: progressCallback, getPartialText } = createOnProgress({
onProgress: ({ text }) => {
const currentTimestamp = Date.now();
if (currentTimestamp - lastSavedTimestamp > 500) {
lastSavedTimestamp = currentTimestamp;
saveMessage({
messageId: responseMessageId,
sender: endpointOption?.chatGptLabel || 'ChatGPT',
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
text: text,
unfinished: true,
cancelled: false,
error: false
});
}
}
});

let abortController = new AbortController();
abortController.abortAsk = async function () {
this.abort();

const responseMessage = {
messageId: responseMessageId,
sender: endpointOption?.chatGptLabel || 'ChatGPT',
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
text: getPartialText(),
unfinished: false,
cancelled: true,
error: false
};

saveMessage(responseMessage);
await addToCache({ endpoint: 'openAI', endpointOption, userMessage, responseMessage });

return {
title: await getConvoTitle(req?.session?.user?.username, conversationId),
final: true,
conversation: await getConvo(req?.session?.user?.username, conversationId),
requestMessage: userMessage,
responseMessage: responseMessage
};
};
const abortKey = conversationId;
abortControllers.set(abortKey, { abortController, ...endpointOption });

let response = await askClient({
text,
parentMessageId: userParentMessageId,

@@ -114,6 +182,7 @@ const ask = async ({
abortController
});

abortControllers.delete(abortKey);
console.log('CLIENT RESPONSE', response);

const newConversationId = response.conversationId || conversationId;

@@ -125,13 +194,18 @@ const ask = async ({

let responseMessage = {
conversationId: newConversationId,
messageId: newResponseMessageId,
messageId: responseMessageId,
newMessageId: newResponseMessageId,
parentMessageId: overrideParentMessageId || newUserMassageId,
text: await handleText(response),
sender: endpointOption?.chatGptLabel || 'ChatGPT'
sender: endpointOption?.chatGptLabel || 'ChatGPT',
unfinished: false,
cancelled: false,
error: false
};

await saveMessage(responseMessage);
responseMessage.messageId = newResponseMessageId;

// STEP2 update the conversation
let conversationUpdate = { conversationId: newConversationId, endpoint: 'openAI' };

@@ -174,7 +248,7 @@ const ask = async ({

if (userParentMessageId == '00000000-0000-0000-0000-000000000000') {
const title = await titleConvo({ endpoint: endpointOption?.endpoint, text, response: responseMessage });
await updateConvo(req?.session?.user?.username, {
await saveConvo(req?.session?.user?.username, {
conversationId: conversationId,
title
});

@@ -182,10 +256,12 @@ const ask = async ({
} catch (error) {
console.error(error);
const errorMessage = {
messageId: crypto.randomUUID(),
messageId: responseMessageId,
sender: endpointOption?.chatGptLabel || 'ChatGPT',
conversationId,
parentMessageId: overrideParentMessageId || userMessageId,
unfinished: false,
cancelled: false,
error: true,
text: error.message
};
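The openAI route now keeps one AbortController per conversation in the abortControllers map and augments it with abortAsk(), which cancels the upstream request, saves whatever partial text exists as a cancelled message, mirrors it to the cache, and returns the final payload. A client can trigger it with a plain POST (sketch; the abortKey is the conversationId):

// Abort an in-flight openAI request for a given conversation.
const res = await fetch('/api/ask/openAI/abort', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ abortKey: conversationId })
});
const { responseMessage, conversation } = await res.json();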
@@ -17,7 +17,7 @@ const sendMessage = (res, message) => {
res.write(`event: message\ndata: ${JSON.stringify(message)}\n\n`);
};

const createOnProgress = () => {
const createOnProgress = ({ onProgress: _onProgress }) => {
let i = 0;
let code = '';
let tokens = '';

@@ -65,14 +65,21 @@ const createOnProgress = () => {
}

sendMessage(res, { text: tokens + cursor, message: true, initial: i === 0, ...rest });

_onProgress && _onProgress({ text: tokens, message: true, initial: i === 0, ...rest });

i++;
};

const onProgress = opts => {
const onProgress = (opts) => {
return _.partialRight(progressCallback, opts);
};

return onProgress;
const getPartialText = () => {
return tokens;
};

return { onProgress, getPartialText };
};

const handleText = async (response, bing = false) => {
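createOnProgress now accepts an optional onProgress callback (invoked with the accumulated tokens on every chunk) and returns getPartialText alongside onProgress, so a route can both throttle-save partials and read the latest text when a request is aborted. Usage sketch, following the ask routes above:

const { onProgress, getPartialText } = createOnProgress({
  onProgress: ({ text }) => {
    // e.g. persist the partial response (see the ask routes)
  }
});

// later, on abort:
const partial = getPartialText();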
@@ -2,7 +2,7 @@ const express = require('express');
const router = express.Router();
const { titleConvo } = require('../../app/');
const { getConvo, saveConvo, getConvoTitle } = require('../../models');
const { getConvosByPage, deleteConvos, updateConvo } = require('../../models/Conversation');
const { getConvosByPage, deleteConvos } = require('../../models/Conversation');
const { getMessages } = require('../../models/Message');

router.get('/', async (req, res) => {

@@ -44,7 +44,7 @@ router.post('/update', async (req, res) => {
const update = req.body.arg;

try {
const dbResponse = await updateConvo(req?.session?.user?.username, update);
const dbResponse = await saveConvo(req?.session?.user?.username, update);
res.status(201).send(dbResponse);
} catch (error) {
console.error(error);

@@ -18,8 +18,15 @@ const getChatGPTBrowserModels = () => {
router.get('/', function (req, res) {
const azureOpenAI = !!process.env.AZURE_OPENAI_KEY;
const openAI = process.env.OPENAI_KEY ? { availableModels: getOpenAIModels() } : false;
const bingAI = !!process.env.BINGAI_TOKEN;
const chatGPTBrowser = process.env.CHATGPT_TOKEN ? { availableModels: getChatGPTBrowserModels() } : false;
const bingAI = process.env.BINGAI_TOKEN
? { userProvide: process.env.BINGAI_TOKEN == 'user_provide' }
: false;
const chatGPTBrowser = process.env.CHATGPT_TOKEN
? {
userProvide: process.env.CHATGPT_TOKEN == 'user_provide',
availableModels: getChatGPTBrowserModels()
}
: false;

res.send(JSON.stringify({ azureOpenAI, openAI, bingAI, chatGPTBrowser }));
});
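The endpoints route now reports, per endpoint, whether the credential is user-provided, so the client can decide when to show the Set Token dialog. A sketch of what the client receives (values illustrative):

const endpointsConfig = await (await fetch('/api/endpoints')).json();
// e.g. {
//   azureOpenAI: false,
//   openAI: { availableModels: ['gpt-3.5-turbo', 'gpt-4'] },
//   bingAI: { userProvide: true },
//   chatGPTBrowser: { userProvide: true, availableModels: ['text-davinci-002-render-sha'] }
// }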
@@ -6,13 +6,20 @@ const registry = require('@dqbd/tiktoken/registry.json');
const models = require('@dqbd/tiktoken/model_to_encoding.json');

router.post('/', async (req, res) => {
const { arg } = req.body;
// console.log(typeof req.body === 'object' ? { ...req.body, ...req.query } : req.query);
const model = await load(registry[models['gpt-3.5-turbo']]);
const encoder = new Tiktoken(model.bpe_ranks, model.special_tokens, model.pat_str);
const tokens = encoder.encode(arg.text);
encoder.free();
res.send({ count: tokens.length });
try {
const { arg } = req.body;

// console.log('context:', arg, req.body);
// console.log(typeof req.body === 'object' ? { ...req.body, ...req.query } : req.query);
const model = await load(registry[models['gpt-3.5-turbo']]);
const encoder = new Tiktoken(model.bpe_ranks, model.special_tokens, model.pat_str);
const tokens = encoder.encode(arg.text);
encoder.free();
res.send({ count: tokens.length });
} catch (e) {
console.error(e);
res.status(500).send(e.message);
}
});

module.exports = router;
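The tokenizer route is now wrapped in try/catch and returns a 500 with the error message instead of crashing the request on bad input. Counting tokens from the client stays the same; a sketch, assuming the router is mounted at /api/tokenizer (the mount path is not shown in this diff):

const res = await fetch('/api/tokenizer', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ arg: { text: 'How many tokens is this?' } })
});
const { count } = await res.json();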
client/package-lock.json (generated, 2 changes)

@@ -1,6 +1,6 @@
{
"name": "chatgpt-clone",
"version": "0.3.2",
"version": "0.3.3",
"lockfileVersion": 2,
"requires": true,
"packages": {

@@ -1,6 +1,6 @@
{
"name": "chatgpt-clone",
"version": "0.3.2",
"version": "0.3.3",
"description": "",
"type": "module",
"scripts": {

@@ -21,7 +21,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
const setPresets = useSetRecoilState(store.presets);

const availableEndpoints = useRecoilValue(store.availableEndpoints);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const endpointsConfig = useRecoilValue(store.endpointsConfig);

const setOption = param => newValue => {
let update = {};

@@ -32,7 +32,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
...prevState,
...update
},
endpointsFilter
endpointsConfig
})
);
};

@@ -44,7 +44,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
axios({
method: 'post',
url: '/api/presets',
data: cleanupPreset({ preset, endpointsFilter }),
data: cleanupPreset({ preset, endpointsConfig }),
withCredentials: true
}).then(res => {
setPresets(res?.data);

@@ -54,7 +54,7 @@ const EditPresetDialog = ({ open, onOpenChange, preset: _preset, title }) => {
const exportPreset = () => {
const fileName = filenamify(preset?.title || 'preset');
exportFromJSON({
data: cleanupPreset({ preset, endpointsFilter }),
data: cleanupPreset({ preset, endpointsConfig }),
fileName,
exportType: exportFromJSON.types.json
});

@@ -16,7 +16,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) =
const [preset, setPreset] = useState(_preset);

const [saveAsDialogShow, setSaveAsDialogShow] = useState(false);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const endpointsConfig = useRecoilValue(store.endpointsConfig);

const setOption = param => newValue => {
let update = {};

@@ -33,7 +33,7 @@ const EndpointOptionsDialog = ({ open, onOpenChange, preset: _preset, title }) =

const exportPreset = () => {
exportFromJSON({
data: cleanupPreset({ preset, endpointsFilter }),
data: cleanupPreset({ preset, endpointsConfig }),
fileName: `${preset?.title}.json`,
exportType: exportFromJSON.types.json
});

@@ -1,4 +1,4 @@
import { useEffect, useState } from 'react';
import React, { useEffect, useState } from 'react';
import { useRecoilValue } from 'recoil';
import DialogTemplate from '../ui/DialogTemplate';
import { Dialog } from '../ui/Dialog.tsx';

@@ -11,7 +11,7 @@ import store from '~/store';

const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => {
const [title, setTitle] = useState(preset?.title || 'My Preset');
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const endpointsConfig = useRecoilValue(store.endpointsConfig);
const createPresetMutation = useCreatePresetMutation();

const defaultTextProps =

@@ -23,7 +23,7 @@ const SaveAsPresetDialog = ({ open, onOpenChange, preset }) => {
...preset,
title
},
endpointsFilter
endpointsConfig
});
createPresetMutation.mutate(_preset);
};

@@ -1,8 +1,16 @@
import React from 'react';
import React, { useState } from 'react';
import { DropdownMenuRadioItem } from '../../ui/DropdownMenu.tsx';
import { Settings } from 'lucide-react';
import getIcon from '~/utils/getIcon';
import { useRecoilValue } from 'recoil';
import SetTokenDialog from '../SetTokenDialog';

import store from '../../../store';

export default function ModelItem({ endpoint, value, onSelect }) {
const [setTokenDialogOpen, setSetTokenDialogOpen] = useState(false);
const endpointsConfig = useRecoilValue(store.endpointsConfig);

const icon = getIcon({
size: 20,
endpoint,

@@ -10,15 +18,37 @@ export default function ModelItem({ endpoint, value, onSelect }) {
className: 'mr-2'
});

const isuserProvide = endpointsConfig?.[endpoint]?.userProvide;

// regular model
return (
<DropdownMenuRadioItem
value={value}
className="dark:font-semibold dark:text-gray-100 dark:hover:bg-gray-800"
>
{icon}
{endpoint}
{!!['azureOpenAI', 'openAI'].find(e => e === endpoint) && <sup>$</sup>}
</DropdownMenuRadioItem>
<>
<DropdownMenuRadioItem
value={value}
className="group dark:font-semibold dark:text-gray-100 dark:hover:bg-gray-800"
>
{icon}
{endpoint}
{!!['azureOpenAI', 'openAI'].find(e => e === endpoint) && <sup>$</sup>}
<div className="flex w-4 flex-1" />
{isuserProvide ? (
<button
className="invisible m-0 mr-1 flex-initial rounded-md p-0 text-xs font-medium text-gray-400 hover:text-gray-700 group-hover:visible dark:font-normal dark:text-gray-400 dark:hover:text-gray-200"
onClick={e => {
e.preventDefault();
setSetTokenDialogOpen(true);
}}
>
<Settings className="mr-1 inline-block w-[16px] items-center stroke-1" />
Config Token
</button>
) : null}
</DropdownMenuRadioItem>
<SetTokenDialog
open={setTokenDialogOpen}
onOpenChange={setSetTokenDialogOpen}
endpoint={endpoint}
/>
</>
);
}

@@ -7,7 +7,7 @@ import store from '~/store';

const FileUpload = ({ onFileSelected }) => {
// const setPresets = useSetRecoilState(store.presets);
const endpointsFilter = useRecoilValue(store.endpointsFilter);
const endpointsConfig = useRecoilValue(store.endpointsConfig);

const handleFileChange = event => {
const file = event.target.files[0];

@@ -16,7 +16,7 @@ const FileUpload = ({ onFileSelected }) => {
const reader = new FileReader();
reader.onload = e => {
const jsonData = JSON.parse(e.target.result);
onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsFilter }), presetId: null });
onFileSelected({ ...cleanupPreset({ preset: jsonData, endpointsConfig }), presetId: null });
};
reader.readAsText(file);
};

@@ -24,10 +24,10 @@ const FileUpload = ({ onFileSelected }) => {
return (
<label
htmlFor="file-upload"
className=" mr-1 flex h-auto cursor-pointer items-center rounded bg-transparent px-2 py-1 text-xs font-medium font-normal text-gray-600 hover:bg-slate-200 hover:text-green-700 dark:bg-transparent dark:text-gray-300 dark:hover:bg-gray-800 dark:hover:text-green-500"
className=" mr-1 flex h-auto cursor-pointer items-center rounded bg-transparent px-2 py-1 text-xs font-medium font-normal text-gray-600 transition-colors hover:bg-slate-200 hover:text-green-700 dark:bg-transparent dark:text-gray-300 dark:hover:bg-gray-800 dark:hover:text-green-500"
>
<FileUp className="flex w-[22px] items-center stroke-1" />
<span className="ml-1 flex text-xs ">Import</span>
<FileUp className="mr-1 flex w-[22px] items-center stroke-1" />
<span className="flex text-xs ">Import</span>
<input
id="file-upload"
value=""

@@ -1,8 +1,9 @@
import { useState, useEffect } from 'react';
import React, { useState, useEffect } from 'react';
import { useRecoilValue, useRecoilState } from 'recoil';
import EditPresetDialog from '../../Endpoints/EditPresetDialog';
import EndpointItems from './EndpointItems';
import PresetItems from './PresetItems';
import { Trash2 } from 'lucide-react';
import FileUpload from './FileUpload';
import getIcon from '~/utils/getIcon';
import { useDeletePresetMutation, useCreatePresetMutation } from '~/data-provider';

@@ -36,14 +37,17 @@ export default function NewConversationMenu() {
const createPresetMutation = useCreatePresetMutation();

const importPreset = jsonData => {
createPresetMutation.mutate({...jsonData}, {
onSuccess: (data) => {
setPresets(data);
},
onError: (error) => {
console.error('Error uploading the preset:', error);
createPresetMutation.mutate(
{ ...jsonData },
{
onSuccess: data => {
setPresets(data);
},
onError: error => {
console.error('Error uploading the preset:', error);
}
}
})
);
};

// update the default model when availableModels changes

@@ -57,7 +61,11 @@ export default function NewConversationMenu() {

// save selected model to localstoreage
useEffect(() => {
if (endpoint) localStorage.setItem('lastConversationSetup', JSON.stringify(conversation));
if (endpoint) {
const lastSelectedModel = JSON.parse(localStorage.getItem('lastSelectedModel')) || {};
localStorage.setItem('lastConversationSetup', JSON.stringify(conversation));
localStorage.setItem('lastSelectedModel', JSON.stringify({ ...lastSelectedModel, [endpoint] : conversation.model }));
}
}, [conversation]);

// set the current model

@@ -85,11 +93,11 @@ export default function NewConversationMenu() {
};

const clearAllPresets = () => {
deletePresetsMutation.mutate({arg: {}});
deletePresetsMutation.mutate({ arg: {} });
};

const onDeletePreset = preset => {
deletePresetsMutation.mutate({arg: preset});
deletePresetsMutation.mutate({ arg: preset });
};

const icon = getIcon({

@@ -109,7 +117,7 @@ export default function NewConversationMenu() {
<DropdownMenuTrigger asChild>
<Button
variant="outline"
className={`group relative mt-[-8px] mb-[-12px] ml-0 items-center rounded-md border-0 p-1 outline-none focus:ring-0 focus:ring-offset-0 dark:data-[state=open]:bg-opacity-50 md:left-1 md:ml-[-12px] md:pl-1`}
className={`group relative mb-[-12px] ml-0 mt-[-8px] items-center rounded-md border-0 p-1 outline-none focus:ring-0 focus:ring-offset-0 dark:data-[state=open]:bg-opacity-50 md:left-1 md:ml-[-12px] md:pl-1`}
>
{icon}
<span className="max-w-0 overflow-hidden whitespace-nowrap px-0 text-slate-600 transition-all group-hover:max-w-[80px] group-hover:px-2 group-data-[state=open]:max-w-[80px] group-data-[state=open]:px-2 dark:text-slate-300">

@@ -146,12 +154,18 @@ export default function NewConversationMenu() {
<FileUpload onFileSelected={importPreset} />
<Dialog>
<DialogTrigger asChild>
<Button
<label
htmlFor="file-upload"
className=" mr-1 flex h-[32px] h-auto cursor-pointer items-center rounded bg-transparent px-2 py-1 text-xs font-medium font-normal text-gray-600 transition-colors hover:bg-slate-200 hover:text-red-700 dark:bg-transparent dark:text-gray-300 dark:hover:bg-gray-800 dark:hover:text-green-500"
>
{/* <Button
type="button"
className="h-auto bg-transparent px-2 py-1 text-xs font-medium font-normal text-red-700 hover:bg-slate-200 hover:text-red-700 dark:bg-transparent dark:text-red-400 dark:hover:bg-gray-800 dark:hover:text-red-400"
>
> */}
<Trash2 className="mr-1 flex w-[22px] items-center stroke-1" />
Clear All
</Button>
{/* </Button> */}
</label>
</DialogTrigger>
<DialogTemplate
title="Clear presets"
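NewConversationMenu now records the last model chosen per endpoint under the lastSelectedModel localStorage key. The read side is not part of this hunk; a sketch of how it would typically be consumed when switching endpoints (an assumption, not taken from the diff):

const lastSelectedModel = JSON.parse(localStorage.getItem('lastSelectedModel')) || {};
const model = lastSelectedModel[endpoint] ?? availableModels[0];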
client/src/components/Input/SetTokenDialog/index.jsx (new file, 102 lines)

@@ -0,0 +1,102 @@
import React, { useEffect, useState } from 'react';
import { useRecoilValue } from 'recoil';
import DialogTemplate from '../../ui/DialogTemplate';
import { Dialog } from '../../ui/Dialog.tsx';
import { Input } from '../../ui/Input.tsx';
import { Label } from '../../ui/Label.tsx';
import { cn } from '~/utils/';
import cleanupPreset from '~/utils/cleanupPreset';
import { useCreatePresetMutation } from '~/data-provider';
import store from '~/store';

const SetTokenDialog = ({ open, onOpenChange, endpoint }) => {
const [token, setToken] = useState('');
const { getToken, saveToken } = store.useToken(endpoint);

const defaultTextProps =
'rounded-md border border-gray-300 bg-transparent text-sm shadow-[0_0_10px_rgba(0,0,0,0.10)] outline-none placeholder:text-gray-400 focus:outline-none focus:ring-gray-400 focus:ring-opacity-20 focus:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 dark:border-gray-400 dark:bg-gray-700 dark:text-gray-50 dark:shadow-[0_0_15px_rgba(0,0,0,0.10)] dark:focus:border-gray-400 dark:focus:outline-none dark:focus:ring-0 dark:focus:ring-gray-400 dark:focus:ring-offset-0';

const submit = () => {
saveToken(token);
onOpenChange(false);
};

useEffect(() => {
setToken(getToken() ?? '');
}, [open]);

const helpText = {
bingAI: (
<small className="break-all text-gray-600">
The Bing Access Token is the "_U" cookie from bing.com. Use dev tools or an extension while logged
into the site to view it.
</small>
),
chatGPTBrowser: (
<small className="break-all text-gray-600">
To get your Access token For ChatGPT 'Free Version', login to{' '}
<a
target="_blank"
href="https://chat.openai.com"
rel="noreferrer"
className="text-blue-600 underline"
>
https://chat.openai.com
</a>
, then visit{' '}
<a
target="_blank"
href="https://chat.openai.com/api/auth/session"
rel="noreferrer"
className="text-blue-600 underline"
>
https://chat.openai.com/api/auth/session
</a>
. Copy access token.
</small>
)
};

return (
<Dialog
open={open}
onOpenChange={onOpenChange}
>
<DialogTemplate
title={`Set Token of ${endpoint}`}
main={
<div className="grid w-full items-center gap-2">
<Label
htmlFor="chatGptLabel"
className="text-left text-sm font-medium"
>
Token Name
<br />
</Label>
<Input
id="chatGptLabel"
value={token || ''}
onChange={e => setToken(e.target.value || '')}
placeholder="Set the token."
className={cn(
defaultTextProps,
'flex h-10 max-h-10 w-full resize-none px-3 py-2 focus:outline-none focus:ring-0 focus:ring-opacity-0 focus:ring-offset-0'
)}
/>
<small className="text-red-600">
Your token will be sent to the server, but not saved.
</small>
{helpText?.[endpoint]}
</div>
}
selection={{
selectHandler: submit,
selectClasses: 'bg-green-600 hover:bg-green-700 dark:hover:bg-green-800 text-white',
selectText: 'Submit'
}}
/>
</Dialog>
);
};

export default SetTokenDialog;
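SetTokenDialog relies on a store.useToken(endpoint) hook exposing getToken and saveToken; its implementation is not part of this diff. A minimal sketch of what such a hook could look like if it were backed by localStorage (purely an assumption for illustration, not the repo's implementation):

// Hypothetical shape of store.useToken — illustrative only.
const useToken = (endpoint) => {
  const key = `${endpoint}_token`;
  const getToken = () => localStorage.getItem(key);
  const saveToken = (value) => localStorage.setItem(key, value);
  return { getToken, saveToken };
};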
@@ -1,12 +1,31 @@
import React from 'react';
import React, { useState } from 'react';
import StopGeneratingIcon from '../svg/StopGeneratingIcon';
import { Settings } from 'lucide-react';
import SetTokenDialog from './SetTokenDialog';
import store from '../../store';

export default function SubmitButton({
endpoint,
submitMessage,
handleStopGenerating,
disabled,
isSubmitting,
endpointsConfig
}) {
const [setTokenDialogOpen, setSetTokenDialogOpen] = useState(false);
const { getToken } = store.useToken(endpoint);

const isTokenProvided = endpointsConfig?.[endpoint]?.userProvide ? !!getToken() : true;

export default function SubmitButton({ submitMessage, handleStopGenerating, disabled, isSubmitting }) {
const clickHandler = e => {
e.preventDefault();
submitMessage();
};

const setToken = () => {
setSetTokenDialogOpen(true);
};

if (isSubmitting)
return (
<button

@@ -42,7 +61,27 @@ export default function SubmitButton({ submitMessage, handleStopGenerating, disa
// </div>
// </button>
// );
else
else if (!isTokenProvided) {
return (
<>
<button
onClick={setToken}
type="button"
className="group absolute bottom-0 right-0 flex h-[100%] w-auto items-center justify-center bg-transparent p-1 text-gray-500"
>
<div className="m-1 mr-0 rounded-md p-2 pt-[10px] pb-[10px] align-middle text-xs group-hover:bg-gray-100 group-disabled:hover:bg-transparent dark:group-hover:bg-gray-900 dark:group-hover:text-gray-400 dark:group-disabled:hover:bg-transparent">
<Settings className="mr-1 inline-block w-[18px]" />
Set Token First
</div>
</button>
<SetTokenDialog
open={setTokenDialogOpen}
onOpenChange={setSetTokenDialogOpen}
endpoint={endpoint}
/>
</>
);
} else
return (
<button
onClick={clickHandler}

@@ -22,6 +22,7 @@ export default function TextChat({ isSearchView = false }) {
const [text, setText] = useRecoilState(store.text);
// const [text, setText] = useState('');

const endpointsConfig = useRecoilValue(store.endpointsConfig);
const isSubmitting = useRecoilValue(store.isSubmitting);

// TODO: do we need this?

@@ -32,7 +33,7 @@ export default function TextChat({ isSearchView = false }) {
// const bingStylesRef = useRef(null);
const [showBingToneSetting, setShowBingToneSetting] = useState(false);

const isNotAppendable = latestMessage?.cancelled || latestMessage?.error;
const isNotAppendable = (latestMessage?.unfinished & !isSubmitting) || latestMessage?.error;

// auto focus to input, when enter a conversation.
useEffect(() => {

@@ -62,18 +63,22 @@ export default function TextChat({ isSearchView = false }) {
setText('');
};

const handleStopGenerating = (e) => {
const handleStopGenerating = e => {
e.preventDefault();
stopGenerating();
};

const handleKeyDown = e => {
if (e.key === 'Enter' && isSubmitting) {
return;
}

if (e.key === 'Enter' && !e.shiftKey) {
e.preventDefault();
}

if (e.key === 'Enter' && !e.shiftKey) {
if (!isComposing?.current) submitMessage();
if (e.key === 'Enter' && !e.shiftKey && !isComposing?.current) {
submitMessage();
}
};

@@ -169,6 +174,8 @@ export default function TextChat({ isSearchView = false }) {
handleStopGenerating={handleStopGenerating}
disabled={disabled || isNotAppendable}
isSubmitting={isSubmitting}
endpointsConfig={endpointsConfig}
endpoint={conversation?.endpoint}
/>
{latestMessage && conversation?.jailbreak && conversation.endpoint === 'bingAI' ? (
<AdjustToneButton onClick={handleBingToneSetting} />

@@ -1,8 +1,8 @@
import { useEffect } from 'react';
import { useRecoilValue, useResetRecoilState, useSetRecoilState } from 'recoil';
import { useEffect, useState } from 'react';
import { useRecoilValue, useRecoilState, useResetRecoilState, useSetRecoilState } from 'recoil';
import { SSE } from '~/data-provider/sse.mjs';
import createPayload from '~/data-provider/createPayload';

import { useAbortRequestWithMessage } from '~/data-provider';
import store from '~/store';

export default function MessageHandler() {

@@ -17,7 +17,7 @@ export default function MessageHandler() {
const messageHandler = (data, submission) => {
const { messages, message, initialResponse, isRegenerate = false } = submission;

if (isRegenerate)
if (isRegenerate) {
setMessages([
...messages,
{

@@ -25,10 +25,11 @@ export default function MessageHandler() {
text: data,
parentMessageId: message?.overrideParentMessageId,
messageId: message?.overrideParentMessageId + '_',
submitting: true
submitting: true,
// unfinished: true
}
]);
else
} else {
setMessages([
...messages,
message,

@@ -37,37 +38,42 @@ export default function MessageHandler() {
text: data,
parentMessageId: message?.messageId,
messageId: message?.messageId + '_',
submitting: true
submitting: true,
// unfinished: true
}
]);
}
};

const cancelHandler = (data, submission) => {
const { messages, message, initialResponse, isRegenerate = false } = submission;
const { messages, isRegenerate = false } = submission;

if (isRegenerate)
setMessages([
...messages,
{
...initialResponse,
text: data,
parentMessageId: message?.overrideParentMessageId,
messageId: message?.overrideParentMessageId + '_',
cancelled: true
}
]);
else
setMessages([
...messages,
message,
{
...initialResponse,
text: data,
parentMessageId: message?.messageId,
messageId: message?.messageId + '_',
cancelled: true
}
]);
const { requestMessage, responseMessage, conversation } = data;

// update the messages
if (isRegenerate) {
setMessages([...messages, responseMessage]);
} else {
setMessages([...messages, requestMessage, responseMessage]);
}
setIsSubmitting(false);

// refresh title
if (requestMessage.parentMessageId == '00000000-0000-0000-0000-000000000000') {
setTimeout(() => {
refreshConversations();
}, 2000);

// in case it takes too long.
setTimeout(() => {
refreshConversations();
}, 5000);
}

setConversation(prevState => ({
...prevState,
...conversation
}));
};

const createdHandler = (data, submission) => {

@@ -145,6 +151,32 @@ export default function MessageHandler() {
return;
};

const abortConversation = conversationId => {
console.log(submission);
const { endpoint } = submission?.conversation || {};

fetch(`/api/ask/${endpoint}/abort`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
abortKey: conversationId
})
})
.then(response => response.json())
.then(data => {
console.log('aborted', data);
cancelHandler(data, submission);
})
.catch(error => {
console.error('Error aborting request');
console.error(error);
// errorHandler({ text: 'Error aborting request' }, { ...submission, message });
});
return;
};

useEffect(() => {
if (submission === null) return;
if (Object.keys(submission).length === 0) return;

@@ -158,7 +190,6 @@ export default function MessageHandler() {
headers: { 'Content-Type': 'application/json' }
});

let latestResponseText = '';
events.onmessage = e => {
const data = JSON.parse(e.data);

@@ -178,16 +209,14 @@ export default function MessageHandler() {
if (data.initial) console.log(data);

if (data.message) {
latestResponseText = text;
messageHandler(text, { ...submission, message });
}
// console.log('dataStream', data);
}
};

events.onopen = () => console.log('connection is opened');

events.oncancel = () => cancelHandler(latestResponseText, { ...submission, message });
events.oncancel = () => abortConversation(message?.conversationId || submission?.conversationId);

events.onerror = function (e) {
console.log('error in opening conn.');

@@ -204,6 +233,7 @@ export default function MessageHandler() {
return () => {
const isCancelled = events.readyState <= 1;
events.close();
// setSource(null);
if (isCancelled) {
const e = new Event('cancel');
events.dispatchEvent(e);
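MessageHandler's oncancel path now calls the server-side /abort route instead of synthesizing a cancelled message locally; cancelHandler then trusts the payload returned by abortAsk() to update messages and conversation state. Sketch of the expected shape, based on the openAI route above (field contents illustrative):

// Payload passed to cancelHandler after POST /api/ask/<endpoint>/abort:
// {
//   title: 'Conversation title',
//   final: true,
//   conversation: { /* current conversation document */ },
//   requestMessage: { /* the user's message */ },
//   responseMessage: { /* partial text, cancelled: true */ }
// }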
@ -1,4 +1,4 @@
import { useState, useEffect, useRef } from 'react';
import React, { useState, useEffect, useRef } from 'react';
import { useRecoilValue, useSetRecoilState } from 'recoil';
import copy from 'copy-to-clipboard';
import SubRow from './Content/SubRow';

@ -22,7 +22,7 @@ export default function Message({
  siblingCount,
  setSiblingIdx
}) {
  const { text, searchResult, isCreatedByUser, error, submitting } = message;
  const { text, searchResult, isCreatedByUser, error, submitting, unfinished, cancelled } = message;
  const isSubmitting = useRecoilValue(store.isSubmitting);
  const setLatestMessage = useSetRecoilState(store.latestMessage);
  const [abortScroll, setAbort] = useState(false);

@ -34,6 +34,12 @@ export default function Message({
  const blinker = submitting && isSubmitting;
  const getConversationQuery = useGetConversationByIdQuery(message.conversationId, { enabled: false });

  // debugging
  // useEffect(() => {
  //   console.log('isSubmitting:', isSubmitting);
  //   console.log('unfinished:', unfinished);
  // }, [isSubmitting, unfinished]);

  useEffect(() => {
    if (blinker && !abortScroll) {
      scrollToBottom();

@ -98,7 +104,7 @@ export default function Message({

  const clickSearchResult = async () => {
    if (!searchResult) return;
    getConversationQuery.refetch(message.conversationId).then((response) => {
    getConversationQuery.refetch(message.conversationId).then(response => {
      switchToConversation(response.data);
    });
  };

@ -170,23 +176,39 @@ export default function Message({
            </div>
          </div>
        ) : (
          <div
            className={cn(
              'flex min-h-[20px] flex-grow flex-col items-start gap-4 ',
              isCreatedByUser ? 'whitespace-pre-wrap' : ''
            )}
          >
            {/* <div className={`${blinker ? 'result-streaming' : ''} markdown prose dark:prose-invert light w-full break-words`}> */}
            <div className="markdown prose dark:prose-invert light w-full break-words">
              {!isCreatedByUser ? (
                <>
                  <Content content={text} />
                </>
              ) : (
                <>{text}</>
          <>
            <div
              className={cn(
                'flex min-h-[20px] flex-grow flex-col items-start gap-4 ',
                isCreatedByUser ? 'whitespace-pre-wrap' : ''
              )}
            >
              {/* <div className={`${blinker ? 'result-streaming' : ''} markdown prose dark:prose-invert light w-full break-words`}> */}
              <div className="markdown prose dark:prose-invert light w-full break-words">
                {!isCreatedByUser ? (
                  <>
                    <Content content={text} />
                  </>
                ) : (
                  <>{text}</>
                )}
              </div>
            </div>
          </div>
            {/* {!isSubmitting && cancelled ? (
              <div className="flex flex min-h-[20px] flex-grow flex-col items-start gap-2 gap-4 text-red-500">
                <div className="rounded-md border border-blue-400 bg-blue-500/10 px-3 py-2 text-sm text-gray-600 dark:text-gray-100">
                  {`This is a cancelled message.`}
                </div>
              </div>
            ) : null} */}
            {!isSubmitting && unfinished ? (
              <div className="flex flex min-h-[20px] flex-grow flex-col items-start gap-2 gap-4 text-red-500">
                <div className="rounded-md border border-blue-400 bg-blue-500/10 px-3 py-2 text-sm text-gray-600 dark:text-gray-100">
                  {`This is an unfinished message. The AI may still be generating a response or it was aborted. Refresh or visit later to see more updates.`}
                </div>
              </div>
            ) : null}
          </>
        )}
      </div>
      <HoverButtons
@ -27,7 +27,7 @@ export default function ExportModel({ open, onOpenChange }) {

  const conversation = useRecoilValue(store.conversation) || {};
  const messagesTree = useRecoilValue(store.messagesTree) || [];
  const endpointsFilter = useRecoilValue(store.endpointsFilter);
  const endpointsConfig = useRecoilValue(store.endpointsConfig);

  const getSiblingIdx = useRecoilCallback(
    ({ snapshot }) =>

@ -37,16 +37,16 @@ export default function ExportModel({ open, onOpenChange }) {
  );

  const typeOptions = [
    { value: 'screenshot', display: 'screenshot (.png)' },
    { value: 'text', display: 'text (.txt)' },
    { value: 'markdown', display: 'markdown (.md)' },
    { value: 'csv', display: 'csv (.csv)' },
    { value: 'json', display: 'json (.json)' },
    { value: 'screenshot', display: 'screenshot (.png)' }
    { value: 'csv', display: 'csv (.csv)' }
  ]; //,, 'webpage'];

  useEffect(() => {
    setFileName(filenamify(String(conversation?.title || 'file')));
    setType('text');
    setType('screenshot');
    setIncludeOptions(true);
    setExportBranches(false);
    setRecursive(true);

@ -144,6 +144,8 @@ export default function ExportModel({ open, onOpenChange }) {
      fieldValues: entries.find(e => e.fieldName == 'isCreatedByUser').fieldValues
    },
    { fieldName: 'error', fieldValues: entries.find(e => e.fieldName == 'error').fieldValues },
    { fieldName: 'unfinished', fieldValues: entries.find(e => e.fieldName == 'unfinished').fieldValues },
    { fieldName: 'cancelled', fieldValues: entries.find(e => e.fieldName == 'cancelled').fieldValues },
    { fieldName: 'messageId', fieldValues: entries.find(e => e.fieldName == 'messageId').fieldValues },
    {
      fieldName: 'parentMessageId',

@ -164,7 +166,7 @@ export default function ExportModel({ open, onOpenChange }) {

    if (includeOptions) {
      data += `\n## Options\n`;
      const options = cleanupPreset({ preset: conversation, endpointsFilter });
      const options = cleanupPreset({ preset: conversation, endpointsConfig });

      for (const key of Object.keys(options)) {
        data += `- ${key}: ${options[key]}\n`;

@ -181,7 +183,11 @@ export default function ExportModel({ open, onOpenChange }) {

    data += `\n## History\n`;
    for (const message of messages) {
      data += `**${message?.sender}:**\n${message?.text}\n\n`;
      data += `**${message?.sender}:**\n${message?.text}\n`;
      if (message.error) data += `*(This is an error message)*\n`;
      if (message.unfinished) data += `*(This is an unfinished message)*\n`;
      if (message.cancelled) data += `*(This is a cancelled message)*\n`;
      data += '\n\n';
    }

    exportFromJSON({

@ -203,7 +209,7 @@ export default function ExportModel({ open, onOpenChange }) {

    if (includeOptions) {
      data += `\nOptions\n########################\n`;
      const options = cleanupPreset({ preset: conversation, endpointsFilter });
      const options = cleanupPreset({ preset: conversation, endpointsConfig });

      for (const key of Object.keys(options)) {
        data += `${key}: ${options[key]}\n`;

@ -220,7 +226,11 @@ export default function ExportModel({ open, onOpenChange }) {

    data += `\nHistory\n########################\n`;
    for (const message of messages) {
      data += `${message?.sender}:\n${message?.text}\n\n`;
      data += `>> ${message?.sender}:\n${message?.text}\n`;
      if (message.error) data += `(This is an error message)\n`;
      if (message.unfinished) data += `(This is an unfinished message)\n`;
      if (message.cancelled) data += `(This is a cancelled message)\n`;
      data += '\n\n';
    }

    exportFromJSON({

@ -241,7 +251,7 @@ export default function ExportModel({ open, onOpenChange }) {
      recursive: recursive
    };

    if (includeOptions) data.options = cleanupPreset({ preset: conversation, endpointsFilter });
    if (includeOptions) data.options = cleanupPreset({ preset: conversation, endpointsConfig });

    const messages = await buildMessageTree({
      messageId: conversation?.conversationId,
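To make the new per-message export annotations concrete, the markdown export's History section would now look roughly like this for a conversation whose last reply was aborted (the sender names and text below are made up for illustration):

## History
**User:**
What is an SSE stream?

**ChatGPT:**
Server-sent events let the server push incremental updates over a single HTTP connection...
*(This is an unfinished message)*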
@ -6,6 +6,10 @@ export const messages = (id: string) => {
  return `/api/messages/${id}`;
};

export const abortRequest = (endpoint: string) => {
  return `/api/ask/${endpoint}/abort`;
};

export const conversations = (pageNumber: string) => {
  return `/api/convos?pageNumber=${pageNumber}`;
};
@ -6,6 +6,10 @@ export function getConversations(pageNumber: string): Promise<t.TGetConversation
  return request.get(endpoints.conversations(pageNumber));
}

export function abortRequestWithMessage(endpoint: string, abortKey: string, message: string): Promise<void> {
  return request.post(endpoints.abortRequest(endpoint), { arg: {abortKey, message} });
}

export function deleteConversation(payload: t.TDeleteConversationRequest) {
  //todo: this should be a DELETE request
  return request.post(endpoints.deleteConversation(), {arg: payload});

@ -62,5 +66,5 @@ export const getAIEndpoints = () => {
}

export const updateTokenCount = (text: string) => {
  return request.post(endpoints.tokenizer(), {arg: {text}});
  return request.post(endpoints.tokenizer(), {arg: text});
}
@ -21,6 +21,10 @@ export enum QueryKeys {
  tokenCount = "tokenCount",
}

export const useAbortRequestWithMessage = (): UseMutationResult<void, Error, { endpoint: string; abortKey: string; message: string }> => {
  return useMutation(({ endpoint, abortKey, message }) => dataService.abortRequestWithMessage(endpoint, abortKey, message));
};

export const useGetUserQuery = (): QueryObserverResult<t.TUser> => {
  return useQuery<t.TUser>([QueryKeys.user], () => dataService.getUser(), {
    refetchOnWindowFocus: false,
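A minimal sketch of how a component could consume this new mutation hook, for example behind a "stop generating" button. The component, its props, and the import path are assumptions for illustration; only useAbortRequestWithMessage comes from this PR:

// Hypothetical component, for illustration only.
import React from 'react';
import { useAbortRequestWithMessage } from '~/data-provider';

export default function StopGeneratingButton({ endpoint, conversationId, latestText }) {
  const abortRequest = useAbortRequestWithMessage();

  const handleStop = () => {
    // abortKey is the conversationId, matching what MessageHandler sends on cancel.
    abortRequest.mutate(
      { endpoint, abortKey: conversationId, message: latestText },
      { onError: error => console.error('Failed to abort request', error) }
    );
  };

  return (
    <button onClick={handleStop} disabled={abortRequest.isLoading}>
      Stop generating
    </button>
  );
}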
@ -79,9 +79,9 @@ const useConversation = () => {
    ({ snapshot }) =>
      async (_conversation, messages = null, preset = null) => {
        const prevConversation = await snapshot.getPromise(conversation);
        const endpointsFilter = await snapshot.getPromise(endpoints.endpointsFilter);
        const endpointsConfig = await snapshot.getPromise(endpoints.endpointsConfig);
        _switchToConversation(_conversation, messages, preset, {
          endpointsFilter,
          endpointsConfig,
          prevConversation
        });
      },

@ -92,7 +92,7 @@ const useConversation = () => {
    conversation,
    messages = null,
    preset = null,
    { endpointsFilter = {}, prevConversation = {} }
    { endpointsConfig = {}, prevConversation = {} }
  ) => {
    let { endpoint = null } = conversation;

@ -100,7 +100,7 @@ const useConversation = () => {
      // get the default model
      conversation = getDefaultConversation({
        conversation,
        endpointsFilter,
        endpointsConfig,
        prevConversation,
        preset
      });
@ -6,6 +6,7 @@ import text from './text';
import submission from './submission';
import search from './search';
import preset from './preset';
import token from './token';

export default {
  ...conversation,

@ -15,5 +16,6 @@ export default {
  ...text,
  ...submission,
  ...search,
  ...preset
  ...preset,
  ...token
};
@ -1,14 +1,7 @@
import React from "react";
import { useNavigate } from "react-router-dom";
import {
  RecoilRoot,
  atom,
  selector,
  useRecoilState,
  useRecoilValue,
  useSetRecoilState,
} from "recoil";
import buildTree from "~/utils/buildTree";
import React from 'react';
import { useNavigate } from 'react-router-dom';
import { RecoilRoot, atom, selector, useRecoilState, useRecoilValue, useSetRecoilState } from 'recoil';
import buildTree from '~/utils/buildTree';

// current submission
// submit any new value to this state will cause new message to be send.

@ -22,16 +15,16 @@ import buildTree from "~/utils/buildTree";
// }

const submission = atom({
  key: "submission",
  default: null,
  key: 'submission',
  default: null
});

const isSubmitting = atom({
  key: "isSubmitting",
  default: false,
  key: 'isSubmitting',
  default: false
});

export default {
  submission,
  isSubmitting,
  isSubmitting
};
client/src/store/token.js (new file, +21)

@ -0,0 +1,21 @@
import { atom, useRecoilState } from 'recoil';

const tokenRefreshHints = atom({
  key: 'tokenRefreshHints',
  default: 1
});

const useToken = endpoint => {
  const [hints, setHints] = useRecoilState(tokenRefreshHints);
  const getToken = () => localStorage.getItem(`${endpoint}_token`);
  const saveToken = value => {
    localStorage.setItem(`${endpoint}_token`, value);
    setHints(prev => prev + 1);
  };

  return { token: getToken(), getToken, saveToken };
};

export default {
  useToken
};
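A sketch of how a client component might use this store to collect a user-provided access token for the "user_provide" endpoints. Only store.useToken comes from this PR; the dialog component, its markup, and the '~/store' import path are illustrative assumptions:

// Hypothetical token-entry component, for illustration only.
import React, { useState } from 'react';
import store from '~/store';

export default function TokenInput({ endpoint }) {
  const { token, saveToken } = store.useToken(endpoint);
  const [value, setValue] = useState(token || '');

  return (
    <div>
      {/* The token is kept in localStorage under `${endpoint}_token`. */}
      <input
        type="password"
        value={value}
        onChange={e => setValue(e.target.value)}
        placeholder={`Enter your ${endpoint} token`}
      />
      <button onClick={() => saveToken(value)}>Save</button>
    </div>
  );
}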
@ -1,4 +1,4 @@
const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
const cleanupPreset = ({ preset: _preset, endpointsConfig = {} }) => {
  const { endpoint } = _preset;

  let preset = {};

@ -6,7 +6,7 @@ const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
    preset = {
      endpoint,
      presetId: _preset?.presetId ?? null,
      model: _preset?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
      model: _preset?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
      chatGptLabel: _preset?.chatGptLabel ?? null,
      promptPrefix: _preset?.promptPrefix ?? null,
      temperature: _preset?.temperature ?? 1,

@ -30,7 +30,7 @@ const cleanupPreset = ({ preset: _preset, endpointsFilter = {} }) => {
      endpoint,
      presetId: _preset?.presetId ?? null,
      model:
        _preset?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'text-davinci-002-render-sha',
        _preset?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'text-davinci-002-render-sha',
      title: _preset?.title ?? 'New Preset'
    };
  } else if (endpoint === null) {
@ -1,15 +1,20 @@
const buildDefaultConversation = ({
  conversation,
  endpoint,
  endpointsFilter = {},
  endpointsConfig = {},
  lastConversationSetup = {}
}) => {
  const lastSelectedModel = JSON.parse(localStorage.getItem('lastSelectedModel')) || {};

  if (endpoint === 'azureOpenAI' || endpoint === 'openAI') {
    conversation = {
      ...conversation,
      endpoint,
      model:
        lastConversationSetup?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
        lastConversationSetup?.model ??
        lastSelectedModel[endpoint] ??
        endpointsConfig[endpoint]?.availableModels?.[0] ??
        'gpt-3.5-turbo',
      chatGptLabel: lastConversationSetup?.chatGptLabel ?? null,
      promptPrefix: lastConversationSetup?.promptPrefix ?? null,
      temperature: lastConversationSetup?.temperature ?? 1,

@ -36,7 +41,8 @@ const buildDefaultConversation = ({
      endpoint,
      model:
        lastConversationSetup?.model ??
        endpointsFilter[endpoint]?.availableModels?.[0] ??
        lastSelectedModel[endpoint] ??
        endpointsConfig[endpoint]?.availableModels?.[0] ??
        'text-davinci-002-render-sha'
    };
  } else if (endpoint === null) {

@ -55,35 +61,35 @@ const buildDefaultConversation = ({
  return conversation;
};

const getDefaultConversation = ({ conversation, prevConversation, endpointsFilter, preset }) => {
const getDefaultConversation = ({ conversation, prevConversation, endpointsConfig, preset }) => {
  const { endpoint: targetEndpoint } = preset || {};

  if (targetEndpoint) {
    // try to use preset
    const endpoint = targetEndpoint;
    if (endpointsFilter?.[endpoint]) {
    if (endpointsConfig?.[endpoint]) {
      conversation = buildDefaultConversation({
        conversation,
        endpoint,
        lastConversationSetup: preset,
        endpointsFilter
        endpointsConfig
      });
      return conversation;
    } else {
      console.log(endpoint);
      console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsFilter}`);
      console.warn(`Illegal target endpoint ${targetEndpoint} ${endpointsConfig}`);
    }
  }

  // try {
  //   // try to use current model
  //   const { endpoint = null } = prevConversation || {};
  //   if (endpointsFilter?.[endpoint]) {
  //   if (endpointsConfig?.[endpoint]) {
  //     conversation = buildDefaultConversation({
  //       conversation,
  //       endpoint,
  //       lastConversationSetup: prevConversation,
  //       endpointsFilter
  //       endpointsConfig
  //     });
  //     return conversation;
  //   }

@ -94,20 +100,20 @@ const getDefaultConversation = ({ conversation, prevConversation, endpointsFilte
    const lastConversationSetup = JSON.parse(localStorage.getItem('lastConversationSetup'));
    const { endpoint = null } = lastConversationSetup;

    if (endpointsFilter?.[endpoint]) {
      conversation = buildDefaultConversation({ conversation, endpoint, endpointsFilter });
    if (endpointsConfig?.[endpoint]) {
      conversation = buildDefaultConversation({ conversation, endpoint, endpointsConfig });
      return conversation;
    }
  } catch (error) {}

  // if anything happens, reset to default model

  const endpoint = ['openAI', 'azureOpenAI', 'bingAI', 'chatGPTBrowser'].find(e => endpointsFilter?.[e]);
  const endpoint = ['openAI', 'azureOpenAI', 'bingAI', 'chatGPTBrowser'].find(e => endpointsConfig?.[e]);
  if (endpoint) {
    conversation = buildDefaultConversation({ conversation, endpoint, endpointsFilter });
    conversation = buildDefaultConversation({ conversation, endpoint, endpointsConfig });
    return conversation;
  } else {
    conversation = buildDefaultConversation({ conversation, endpoint: null, endpointsFilter });
    conversation = buildDefaultConversation({ conversation, endpoint: null, endpointsConfig });
    return conversation;
  }
};
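In short, the default model is now resolved in this order: the preset or last conversation setup, then the per-endpoint model remembered in localStorage (lastSelectedModel), then the endpoint's first available model from endpointsConfig, then a hard-coded fallback. A minimal illustration of that chain, with made-up values:

// Illustrative values only; mirrors the nullish-coalescing chain above.
const lastConversationSetup = {};                      // no model carried over
const lastSelectedModel = { openAI: 'gpt-4' };         // remembered per endpoint in localStorage
const endpointsConfig = { openAI: { availableModels: ['gpt-3.5-turbo', 'gpt-4'] } };

const model =
  lastConversationSetup?.model ??
  lastSelectedModel['openAI'] ??
  endpointsConfig['openAI']?.availableModels?.[0] ??
  'gpt-3.5-turbo';

console.log(model); // 'gpt-4' — the remembered selection wins over the config default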
@ -7,7 +7,9 @@ const useMessageHandler = () => {
  const currentConversation = useRecoilValue(store.conversation) || {};
  const setSubmission = useSetRecoilState(store.submission);
  const isSubmitting = useRecoilValue(store.isSubmitting);
  const endpointsFilter = useRecoilValue(store.endpointsFilter);
  const endpointsConfig = useRecoilValue(store.endpointsConfig);

  const { getToken } = store.useToken(currentConversation?.endpoint);

  const latestMessage = useRecoilValue(store.latestMessage);

@ -29,7 +31,7 @@ const useMessageHandler = () => {
    endpointOption = {
      endpoint,
      model:
        currentConversation?.model ?? endpointsFilter[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
        currentConversation?.model ?? endpointsConfig[endpoint]?.availableModels?.[0] ?? 'gpt-3.5-turbo',
      chatGptLabel: currentConversation?.chatGptLabel ?? null,
      promptPrefix: currentConversation?.promptPrefix ?? null,
      temperature: currentConversation?.temperature ?? 1,

@ -48,7 +50,8 @@ const useMessageHandler = () => {
      jailbreakConversationId: currentConversation?.jailbreakConversationId ?? null,
      conversationSignature: currentConversation?.conversationSignature ?? null,
      clientId: currentConversation?.clientId ?? null,
      invocationId: currentConversation?.invocationId ?? 1
      invocationId: currentConversation?.invocationId ?? 1,
      token: endpointsConfig[endpoint]?.userProvide ? getToken() : null
    };
    responseSender = endpointOption.jailbreak ? 'Sydney' : 'BingAI';
  } else if (endpoint === 'chatGPTBrowser') {

@ -56,8 +59,9 @@ const useMessageHandler = () => {
      endpoint,
      model:
        currentConversation?.model ??
        endpointsFilter[endpoint]?.availableModels?.[0] ??
        'text-davinci-002-render-sha'
        endpointsConfig[endpoint]?.availableModels?.[0] ??
        'text-davinci-002-render-sha',
      token: endpointsConfig[endpoint]?.userProvide ? getToken() : null
    };
    responseSender = 'ChatGPT';
  } else if (endpoint === null) {

@ -102,6 +106,7 @@ const useMessageHandler = () => {
      parentMessageId: isRegenerate ? messageId : fakeMessageId,
      messageId: (isRegenerate ? messageId : fakeMessageId) + '_',
      conversationId,
      unfinished: (endpoint === 'azureOpenAI' || endpoint === 'openAI') ? false : true,
      submitting: true
    };
package-lock.json (generated, 2 lines changed)

@ -1,6 +1,6 @@
{
  "name": "chatgpt-clone",
  "version": "0.3.2",
  "version": "0.3.3",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {

@ -1,6 +1,6 @@
{
  "name": "chatgpt-clone",
  "version": "0.3.2",
  "version": "0.3.3",
  "description": "",
  "scripts": {
    "e2e": "playwright test --config=e2e/playwright.config.js",