Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-09-22 06:00:56 +02:00)
📥 feat: Import Conversations from LibreChat, ChatGPT, Chatbot UI (#2355)
* Basic implementation of ChatGPT conversation import
* remove debug code
* Handle citations
* Fix updatedAt in import
* update default model
* Use job scheduler to handle import requests
* import job status endpoint
* Add wrapper around Agenda
* Rate limits for import endpoint
* rename import api path
* Batch save import to mongo
* Improve naming
* Add documenting comments
* Test for importers
* Change button for importing conversations
* Frontend changes
* Import job status endpoint
* Import endpoint response
* Add translations to new phrases
* Fix conversations refreshing
* cleanup unused functions
* set timeout for import job status polling
* Add documentation
* get extra spaces back
* Improve error message
* Fix translation files after merge
* fix translation files 2
* Add zh translation for import functionality
* Sync Meilisearch index after import
* chore: add dummy uri for jest tests, as MONGO_URI should only be real for E2E tests
* docs: fix links
* docs: fix conversationsImport section
* fix: user role issue for librechat imports
* refactor: import conversations from json
  - organize imports
  - add additional jsdocs
  - use multer with diskStorage to avoid loading file into memory outside of job
  - use filepath instead of loading data string for imports
  - replace console logs and some logger.info() with logger.debug
  - only use multer for import route
* fix: undefined metadata edge case and replace ChatGtp -> ChatGpt
* Refactor importChatGptConvo function to handle undefined metadata edge case and replace ChatGtp with ChatGpt
* fix: chatgpt importer
* feat: maintain tree relationship for librechat messages
* chore: use enum
* refactor: saveMessage to use single object arg, replace console logs, add userId to log message
* chore: additional comment
* chore: multer edge case
* feat: first pass, maintain tree relationship
* chore: organize
* chore: remove log
* ci: add hierarchy test for chatgpt
* ci: test maintaining of hierarchy for librechat
* wip: allow non-text content type messages
* refactor: import content part object json string
* refactor: more content types to format
* chore: consolidate messageText formatting
* docs: update on changes, bump data-provider/config versions, update readme
* refactor(indexSync): singleton pattern for MeiliSearchClient
* refactor: debug log after batch is done
* chore: add back indexSync error handling

---------

Co-authored-by: jakubmieszczak <jakub.mieszczak@zendesk.com>
Co-authored-by: Danny Avila <danny@librechat.ai>
This commit is contained in: parent 3b44741cf9, commit ab6fbe48f1
64 changed files with 3795 additions and 98 deletions
1  .gitignore  vendored
@@ -76,6 +76,7 @@ config.local.ts
**/storageState.json
junit.xml
**/.venv/
**/venv/

# docker override file
docker-compose.override.yaml
@@ -52,7 +52,9 @@
- Русский, 日本語, Svenska, 한국어, Tiếng Việt, 繁體中文, العربية, Türkçe, Nederlands, עברית
- 🤖 AI model selection: OpenAI, Azure OpenAI, BingAI, ChatGPT, Google Vertex AI, Anthropic (Claude), Plugins, Assistants API (including Azure Assistants)
- 💾 Create, Save, & Share Custom Presets
- 🎨 Customizable Dropdown & Interface: Adapts to both power users and newcomers.
- 🔄 Edit, Resubmit, and Continue messages with conversation branching
- 📥 Import Conversations from LibreChat, ChatGPT, Chatbot UI
- 📤 Export conversations as screenshots, markdown, text, json.
- 🔍 Search all messages/conversations
- 🔌 Plugins, including web access, image generation with DALL-E-3 and more
@@ -1,11 +1,28 @@
const { MeiliSearch } = require('meilisearch');
const Message = require('~/models/schema/messageSchema');
const Conversation = require('~/models/schema/convoSchema');
const Message = require('~/models/schema/messageSchema');
const { logger } = require('~/config');

const searchEnabled = process.env?.SEARCH?.toLowerCase() === 'true';
let currentTimeout = null;

class MeiliSearchClient {
  static instance = null;

  static getInstance() {
    if (!MeiliSearchClient.instance) {
      if (!process.env.MEILI_HOST || !process.env.MEILI_MASTER_KEY) {
        throw new Error('Meilisearch configuration is missing.');
      }
      MeiliSearchClient.instance = new MeiliSearch({
        host: process.env.MEILI_HOST,
        apiKey: process.env.MEILI_MASTER_KEY,
      });
    }
    return MeiliSearchClient.instance;
  }
}

// eslint-disable-next-line no-unused-vars
async function indexSync(req, res, next) {
  if (!searchEnabled) {

@@ -13,20 +30,10 @@ async function indexSync(req, res, next) {
  }

  try {
    if (!process.env.MEILI_HOST || !process.env.MEILI_MASTER_KEY || !searchEnabled) {
      throw new Error('Meilisearch not configured, search will be disabled.');
    }

    const client = new MeiliSearch({
      host: process.env.MEILI_HOST,
      apiKey: process.env.MEILI_MASTER_KEY,
    });
    const client = MeiliSearchClient.getInstance();

    const { status } = await client.health();
    // logger.debug(`[indexSync] Meilisearch: ${status}`);
    const result = status === 'available' && !!process.env.SEARCH;

    if (!result) {
    if (status !== 'available' || !process.env.SEARCH) {
      throw new Error('Meilisearch not available');
    }

@@ -37,12 +44,8 @@ async function indexSync(req, res, next) {
    const messagesIndexed = messages.numberOfDocuments;
    const convosIndexed = convos.numberOfDocuments;

    logger.debug(
      `[indexSync] There are ${messageCount} messages in the database, ${messagesIndexed} indexed`,
    );
    logger.debug(
      `[indexSync] There are ${convoCount} convos in the database, ${convosIndexed} indexed`,
    );
    logger.debug(`[indexSync] There are ${messageCount} messages and ${messagesIndexed} indexed`);
    logger.debug(`[indexSync] There are ${convoCount} convos and ${convosIndexed} indexed`);

    if (messageCount !== messagesIndexed) {
      logger.debug('[indexSync] Messages out of sync, indexing');

@@ -54,7 +57,6 @@ async function indexSync(req, res, next) {
      Conversation.syncWithMeili();
    }
  } catch (err) {
    // logger.debug('[indexSync] in index sync');
    if (err.message.includes('not found')) {
      logger.debug('[indexSync] Creating indices...');
      currentTimeout = setTimeout(async () => {
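Reviewer note: the refactor replaces the per-request `new MeiliSearch(...)` with a lazily created singleton. A minimal sketch of the behavior, outside of LibreChat's actual call sites (the env values below are illustrative placeholders):

```js
// Illustrative only: both callers receive the same MeiliSearch client,
// configured once from MEILI_HOST / MEILI_MASTER_KEY.
process.env.MEILI_HOST = 'http://localhost:7700'; // assumed local instance
process.env.MEILI_MASTER_KEY = 'masterKey';       // assumed key

const clientA = MeiliSearchClient.getInstance();
const clientB = MeiliSearchClient.getInstance();
console.log(clientA === clientB); // true — a single shared instance is reused
```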
@@ -30,6 +30,24 @@ module.exports = {
      return { message: 'Error saving conversation' };
    }
  },
  bulkSaveConvos: async (conversations) => {
    try {
      const bulkOps = conversations.map((convo) => ({
        updateOne: {
          filter: { conversationId: convo.conversationId, user: convo.user },
          update: convo,
          upsert: true,
          timestamps: false,
        },
      }));

      const result = await Conversation.bulkWrite(bulkOps);
      return result;
    } catch (error) {
      logger.error('[saveBulkConversations] Error saving conversations in bulk', error);
      throw new Error('Failed to save conversations in bulk.');
    }
  },
  getConvosByPage: async (user, pageNumber = 1, pageSize = 25) => {
    try {
      const totalConvos = (await Conversation.countDocuments({ user })) || 1;
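Because each bulk operation upserts on `(conversationId, user)`, re-running an import with the same conversation IDs updates documents in place rather than duplicating them. A hedged usage sketch with made-up field values:

```js
const { bulkSaveConvos } = require('~/models/Conversation');

// Hypothetical imported conversations; IDs and titles are placeholders.
await bulkSaveConvos([
  { conversationId: 'convo-1', user: 'userId123', title: 'Imported chat 1', endpoint: 'openAI' },
  { conversationId: 'convo-2', user: 'userId123', title: 'Imported chat 2', endpoint: 'openAI' },
]);
```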
@@ -74,6 +74,25 @@ module.exports = {
      throw new Error('Failed to save message.');
    }
  },

  async bulkSaveMessages(messages) {
    try {
      const bulkOps = messages.map((message) => ({
        updateOne: {
          filter: { messageId: message.messageId },
          update: message,
          upsert: true,
        },
      }));

      const result = await Message.bulkWrite(bulkOps);
      return result;
    } catch (err) {
      logger.error('Error saving messages in bulk:', err);
      throw new Error('Failed to save messages in bulk.');
    }
  },

  /**
   * Records a message in the database.
   *
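The same idea applies to messages: the bulk write upserts on `messageId`, so importing the same export twice should update rather than duplicate. A sketch with placeholder IDs:

```js
const { bulkSaveMessages } = require('~/models/Message');

// Placeholder messages; a second call with the same messageIds updates in place.
await bulkSaveMessages([
  { messageId: 'msg-1', conversationId: 'convo-1', user: 'userId123', sender: 'user', text: 'hi there', isCreatedByUser: true },
  { messageId: 'msg-2', conversationId: 'convo-1', user: 'userId123', sender: 'GPT-3.5', text: 'Hello!', isCreatedByUser: false, parentMessageId: 'msg-1' },
]);
```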
69  api/server/middleware/importLimiters.js  Normal file
@@ -0,0 +1,69 @@
const rateLimit = require('express-rate-limit');
const { ViolationTypes } = require('librechat-data-provider');
const logViolation = require('~/cache/logViolation');

const getEnvironmentVariables = () => {
  const IMPORT_IP_MAX = parseInt(process.env.IMPORT_IP_MAX) || 100;
  const IMPORT_IP_WINDOW = parseInt(process.env.IMPORT_IP_WINDOW) || 15;
  const IMPORT_USER_MAX = parseInt(process.env.IMPORT_USER_MAX) || 50;
  const IMPORT_USER_WINDOW = parseInt(process.env.IMPORT_USER_WINDOW) || 15;

  const importIpWindowMs = IMPORT_IP_WINDOW * 60 * 1000;
  const importIpMax = IMPORT_IP_MAX;
  const importIpWindowInMinutes = importIpWindowMs / 60000;

  const importUserWindowMs = IMPORT_USER_WINDOW * 60 * 1000;
  const importUserMax = IMPORT_USER_MAX;
  const importUserWindowInMinutes = importUserWindowMs / 60000;

  return {
    importIpWindowMs,
    importIpMax,
    importIpWindowInMinutes,
    importUserWindowMs,
    importUserMax,
    importUserWindowInMinutes,
  };
};

const createImportHandler = (ip = true) => {
  const { importIpMax, importIpWindowInMinutes, importUserMax, importUserWindowInMinutes } =
    getEnvironmentVariables();

  return async (req, res) => {
    const type = ViolationTypes.FILE_UPLOAD_LIMIT;
    const errorMessage = {
      type,
      max: ip ? importIpMax : importUserMax,
      limiter: ip ? 'ip' : 'user',
      windowInMinutes: ip ? importIpWindowInMinutes : importUserWindowInMinutes,
    };

    await logViolation(req, res, type, errorMessage);
    res.status(429).json({ message: 'Too many conversation import requests. Try again later' });
  };
};

const createImportLimiters = () => {
  const { importIpWindowMs, importIpMax, importUserWindowMs, importUserMax } =
    getEnvironmentVariables();

  const importIpLimiter = rateLimit({
    windowMs: importIpWindowMs,
    max: importIpMax,
    handler: createImportHandler(),
  });

  const importUserLimiter = rateLimit({
    windowMs: importUserWindowMs,
    max: importUserMax,
    handler: createImportHandler(false),
    keyGenerator: function (req) {
      return req.user?.id; // Use the user ID or NULL if not available
    },
  });

  return { importIpLimiter, importUserLimiter };
};

module.exports = { createImportLimiters };
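With the defaults above, the limiters allow at most 100 import requests per IP and 50 per authenticated user within a 15-minute window, overridable via `IMPORT_IP_MAX`, `IMPORT_IP_WINDOW`, `IMPORT_USER_MAX`, and `IMPORT_USER_WINDOW`. A minimal wiring sketch in a standalone Express app (not LibreChat's actual server setup; the real mounting is in the convos route below):

```js
const express = require('express');
const { createImportLimiters } = require('./importLimiters');

const app = express();
const { importIpLimiter, importUserLimiter } = createImportLimiters();

// The IP limiter runs first, then the per-user limiter (keyed on req.user?.id).
app.post('/import', importIpLimiter, importUserLimiter, (req, res) => {
  res.status(201).json({ message: 'Import started' });
});
```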
@@ -18,6 +18,7 @@ const validateRegistration = require('./validateRegistration');
const validateImageRequest = require('./validateImageRequest');
const moderateText = require('./moderateText');
const noIndex = require('./noIndex');
const importLimiters = require('./importLimiters');

module.exports = {
  ...uploadLimiters,

@@ -39,5 +40,6 @@ module.exports = {
  validateModel,
  moderateText,
  noIndex,
  ...importLimiters,
  checkDomainAllowed,
};
@@ -1,8 +1,13 @@
const multer = require('multer');
const express = require('express');
const { CacheKeys } = require('librechat-data-provider');
const { initializeClient } = require('~/server/services/Endpoints/assistants');
const { getConvosByPage, deleteConvos, getConvo, saveConvo } = require('~/models/Conversation');
const { IMPORT_CONVERSATION_JOB_NAME } = require('~/server/utils/import/jobDefinition');
const { storage, importFileFilter } = require('~/server/routes/files/multer');
const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
const { createImportLimiters } = require('~/server/middleware');
const jobScheduler = require('~/server/utils/jobScheduler');
const getLogStores = require('~/cache/getLogStores');
const { sleep } = require('~/server/utils');
const { logger } = require('~/config');

@@ -99,4 +104,51 @@ router.post('/update', async (req, res) => {
  }
});

const { importIpLimiter, importUserLimiter } = createImportLimiters();
const upload = multer({ storage: storage, fileFilter: importFileFilter });

/**
 * Imports a conversation from a JSON file and saves it to the database.
 * @route POST /import
 * @param {Express.Multer.File} req.file - The JSON file to import.
 * @returns {object} 201 - success response - application/json
 */
router.post(
  '/import',
  importIpLimiter,
  importUserLimiter,
  upload.single('file'),
  async (req, res) => {
    try {
      const filepath = req.file.path;
      const job = await jobScheduler.now(IMPORT_CONVERSATION_JOB_NAME, filepath, req.user.id);

      res.status(201).json({ message: 'Import started', jobId: job.id });
    } catch (error) {
      logger.error('Error processing file', error);
      res.status(500).send('Error processing file');
    }
  },
);

// Get the status of an import job for polling
router.get('/import/jobs/:jobId', async (req, res) => {
  try {
    const { jobId } = req.params;
    const { userId, ...jobStatus } = await jobScheduler.getJobStatus(jobId);
    if (!jobStatus) {
      return res.status(404).json({ message: 'Job not found.' });
    }

    if (userId !== req.user.id) {
      return res.status(403).json({ message: 'Unauthorized' });
    }

    res.json(jobStatus);
  } catch (error) {
    logger.error('Error getting job details', error);
    res.status(500).send('Error getting job details');
  }
});

module.exports = router;
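From a client's perspective the import is a two-step flow: upload the JSON export, then poll the returned job ID until the scheduler reports completion. A rough sketch using `fetch`; the `/api/convos` prefix, the polled `status` field names, and the auth handling are assumptions, not taken from the frontend code in this commit:

```js
// Assumed route prefix '/api/convos' and existing cookie/JWT auth.
async function importConversations(file) {
  const body = new FormData();
  body.append('file', file);

  const res = await fetch('/api/convos/import', { method: 'POST', body });
  const { jobId } = await res.json(); // 201 { message: 'Import started', jobId }

  // Poll the job status endpoint until the job finishes (field names assumed).
  for (;;) {
    const status = await (await fetch(`/api/convos/import/jobs/${jobId}`)).json();
    if (status.status === 'completed' || status.status === 'failed') {
      return status;
    }
    await new Promise((resolve) => setTimeout(resolve, 1000));
  }
}
```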
@@ -1,6 +1,6 @@
const express = require('express');
const createMulterInstance = require('./multer');
const { uaParser, checkBan, requireJwtAuth, createFileLimiters } = require('~/server/middleware');
const { createMulterInstance } = require('./multer');

const files = require('./files');
const images = require('./images');
@@ -20,6 +20,16 @@ const storage = multer.diskStorage({
  },
});

const importFileFilter = (req, file, cb) => {
  if (file.mimetype === 'application/json') {
    cb(null, true);
  } else if (path.extname(file.originalname).toLowerCase() === '.json') {
    cb(null, true);
  } else {
    cb(new Error('Only JSON files are allowed'), false);
  }
};

const fileFilter = (req, file, cb) => {
  if (!file) {
    return cb(new Error('No file provided'), false);

@@ -42,4 +52,4 @@ const createMulterInstance = async () => {
  });
};

module.exports = createMulterInstance;
module.exports = { createMulterInstance, storage, importFileFilter };
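The new filter accepts an upload when either the reported MIME type is `application/json` or the filename ends in `.json`; everything else is rejected. A tiny illustrative check, where the `file` objects are stand-ins for what multer would pass in:

```js
const show = (file) =>
  importFileFilter(null, file, (err, accepted) => console.log(err ? err.message : accepted));

show({ mimetype: 'application/json', originalname: 'chatgpt-export.json' }); // true
show({ mimetype: 'application/octet-stream', originalname: 'export.json' }); // true (extension fallback)
show({ mimetype: 'text/plain', originalname: 'notes.txt' });                 // "Only JSON files are allowed"
```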
@@ -347,6 +347,69 @@ describe('AppService', () => {
    expect(process.env.FILE_UPLOAD_USER_MAX).toEqual('initialUserMax');
    expect(process.env.FILE_UPLOAD_USER_WINDOW).toEqual('initialUserWindow');
  });

  it('should not modify IMPORT environment variables without rate limits', async () => {
    // Setup initial environment variables
    process.env.IMPORT_IP_MAX = '10';
    process.env.IMPORT_IP_WINDOW = '15';
    process.env.IMPORT_USER_MAX = '5';
    process.env.IMPORT_USER_WINDOW = '20';

    const initialEnv = { ...process.env };

    await AppService(app);

    // Expect environment variables to remain unchanged
    expect(process.env.IMPORT_IP_MAX).toEqual(initialEnv.IMPORT_IP_MAX);
    expect(process.env.IMPORT_IP_WINDOW).toEqual(initialEnv.IMPORT_IP_WINDOW);
    expect(process.env.IMPORT_USER_MAX).toEqual(initialEnv.IMPORT_USER_MAX);
    expect(process.env.IMPORT_USER_WINDOW).toEqual(initialEnv.IMPORT_USER_WINDOW);
  });

  it('should correctly set IMPORT environment variables based on rate limits', async () => {
    // Define and mock a custom configuration with rate limits
    const importLimitsConfig = {
      rateLimits: {
        conversationsImport: {
          ipMax: '150',
          ipWindowInMinutes: '60',
          userMax: '50',
          userWindowInMinutes: '30',
        },
      },
    };

    require('./Config/loadCustomConfig').mockImplementationOnce(() =>
      Promise.resolve(importLimitsConfig),
    );

    await AppService(app);

    // Verify that process.env has been updated according to the rate limits config
    expect(process.env.IMPORT_IP_MAX).toEqual('150');
    expect(process.env.IMPORT_IP_WINDOW).toEqual('60');
    expect(process.env.IMPORT_USER_MAX).toEqual('50');
    expect(process.env.IMPORT_USER_WINDOW).toEqual('30');
  });

  it('should fallback to default IMPORT environment variables when rate limits are unspecified', async () => {
    // Setup initial environment variables to non-default values
    process.env.IMPORT_IP_MAX = 'initialMax';
    process.env.IMPORT_IP_WINDOW = 'initialWindow';
    process.env.IMPORT_USER_MAX = 'initialUserMax';
    process.env.IMPORT_USER_WINDOW = 'initialUserWindow';

    // Mock a custom configuration without specific rate limits
    require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve({}));

    await AppService(app);

    // Verify that process.env falls back to the initial values
    expect(process.env.IMPORT_IP_MAX).toEqual('initialMax');
    expect(process.env.IMPORT_IP_WINDOW).toEqual('initialWindow');
    expect(process.env.IMPORT_USER_MAX).toEqual('initialUserMax');
    expect(process.env.IMPORT_USER_WINDOW).toEqual('initialUserWindow');
  });
});

describe('AppService updating app.locals and issuing warnings', () => {
@@ -6,17 +6,24 @@ const handleRateLimits = (rateLimits) => {
  if (!rateLimits) {
    return;
  }
  const { fileUploads } = rateLimits;
  if (!fileUploads) {
    return;
  }

  const { fileUploads, conversationsImport } = rateLimits;
  if (fileUploads) {
    process.env.FILE_UPLOAD_IP_MAX = fileUploads.ipMax ?? process.env.FILE_UPLOAD_IP_MAX;
    process.env.FILE_UPLOAD_IP_WINDOW =
      fileUploads.ipWindowInMinutes ?? process.env.FILE_UPLOAD_IP_WINDOW;
    process.env.FILE_UPLOAD_USER_MAX = fileUploads.userMax ?? process.env.FILE_UPLOAD_USER_MAX;
    process.env.FILE_UPLOAD_USER_WINDOW =
      fileUploads.userWindowInMinutes ?? process.env.FILE_UPLOAD_USER_WINDOW;
  }

  if (conversationsImport) {
    process.env.IMPORT_IP_MAX = conversationsImport.ipMax ?? process.env.IMPORT_IP_MAX;
    process.env.IMPORT_IP_WINDOW =
      conversationsImport.ipWindowInMinutes ?? process.env.IMPORT_IP_WINDOW;
    process.env.IMPORT_USER_MAX = conversationsImport.userMax ?? process.env.IMPORT_USER_MAX;
    process.env.IMPORT_USER_WINDOW =
      conversationsImport.userWindowInMinutes ?? process.env.IMPORT_USER_WINDOW;
  }
};

module.exports = handleRateLimits;
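These values come from the `rateLimits.conversationsImport` block of the custom config (librechat.yaml), as exercised by the AppService test above. A small sketch of the mapping, using the same numbers as the test:

```js
// Mirrors what AppService does when the custom config is loaded.
handleRateLimits({
  conversationsImport: {
    ipMax: '150',              // -> process.env.IMPORT_IP_MAX
    ipWindowInMinutes: '60',   // -> process.env.IMPORT_IP_WINDOW
    userMax: '50',             // -> process.env.IMPORT_USER_MAX
    userWindowInMinutes: '30', // -> process.env.IMPORT_USER_WINDOW
  },
});
```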
98  api/server/utils/import/__data__/chatbotui-export.json  Normal file
|
@ -0,0 +1,98 @@
|
|||
{
|
||||
"version": 4,
|
||||
"history": [
|
||||
{
|
||||
"id": "493cb218-9ebb-4d35-af8b-5aded3f351be",
|
||||
"name": "Hello what are you able to do?",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"type": "text",
|
||||
"content": "Hello what are you able to do?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"type": "text",
|
||||
"content": "Hello! As an AI developed by OpenAI, I'm designed to assist with a wide range of tasks, including but not limited to:\n\n1. **Answering Questions**: I can provide explanations on various topics from science to history, technology, and more.\n2. **Providing Information**: Whether you need the latest knowledge (up to my last training data), tips, statistics, or how-to guides, I can help.\n3. **Learning and Education**: I can assist with learning new skills, languages, or subjects.\n4. **Writing and Editing**: From composing texts to editing them, I can assist in creating content such as essays, emails, and creative writing.\n5. **Programming Help**: Guidance in understanding programming concepts, debugging code, or learning about algorithms.\n6. **Entertainment**: I can tell jokes, stories, or even generate ideas for games and activities.\n7. **Brainstorming**: Whether it's ideas for a birthday party or strategies for a marketing plan, I can help brainstorm.\n8. **Conversations**: I can have in-depth or casual discussions on a broad range of topics.\n9. **Mental Health Support**: While I'm not a substitute for professional help, I can lend an ear and offer some general advice and support.\n10. **Travel Information**: Advice on destinations, tips for travel, and information about different cultures.\n\nPlease note that my responses are based on patterns and knowledge from text up to my last update, so I might not be aware of the very latest data or events."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"type": "text",
|
||||
"content": "I like Entertainment part. Go ahead. Entertain me."
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"type": "text",
|
||||
"content": "Certainly! Let's begin with a light-hearted joke:\n\nWhy don't skeletons fight each other?\n\nThey don't have the guts.\n\nIf you're in the mood for a story instead, let's start with a short tale:\n\n---\n\n**Title: The Lost Key**\n\nIn a quiet town nestled between rolling green hills and a silver-sheened lake, there lived a locksmith named Eli. Eli had a peculiar ability; he could craft keys that didn't just unlock doors but also opened pathways to hidden places and secret realms. However, Eli never ventured through these magical doors; he was content knowing that he had the power to unveil them.\n\nOne day, Eli crafted his masterpiece—a key forged from an unknown, shimmering metal that seemed to hum with an inner light. The locksmith felt a deep connection to this key, as if it were calling to him to seek the door it was destined to open. Despite his usual reservations, curiosity got the better of him, and he embarked on a journey to find the matching lock.\n\nHe traveled through the town, greeting familiar faces and searching every nook and cranny, but to no avail. Eli was about to give up when he noticed a peculiar cat with a coat as dark as the night sky, its eyes glinting with a hint of stars. The cat seemed to beckon him to follow.\n\nLed by the cosmic feline through alleys and gardens, Eli arrived at a forgotten part of town where an old, vine-covered wall stood. To his astonishment, in the wall was a door that rippled like the surface of the lake when touched by a soft breeze. It was a door he had never seen before. Trembling with excitement, Eli took out the key. It fit perfectly into the lock, which turned with a satisfying click.\n\nAs the door creaked open, a brilliant light spilled forth. Eli shielded his eyes, and when he finally could see, he found himself not in another realm, but in the heart of his own town. However, everything appeared different—colors were more vibrant, sounds were clearer, and the air carried the sweet scent of blooming flowers.\n\nThe town's people approached him, smiling. \"We've been waiting for you,\" they said. \"The key wasn't meant to lead you to another world, but to allow you to see your own world differently. To show you that magic exists here, with all of us.\"\n\nEli's life transformed that day. He realized that every smile, every act of kindness, and every moment of connection was a door to a world just as magical as those he could unlock with his keys.\n\nAnd as for the mysterious cosmic cat, it would appear now and then, always at the edge of his vision, reminding him that the real magic was here all along, hidden in plain sight.\n\n---\n\nI hope you enjoyed the joke and the short story. If you'd like to continue the entertainment, feel free to guide me on what you'd like to hear or discuss next!"
|
||||
}
|
||||
],
|
||||
"model": {
|
||||
"id": "gpt-4-1106-preview",
|
||||
"name": "GPT-4 Turbo Preview",
|
||||
"maxLength": 384000,
|
||||
"tokenLimit": 128000
|
||||
},
|
||||
"prompt": "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.",
|
||||
"temperature": 1,
|
||||
"folderId": null
|
||||
},
|
||||
{
|
||||
"id": "f883fb40-6e5c-45fd-92d4-883a5595560f",
|
||||
"name": "Give me the code that inverts ...",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"type": "text",
|
||||
"content": "Give me the code that inverts binary tree in COBOL"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"type": "text",
|
||||
"content": "COBOL is not a language that is commonly used for tasks like inverting a binary tree. It is more suited for business applications and mainframe programming. However, if you are looking for a general idea of how you might approach this in COBOL, here is a simplified example:\n\n```cobol\nIDENTIFICATION DIVISION.\nPROGRAM-ID. InvertBinaryTree.\n\nDATA DIVISION.\nWORKING-STORAGE SECTION.\n01 Node.\n 05 Left-Child PIC 9.\n 05 Right-Child PIC 9.\n\nPROCEDURE DIVISION.\nInvert-Binary-Tree.\n MOVE 1 TO Left-Child\n MOVE 2 TO Right-Child\n\n DISPLAY \"Original Binary Tree:\"\n DISPLAY \"Left Child: \" Left-Child\n DISPLAY \"Right Child: \" Right-Child\n\n PERFORM Swap-Children\n\n DISPLAY \"Inverted Binary Tree:\"\n DISPLAY \"Left Child: \" Left-Child\n DISPLAY \"Right Child: \" Right-Child\n\n STOP RUN.\n\nSwap-Children.\n MOVE Left-Child TO Node\n MOVE Right-Child TO Left-Child\n MOVE Node TO Right-Child\n```\n\nThis is a very basic example and may not cover all the complexities of inverting a binary tree. You would need to adapt and expand this code significantly for a more complex binary tree structure."
|
||||
}
|
||||
],
|
||||
"model": {
|
||||
"id": "gpt-3.5-turbo",
|
||||
"name": "GPT-3.5"
|
||||
},
|
||||
"prompt": "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.",
|
||||
"temperature": 0.5,
|
||||
"folderId": null
|
||||
}
|
||||
],
|
||||
"folders": [
|
||||
{
|
||||
"id": "cdc857de-e669-498d-8fac-edc4995c9d7a",
|
||||
"name": "New folder",
|
||||
"type": "prompt"
|
||||
}
|
||||
],
|
||||
"prompts": [
|
||||
{
|
||||
"id": "a61573d8-6686-487c-9c5d-cd79c6d201ee",
|
||||
"name": "Prompt 1",
|
||||
"description": "",
|
||||
"content": "",
|
||||
"model": {
|
||||
"id": "gpt-4",
|
||||
"name": "GPT-4",
|
||||
"maxLength": 24000,
|
||||
"tokenLimit": 8000
|
||||
},
|
||||
"folderId": null
|
||||
},
|
||||
{
|
||||
"id": "9bf456e3-61fc-494d-b940-55ec934e7a04",
|
||||
"name": "Prompt 2",
|
||||
"description": "afgdfsg",
|
||||
"content": "adfdsfsadf",
|
||||
"model": {
|
||||
"id": "gpt-4",
|
||||
"name": "GPT-4",
|
||||
"maxLength": 24000,
|
||||
"tokenLimit": 8000
|
||||
},
|
||||
"folderId": null
|
||||
}
|
||||
]
|
||||
}
|
1224  api/server/utils/import/__data__/chatgpt-export.json  Normal file
File diff suppressed because one or more lines are too long
429  api/server/utils/import/__data__/chatgpt-tree.json  Normal file
|
@ -0,0 +1,429 @@
|
|||
[
|
||||
{
|
||||
"title": "Assist user with summary",
|
||||
"create_time": 1714585031.148505,
|
||||
"update_time": 1714585060.879308,
|
||||
"mapping": {
|
||||
"d38605d2-7b2c-43de-b044-22ce472c749b": {
|
||||
"id": "d38605d2-7b2c-43de-b044-22ce472c749b",
|
||||
"message": {
|
||||
"id": "d38605d2-7b2c-43de-b044-22ce472c749b",
|
||||
"author": {
|
||||
"role": "system",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": null,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [""]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 0,
|
||||
"metadata": {
|
||||
"is_visually_hidden_from_conversation": true
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa1f70c-100e-46f0-999e-10c8565f047f",
|
||||
"children": ["aaa297ba-e2da-440e-84f4-e62e7be8b003"]
|
||||
},
|
||||
"aaa1f70c-100e-46f0-999e-10c8565f047f": {
|
||||
"id": "aaa1f70c-100e-46f0-999e-10c8565f047f",
|
||||
"message": null,
|
||||
"parent": null,
|
||||
"children": ["d38605d2-7b2c-43de-b044-22ce472c749b"]
|
||||
},
|
||||
"aaa297ba-e2da-440e-84f4-e62e7be8b003": {
|
||||
"id": "aaa297ba-e2da-440e-84f4-e62e7be8b003",
|
||||
"message": {
|
||||
"id": "aaa297ba-e2da-440e-84f4-e62e7be8b003",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585031.150442,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["hi there"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"request_id": "87d189bb49d412c5-IAD",
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "d38605d2-7b2c-43de-b044-22ce472c749b",
|
||||
"children": ["bda8a275-886d-4f59-b38c-d7037144f0d5"]
|
||||
},
|
||||
"bda8a275-886d-4f59-b38c-d7037144f0d5": {
|
||||
"id": "bda8a275-886d-4f59-b38c-d7037144f0d5",
|
||||
"message": {
|
||||
"id": "bda8a275-886d-4f59-b38c-d7037144f0d5",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585031.757056,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["Hello! How can I assist you today?"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [100260]
|
||||
},
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa297ba-e2da-440e-84f4-e62e7be8b003",
|
||||
"is_complete": true,
|
||||
"request_id": "87d189bb49d412c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa297ba-e2da-440e-84f4-e62e7be8b003",
|
||||
"children": ["aaa24023-b02f-4d49-b568-5856b41750c0", "aaa236a3-cdfc-4eb1-b5c5-790c6641f880"]
|
||||
},
|
||||
"aaa24023-b02f-4d49-b568-5856b41750c0": {
|
||||
"id": "aaa24023-b02f-4d49-b568-5856b41750c0",
|
||||
"message": {
|
||||
"id": "aaa24023-b02f-4d49-b568-5856b41750c0",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585034.306995,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["so cool bro"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"request_id": "87d189cf3df512c5-IAD",
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "bda8a275-886d-4f59-b38c-d7037144f0d5",
|
||||
"children": ["23afbea9-ca08-49f2-b417-e7ae58a1c97d"]
|
||||
},
|
||||
"23afbea9-ca08-49f2-b417-e7ae58a1c97d": {
|
||||
"id": "23afbea9-ca08-49f2-b417-e7ae58a1c97d",
|
||||
"message": {
|
||||
"id": "23afbea9-ca08-49f2-b417-e7ae58a1c97d",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585034.755907,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["Thanks! What brings you here today?"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [100260]
|
||||
},
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"is_complete": true,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa24023-b02f-4d49-b568-5856b41750c0",
|
||||
"request_id": "87d189cf3df512c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa24023-b02f-4d49-b568-5856b41750c0",
|
||||
"children": ["aaa292cc-1842-4dbf-bd79-13cf7150366a"]
|
||||
},
|
||||
"aaa292cc-1842-4dbf-bd79-13cf7150366a": {
|
||||
"id": "aaa292cc-1842-4dbf-bd79-13cf7150366a",
|
||||
"message": {
|
||||
"id": "aaa292cc-1842-4dbf-bd79-13cf7150366a",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585037.56986,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["tell me a story"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"request_id": "87d189e3dac712c5-IAD",
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "23afbea9-ca08-49f2-b417-e7ae58a1c97d",
|
||||
"children": ["ada93f81-f59e-4b31-933d-1357efd68bfc"]
|
||||
},
|
||||
"ada93f81-f59e-4b31-933d-1357efd68bfc": {
|
||||
"id": "ada93f81-f59e-4b31-933d-1357efd68bfc",
|
||||
"message": {
|
||||
"id": "ada93f81-f59e-4b31-933d-1357efd68bfc",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585045.606752,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
"Sure! Here's a short story for you:\n\n---\n\nOnce upon a time, in a small village nestled between rolling"
|
||||
]
|
||||
},
|
||||
"status": "in_progress",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa292cc-1842-4dbf-bd79-13cf7150366a",
|
||||
"finish_details": {
|
||||
"type": "interrupted"
|
||||
},
|
||||
"request_id": "87d189e3dac712c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa292cc-1842-4dbf-bd79-13cf7150366a",
|
||||
"children": []
|
||||
},
|
||||
"aaa236a3-cdfc-4eb1-b5c5-790c6641f880": {
|
||||
"id": "aaa236a3-cdfc-4eb1-b5c5-790c6641f880",
|
||||
"message": {
|
||||
"id": "aaa236a3-cdfc-4eb1-b5c5-790c6641f880",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585050.906034,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["hi again"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"request_id": "87d18a36cf9312c5-IAD",
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "bda8a275-886d-4f59-b38c-d7037144f0d5",
|
||||
"children": ["db88eddf-3622-4246-8527-b6eaf0e9e8cd"]
|
||||
},
|
||||
"db88eddf-3622-4246-8527-b6eaf0e9e8cd": {
|
||||
"id": "db88eddf-3622-4246-8527-b6eaf0e9e8cd",
|
||||
"message": {
|
||||
"id": "db88eddf-3622-4246-8527-b6eaf0e9e8cd",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585051.690729,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["Hey! Welcome back. What's on your mind?"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [100260]
|
||||
},
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"is_complete": true,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa236a3-cdfc-4eb1-b5c5-790c6641f880",
|
||||
"request_id": "87d18a36cf9312c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa236a3-cdfc-4eb1-b5c5-790c6641f880",
|
||||
"children": ["aaa20127-b9e3-44f6-afbe-a2475838625a"]
|
||||
},
|
||||
"aaa20127-b9e3-44f6-afbe-a2475838625a": {
|
||||
"id": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"message": {
|
||||
"id": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585055.908847,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": ["tell me a joke"]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"request_id": "87d18a6e39a312c5-IAD",
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "db88eddf-3622-4246-8527-b6eaf0e9e8cd",
|
||||
"children": ["d0d2a7df-d2fc-4df9-bf0a-1c5121e227ae", "f63b8e17-aa5c-4ca6-a1bf-d4d285e269b8"]
|
||||
},
|
||||
"d0d2a7df-d2fc-4df9-bf0a-1c5121e227ae": {
|
||||
"id": "d0d2a7df-d2fc-4df9-bf0a-1c5121e227ae",
|
||||
"message": {
|
||||
"id": "d0d2a7df-d2fc-4df9-bf0a-1c5121e227ae",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585056.580956,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
"Sure, here's one for you:\n\nWhy don't scientists trust atoms?\n\nBecause they make up everything!"
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [100260]
|
||||
},
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"is_complete": true,
|
||||
"request_id": "87d18a55ca6212c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"children": []
|
||||
},
|
||||
"f63b8e17-aa5c-4ca6-a1bf-d4d285e269b8": {
|
||||
"id": "f63b8e17-aa5c-4ca6-a1bf-d4d285e269b8",
|
||||
"message": {
|
||||
"id": "f63b8e17-aa5c-4ca6-a1bf-d4d285e269b8",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1714585060.598792,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
"Sure, here's one for you:\n\nWhy don't scientists trust atoms?\n\nBecause they make up everything!"
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [100260]
|
||||
},
|
||||
"citations": [],
|
||||
"gizmo_id": null,
|
||||
"is_complete": true,
|
||||
"message_type": null,
|
||||
"model_slug": "text-davinci-002-render-sha",
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"pad": "AAAAAAAAAAAAAAAAAAAAAAAAAA",
|
||||
"parent_id": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"request_id": "87d18a6e39a312c5-IAD",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all"
|
||||
},
|
||||
"parent": "aaa20127-b9e3-44f6-afbe-a2475838625a",
|
||||
"children": []
|
||||
}
|
||||
},
|
||||
"moderation_results": [],
|
||||
"current_node": "f63b8e17-aa5c-4ca6-a1bf-d4d285e269b8",
|
||||
"plugin_ids": null,
|
||||
"conversation_id": "d5dc5307-6807-41a0-8b04-4acee626eeb7",
|
||||
"conversation_template_id": null,
|
||||
"gizmo_id": null,
|
||||
"is_archived": false,
|
||||
"safe_urls": [],
|
||||
"default_model_slug": "text-davinci-002-render-sha",
|
||||
"id": "d5dc5307-6807-41a0-8b04-4acee626eeb7"
|
||||
}
|
||||
]
|
143  api/server/utils/import/__data__/librechat-export.json  Normal file
|
@ -0,0 +1,143 @@
|
|||
{
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"endpoint": "openAI",
|
||||
"title": "Conversation 1. Web Search",
|
||||
"exportAt": "16:33:32 GMT+0200 (Central European Summer Time)",
|
||||
"branches": true,
|
||||
"recursive": true,
|
||||
"options": {
|
||||
"presetId": null,
|
||||
"model": "gpt-3.5-turbo",
|
||||
"chatGptLabel": null,
|
||||
"promptPrefix": null,
|
||||
"temperature": 1,
|
||||
"top_p": 1,
|
||||
"presence_penalty": 0,
|
||||
"frequency_penalty": 0,
|
||||
"resendFiles": true,
|
||||
"imageDetail": "auto",
|
||||
"endpoint": "openAI",
|
||||
"title": "VW Transporter 2014 Fuel Consumption. Web Search"
|
||||
},
|
||||
"messagesTree": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40b6",
|
||||
"messageId": "b123942f-ca1a-4b16-9e1f-ea4af5171168",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.230Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": true,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "00000000-0000-0000-0000-000000000000",
|
||||
"sender": "user",
|
||||
"text": "What is the fuel consumption of vw transporter with 8 people in l/km",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.230Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40b8",
|
||||
"messageId": "549a4f45-cf93-4e3b-ae62-1abf02afbfc8",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.242Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": false,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "b123942f-ca1a-4b16-9e1f-ea4af5171168",
|
||||
"sender": "GPT-3.5",
|
||||
"text": "The fuel consumption of a Volkswagen Transporter can vary based on the specific model and driving conditions. For example, the 2021 Volkswagen Transporter T6.1 L2H1 2.0 TDI 110HP, which is one of the recent models, doesn't have its fuel consumption data explicitly mentioned in the sources I found. However, a review of a Volkswagen Transporter model, which is equipped with a 2.0-litre four-cylinder turbo diesel engine and a seven-speed dual-clutch automatic transmission, mentions a combined cycle fuel consumption figure of 8.2 litres per 100km. It's important to note that this figure might vary, especially when the vehicle is carrying eight people, as more weight can lead to increased fuel consumption.\n\nFor more detailed information, you might need to refer to the specific model's manual or contact a Volkswagen dealer. Also, keep in mind that real-world fuel consumption can be influenced by factors such as driving style, road conditions, and maintenance of the vehicle ([2021 Volkswagen Transporter T6.1 L2H1 2.0 TDI 110HP specs, dimensions](https://www.ultimatespecs.com/car-specs/Volkswagen/118424/2021-Volkswagen-Transporter-T61-L2H1-20-TDI-110HP.html)) ([Volkswagen Transporter Review - Drive](https://www.drive.com.au/reviews/volkswagen-transporter-review/)).",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.242Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40ba",
|
||||
"messageId": "880e5357-3e0c-4218-b351-fd3fc184adef",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.247Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": true,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "549a4f45-cf93-4e3b-ae62-1abf02afbfc8",
|
||||
"sender": "user",
|
||||
"text": "What about 10 year old model",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.247Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40bc",
|
||||
"messageId": "e9796d11-3bdf-4e25-9f0e-4802bbbb8c6d",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.256Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": false,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "880e5357-3e0c-4218-b351-fd3fc184adef",
|
||||
"sender": "GPT-3.5",
|
||||
"text": "The fuel consumption of a 2014 Volkswagen Transporter, specifically the TDI340 model with a 2.0 L engine, is reported as follows:\n- Combined: 8.2 L/100km\n- Highway: 6.9 L/100km\n- City: 10.2 L/100km\n\nThese figures can vary based on driving conditions and the vehicle's load. The actual consumption might be higher when carrying eight people due to the additional weight ([Volkswagen Transporter TDI340 2014 - Pricing & Specifications | carsales.com.au](https://www.carsales.com.au/volkswagen/transporter/price/2014/tdi340-381631/)).",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.256Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40be",
|
||||
"messageId": "04408c06-62dc-4961-8ef5-4336b68e7a0a",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.261Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": true,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "e9796d11-3bdf-4e25-9f0e-4802bbbb8c6d",
|
||||
"sender": "user",
|
||||
"text": "Is it for fully loaded car?",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.261Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": [
|
||||
{
|
||||
"_id": "6615516574dc2ddcdebe40c0",
|
||||
"messageId": "645b925b-ef14-451f-9be3-814de55a5ae4",
|
||||
"__v": 0,
|
||||
"conversationId": "af1ea676-f525-444f-a9ed-7c8dbf062733",
|
||||
"createdAt": "2024-04-09T14:32:05.266Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": false,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "04408c06-62dc-4961-8ef5-4336b68e7a0a",
|
||||
"sender": "GPT-3.5",
|
||||
"text": "The fuel consumption figures for the 2014 Volkswagen Transporter TDI340, such as the combined 8.2 L/100km, are typically based on standardized tests. These tests don't necessarily reflect a fully loaded vehicle. The actual fuel consumption can vary and is likely to be higher when the vehicle is fully loaded with passengers or cargo, due to the increased weight and potential changes in driving conditions such as more frequent acceleration and deceleration ([Volkswagen Transporter TDI340 2014 - Pricing & Specifications | carsales.com.au](https://www.carsales.com.au/volkswagen/transporter/price/2014/tdi340-381631/)).",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-04-09T14:32:05.266Z",
|
||||
"user": "65f1ad8c90523874d2d409f8",
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
153  api/server/utils/import/__data__/librechat-tree.json  Normal file
|
@ -0,0 +1,153 @@
|
|||
{
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"endpoint": "openAI",
|
||||
"title": "Troubleshooting Python Virtual Environment Activation Issue",
|
||||
"exportAt": "13:18:04 GMT-0400 (Eastern Daylight Time)",
|
||||
"branches": true,
|
||||
"recursive": true,
|
||||
"options": {
|
||||
"presetId": null,
|
||||
"model": "gpt-4-turbo",
|
||||
"chatGptLabel": "Software Engineer",
|
||||
"promptPrefix": "You are an expert software engineer.",
|
||||
"temperature": 0.8,
|
||||
"top_p": 1,
|
||||
"presence_penalty": 0,
|
||||
"frequency_penalty": 0,
|
||||
"resendFiles": true,
|
||||
"imageDetail": "auto",
|
||||
"endpoint": "openAI",
|
||||
"title": "Troubleshooting Python Virtual Environment Activation Issue"
|
||||
},
|
||||
"messagesTree": [
|
||||
{
|
||||
"_id": "66326f3f04bed94b7f5be68d",
|
||||
"messageId": "9501f99d-9bbb-40cb-bbb2-16d79aeceb72",
|
||||
"__v": 0,
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"createdAt": "2024-05-01T16:35:11.530Z",
|
||||
"endpoint": "openAI",
|
||||
"error": false,
|
||||
"isCreatedByUser": true,
|
||||
"isEdited": false,
|
||||
"model": null,
|
||||
"parentMessageId": "00000000-0000-0000-0000-000000000000",
|
||||
"sender": "User",
|
||||
"text": "How do I use source venv/bin/activate?\nbash: venv/bin/activate: No such file or directory",
|
||||
"tokenCount": 59,
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-05-01T16:35:39.114Z",
|
||||
"user": "64d4f005d73ef522e1f48818",
|
||||
"children": [
|
||||
{
|
||||
"_id": "66326f4004bed94b7f5be699",
|
||||
"messageId": "4c395510-fdfd-4556-b034-f9440f025e91",
|
||||
"__v": 0,
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"createdAt": "2024-05-01T16:35:12.604Z",
|
||||
"error": false,
|
||||
"isCreatedByUser": false,
|
||||
"isEdited": false,
|
||||
"model": "gpt-4-turbo",
|
||||
"parentMessageId": "9501f99d-9bbb-40cb-bbb2-16d79aeceb72",
|
||||
"sender": "Software Engineer",
|
||||
"text": "It looks like you're trying to activate a Python virtual environment but encountering an issue because the system cannot find the `activate` script in the `venv/bin/` directory. This typically happens if the virtual environment has not been created yet or if the path specified is incorrect. Here are a few steps to troubleshoot and resolve this issue:\n\n1. **Check if the Virtual Environment Exists**:\n Ensure that the virtual environment directory (`venv`) exists in your current directory (`~/denis-libre`). You can check this by listing the directories:\n\n ```bash\n ls -l\n ```\n\n Look for `venv` in the output. If it's not there, you'll need to create it.\n\n2. **Create the Virtual Environment**:\n If the virtual environment does not exist, you can create one by using the following command:\n\n ```bash\n python3 -m venv venv\n ```\n\n This command creates a new virtual environment named `venv` in your current directory.\n\n3. **Activate the Virtual Environment**:\n Once you have confirmed that the `venv` directory exists, try activating the virtual environment again:\n\n ```bash\n source venv/bin/activate\n ```\n\n If you still receive an error, double-check the path. If you are on a Windows system using Git Bash or similar, the path might be `venv\\Scripts\\activate` instead.\n\n4. **Check for Environment Specific Issues**:\n - If you are using a different shell (like `csh` or `fish`), the activation script could be different (e.g., `activate.csh` or `activate.fish`).\n - Ensure there’s no typo in your command or path misconfiguration.\n\n5. **Permissions Issue**:\n Make sure you have the necessary permissions to access the `venv` directory and its contents. If not, you might need to modify the permissions:\n\n ```bash\n chmod -R 755 venv\n ```\n\n6. **Using the Correct Python Version**:\n Confirm that you are using the correct version of Python for which the virtual environment is supposed to be set up. Sometimes, using a different version might cause issues.\n\nBy following these steps, you should be able to resolve the issue with activating your Python virtual environment. If the problem persists, there might be a more specific issue with your Python installation or system configuration.",
|
||||
"unfinished": false,
|
||||
"updatedAt": "2024-05-01T16:35:39.104Z",
|
||||
"user": "64d4f005d73ef522e1f48818",
|
||||
"endpoint": "openAI",
|
||||
"finish_reason": "stop",
|
||||
"tokenCount": 504,
|
||||
"children": [
|
||||
{
|
||||
"messageId": "6e4c95ca-fcb1-4869-adc3-b4bcec83a270",
|
||||
"parentMessageId": "4c395510-fdfd-4556-b034-f9440f025e91",
|
||||
"conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
|
||||
"sender": "User",
|
||||
"text": "thanks",
|
||||
"isCreatedByUser": true,
|
||||
"tokenCount": 5,
|
||||
"children": [
                {
                  "messageId": "5da2cdce-87f7-46b4-87c5-e8224e5d5125",
                  "conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
                  "parentMessageId": "6e4c95ca-fcb1-4869-adc3-b4bcec83a270",
                  "isCreatedByUser": false,
                  "model": "gpt-4-turbo",
                  "sender": "Software Engineer",
                  "text": "You're welcome! If you have any more questions or need further assistance, feel free to ask. Good luck with your project!",
                  "promptTokens": 586,
                  "endpoint": "openAI",
                  "finish_reason": "stop",
                  "children": []
                },
                {
                  "messageId": "64ed7568-bdcf-42a9-b34a-73386d4d99dc",
                  "conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
                  "parentMessageId": "6e4c95ca-fcb1-4869-adc3-b4bcec83a270",
                  "isCreatedByUser": false,
                  "model": "gpt-4-turbo",
                  "sender": "Software Engineer",
                  "text": "You're welcome! If you have any more questions or need further assistance, feel free to ask. Happy coding!",
                  "promptTokens": 586,
                  "endpoint": "openAI",
                  "finish_reason": "stop",
                  "children": [
                    {
                      "messageId": "f82cca6f-7d2b-4fc1-9d5a-636acc137368",
                      "parentMessageId": "64ed7568-bdcf-42a9-b34a-73386d4d99dc",
                      "conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
                      "sender": "User",
                      "text": "does coding make you happy",
                      "isCreatedByUser": true,
                      "tokenCount": 9,
                      "children": [
                        {
                          "messageId": "ad7b6ec6-aaa4-434c-9188-def6aadbb2bf",
                          "conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
                          "parentMessageId": "f82cca6f-7d2b-4fc1-9d5a-636acc137368",
                          "isCreatedByUser": false,
                          "model": "gpt-4-turbo",
                          "sender": "Software Engineer",
                          "text": "As a software engineer, coding can indeed be a source of happiness and satisfaction for many professionals in the field. Here’s why coding might make someone happy:\n\n1. **Problem Solving**: Coding offers a constant stream of challenges that require problem-solving skills. Solving these problems can be immensely gratifying and can provide a sense of accomplishment.\n\n2. **Creativity**: Writing software is often a creative process, similar to composing music or painting. Developers can express themselves through their code, which can be a fulfilling experience.\n\n3. **Continuous Learning**: The tech field is always evolving, offering endless opportunities to learn new languages, tools, and techniques. For many, this continuous learning keeps the job exciting and engaging.\n\n4. **Impact**: Software engineers often work on applications that impact the lives of millions of people. Knowing that your work is helping others can be a powerful source of happiness.\n\n5. **Community**: The coding community is known for its collaboration and sharing of knowledge through platforms like GitHub, Stack Overflow, and various forums. Being a part of this community can provide a sense of belonging and support.\n\n6. **Career Opportunities**: Coding skills are in high demand, which can lead to job security, good salaries, and the opportunity to work in a variety of industries. This can contribute to overall life satisfaction and stability.\n\nHowever, like any job, coding can also be stressful at times. Deadlines, bugs, difficult problems, and other challenges can cause frustration. The key is to find a balance, continue learning, and choose projects that align with your interests and values. If these elements are in place, coding can indeed be a very happy career choice.",
                          "promptTokens": 622,
                          "endpoint": "openAI",
                          "finish_reason": "stop",
                          "children": []
                        }
                      ]
                    }
                  ]
                }
              ]
            },
            {
              "messageId": "d3e12088-962e-42ce-900d-edd6816acae5",
              "parentMessageId": "4c395510-fdfd-4556-b034-f9440f025e91",
              "conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
              "sender": "User",
              "text": "lol",
              "isCreatedByUser": true,
              "tokenCount": 5,
              "children": [
                {
                  "messageId": "1f062c99-ff0a-4cf4-a1cf-7150261a24e2",
                  "conversationId": "4a86c40e-e627-4454-b158-889680e23ad3",
                  "parentMessageId": "d3e12088-962e-42ce-900d-edd6816acae5",
                  "isCreatedByUser": false,
                  "model": "gpt-4-turbo",
                  "sender": "Software Engineer",
                  "text": "It looks like you might have been amused or found something funny about the situation! If you have any specific questions or need further help with your virtual environment setup or anything else related to software engineering, feel free to ask!",
                  "promptTokens": 586,
                  "endpoint": "openAI",
                  "finish_reason": "stop",
                  "children": []
                }
              ]
            }
          ]
        }
      ]
    }
  ]
}

149 api/server/utils/import/importBatchBuilder.js Normal file
@@ -0,0 +1,149 @@
const { v4: uuidv4 } = require('uuid');
const { EModelEndpoint, Constants, openAISettings } = require('librechat-data-provider');
const { bulkSaveConvos } = require('~/models/Conversation');
const { bulkSaveMessages } = require('~/models/Message');
const { logger } = require('~/config');

/**
 * Factory function for creating an instance of ImportBatchBuilder.
 * @param {string} requestUserId - The ID of the user making the request.
 * @returns {ImportBatchBuilder} - The newly created ImportBatchBuilder instance.
 */
function createImportBatchBuilder(requestUserId) {
  return new ImportBatchBuilder(requestUserId);
}

/**
 * Class for building a batch of conversations and messages and pushing them to DB for Conversation Import functionality
 */
class ImportBatchBuilder {
  /**
   * Creates an instance of ImportBatchBuilder.
   * @param {string} requestUserId - The ID of the user making the import request.
   */
  constructor(requestUserId) {
    this.requestUserId = requestUserId;
    this.conversations = [];
    this.messages = [];
  }

  /**
   * Starts a new conversation in the batch.
   * @param {string} [endpoint=EModelEndpoint.openAI] - The endpoint for the conversation. Defaults to EModelEndpoint.openAI.
   * @returns {void}
   */
  startConversation(endpoint) {
    // we are simplifying by using a single model for the entire conversation
    this.endpoint = endpoint || EModelEndpoint.openAI;
    this.conversationId = uuidv4();
    this.lastMessageId = Constants.NO_PARENT;
  }

  /**
   * Adds a user message to the current conversation.
   * @param {string} text - The text of the user message.
   * @returns {object} The saved message object.
   */
  addUserMessage(text) {
    const message = this.saveMessage({ text, sender: 'user', isCreatedByUser: true });
    return message;
  }

  /**
   * Adds a GPT message to the current conversation.
   * @param {string} text - The text of the GPT message.
   * @param {string} [model='defaultModel'] - The model used for generating the GPT message. Defaults to 'defaultModel'.
   * @param {string} [sender='GPT-3.5'] - The sender of the GPT message. Defaults to 'GPT-3.5'.
   * @returns {object} The saved message object.
   */
  addGptMessage(text, model, sender = 'GPT-3.5') {
    const message = this.saveMessage({
      text,
      sender,
      isCreatedByUser: false,
      model: model || openAISettings.model.default,
    });
    return message;
  }

  /**
   * Finishes the current conversation and adds it to the batch.
   * @param {string} [title='Imported Chat'] - The title of the conversation. Defaults to 'Imported Chat'.
   * @param {Date} [createdAt] - The creation date of the conversation.
   * @returns {object} The added conversation object.
   */
  finishConversation(title, createdAt) {
    const convo = {
      user: this.requestUserId,
      conversationId: this.conversationId,
      title: title || 'Imported Chat',
      createdAt: createdAt,
      updatedAt: createdAt,
      overrideTimestamp: true,
      endpoint: this.endpoint,
      model: openAISettings.model.default,
    };
    this.conversations.push(convo);

    return convo;
  }

  /**
   * Saves the batch of conversations and messages to the DB.
   * @returns {Promise<void>} A promise that resolves when the batch is saved.
   * @throws {Error} If there is an error saving the batch.
   */
  async saveBatch() {
    try {
      await bulkSaveConvos(this.conversations);
      await bulkSaveMessages(this.messages);
      logger.debug(
        `user: ${this.requestUserId} | Added ${this.conversations.length} conversations and ${this.messages.length} messages to the DB.`,
      );
    } catch (error) {
      logger.error('Error saving batch', error);
      throw error;
    }
  }

  /**
   * Saves a message to the current conversation.
   * @param {object} messageDetails - The details of the message.
   * @param {string} messageDetails.text - The text of the message.
   * @param {string} messageDetails.sender - The sender of the message.
   * @param {string} [messageDetails.messageId] - The ID of the current message.
   * @param {boolean} messageDetails.isCreatedByUser - Indicates whether the message is created by the user.
   * @param {string} [messageDetails.model] - The model used for generating the message.
   * @param {string} [messageDetails.parentMessageId=this.lastMessageId] - The ID of the parent message.
   * @returns {object} The saved message object.
   */
  saveMessage({
    text,
    sender,
    isCreatedByUser,
    model,
    messageId,
    parentMessageId = this.lastMessageId,
  }) {
    const newMessageId = messageId ?? uuidv4();
    const message = {
      parentMessageId,
      messageId: newMessageId,
      conversationId: this.conversationId,
      isCreatedByUser: isCreatedByUser,
      model: model || this.model,
      user: this.requestUserId,
      endpoint: this.endpoint,
      unfinished: false,
      isEdited: false,
      error: false,
      sender,
      text,
    };
    this.lastMessageId = newMessageId;
    this.messages.push(message);
    return message;
  }
}

module.exports = { ImportBatchBuilder, createImportBatchBuilder };

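For orientation, here is a minimal sketch of how an importer is expected to drive the builder above. It is illustrative only and not part of the diff; the message text, model name, and user ID are invented, and the require path assumes the caller sits next to importBatchBuilder.js.

// Illustrative sketch, not part of the PR.
const { createImportBatchBuilder } = require('./importBatchBuilder');

async function importSimpleTranscript(requestUserId) {
  const builder = createImportBatchBuilder(requestUserId);
  builder.startConversation(); // defaults to the openAI endpoint
  builder.addUserMessage('How do I create a virtual environment?');
  builder.addGptMessage('Run "python -m venv venv" and then activate it.', 'gpt-4-turbo');
  builder.finishConversation('Imported Chat', new Date());
  await builder.saveBatch(); // bulk-writes the queued conversations and messages to MongoDB
}
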
295 api/server/utils/import/importers.js Normal file
@@ -0,0 +1,295 @@
const { v4: uuidv4 } = require('uuid');
const { EModelEndpoint, Constants, openAISettings } = require('librechat-data-provider');
const { createImportBatchBuilder } = require('./importBatchBuilder');
const logger = require('~/config/winston');

/**
 * Returns the appropriate importer function based on the provided JSON data.
 *
 * @param {Object} jsonData - The JSON data to import.
 * @returns {Function} - The importer function.
 * @throws {Error} - If the import type is not supported.
 */
function getImporter(jsonData) {
  // For ChatGPT
  if (Array.isArray(jsonData)) {
    logger.info('Importing ChatGPT conversation');
    return importChatGptConvo;
  }

  // For ChatbotUI
  if (jsonData.version && Array.isArray(jsonData.history)) {
    logger.info('Importing ChatbotUI conversation');
    return importChatBotUiConvo;
  }

  // For LibreChat
  if (jsonData.conversationId && jsonData.messagesTree) {
    logger.info('Importing LibreChat conversation');
    return importLibreChatConvo;
  }

  throw new Error('Unsupported import type');
}

/**
 * Imports a chatbot-ui V1 conversation from a JSON file and saves it to the database.
 *
 * @param {Object} jsonData - The JSON data containing the chatbot conversation.
 * @param {string} requestUserId - The ID of the user making the import request.
 * @param {Function} [builderFactory=createImportBatchBuilder] - The factory function to create an import batch builder.
 * @returns {Promise<void>} - A promise that resolves when the import is complete.
 * @throws {Error} - If there is an error creating the conversation from the JSON file.
 */
async function importChatBotUiConvo(
  jsonData,
  requestUserId,
  builderFactory = createImportBatchBuilder,
) {
  // This has been tested with the chatbot-ui V1 export: https://github.com/mckaywrigley/chatbot-ui/tree/b865b0555f53957e96727bc0bbb369c9eaecd83b#legacy-code
  try {
    /** @type {import('./importBatchBuilder').ImportBatchBuilder} */
    const importBatchBuilder = builderFactory(requestUserId);

    for (const historyItem of jsonData.history) {
      importBatchBuilder.startConversation(EModelEndpoint.openAI);
      for (const message of historyItem.messages) {
        if (message.role === 'assistant') {
          importBatchBuilder.addGptMessage(message.content, historyItem.model.id);
        } else if (message.role === 'user') {
          importBatchBuilder.addUserMessage(message.content);
        }
      }
      importBatchBuilder.finishConversation(historyItem.name, new Date());
    }
    await importBatchBuilder.saveBatch();
    logger.info(`user: ${requestUserId} | ChatbotUI conversation imported`);
  } catch (error) {
    logger.error(`user: ${requestUserId} | Error creating conversation from ChatbotUI file`, error);
  }
}

/**
 * Imports a LibreChat conversation from JSON.
 *
 * @param {Object} jsonData - The JSON data representing the conversation.
 * @param {string} requestUserId - The ID of the user making the import request.
 * @param {Function} [builderFactory=createImportBatchBuilder] - The factory function to create an import batch builder.
 * @returns {Promise<void>} - A promise that resolves when the import is complete.
 */
async function importLibreChatConvo(
  jsonData,
  requestUserId,
  builderFactory = createImportBatchBuilder,
) {
  try {
    /** @type {import('./importBatchBuilder').ImportBatchBuilder} */
    const importBatchBuilder = builderFactory(requestUserId);
    importBatchBuilder.startConversation(EModelEndpoint.openAI);

    let firstMessageDate = null;

    const traverseMessages = (messages, parentMessageId = null) => {
      for (const message of messages) {
        if (!message.text) {
          continue;
        }

        let savedMessage;
        if (message.sender?.toLowerCase() === 'user') {
          savedMessage = importBatchBuilder.saveMessage({
            text: message.text,
            sender: 'user',
            isCreatedByUser: true,
            parentMessageId: parentMessageId,
          });
        } else {
          savedMessage = importBatchBuilder.saveMessage({
            text: message.text,
            sender: message.sender,
            isCreatedByUser: false,
            model: jsonData.options.model,
            parentMessageId: parentMessageId,
          });
        }

        if (!firstMessageDate) {
          firstMessageDate = new Date(message.createdAt);
        }

        if (message.children) {
          traverseMessages(message.children, savedMessage.messageId);
        }
      }
    };

    traverseMessages(jsonData.messagesTree);

    importBatchBuilder.finishConversation(jsonData.title, firstMessageDate);
    await importBatchBuilder.saveBatch();
    logger.debug(`user: ${requestUserId} | Conversation "${jsonData.title}" imported`);
  } catch (error) {
    logger.error(`user: ${requestUserId} | Error creating conversation from LibreChat file`, error);
  }
}

/**
 * Imports ChatGPT conversations from provided JSON data.
 * Initializes the import process by creating a batch builder and processing each conversation in the data.
 *
 * @param {ChatGPTConvo[]} jsonData - Array of conversation objects to be imported.
 * @param {string} requestUserId - The ID of the user who initiated the import process.
 * @param {Function} builderFactory - Factory function to create a new import batch builder instance, defaults to createImportBatchBuilder.
 * @returns {Promise<void>} Promise that resolves when all conversations have been imported.
 */
async function importChatGptConvo(
  jsonData,
  requestUserId,
  builderFactory = createImportBatchBuilder,
) {
  try {
    const importBatchBuilder = builderFactory(requestUserId);
    for (const conv of jsonData) {
      processConversation(conv, importBatchBuilder, requestUserId);
    }
    await importBatchBuilder.saveBatch();
  } catch (error) {
    logger.error(`user: ${requestUserId} | Error creating conversation from imported file`, error);
  }
}

/**
 * Processes a single conversation, adding messages to the batch builder based on author roles and handling text content.
 * It directly manages the addition of messages for different roles and handles citations for assistant messages.
 *
 * @param {ChatGPTConvo} conv - A single conversation object that contains multiple messages and other details.
 * @param {import('./importBatchBuilder').ImportBatchBuilder} importBatchBuilder - The batch builder instance used to manage and batch conversation data.
 * @param {string} requestUserId - The ID of the user who initiated the import process.
 * @returns {void}
 */
function processConversation(conv, importBatchBuilder, requestUserId) {
  importBatchBuilder.startConversation(EModelEndpoint.openAI);

  // Map all message IDs to new UUIDs
  const messageMap = new Map();
  for (const [id, mapping] of Object.entries(conv.mapping)) {
    if (mapping.message && mapping.message.content.content_type) {
      const newMessageId = uuidv4();
      messageMap.set(id, newMessageId);
    }
  }

  // Create and save messages using the mapped IDs
  const messages = [];
  for (const [id, mapping] of Object.entries(conv.mapping)) {
    const role = mapping.message?.author?.role;
    if (!mapping.message) {
      messageMap.delete(id);
      continue;
    } else if (role === 'system') {
      messageMap.delete(id);
      continue;
    }

    const newMessageId = messageMap.get(id);
    const parentMessageId =
      mapping.parent && messageMap.has(mapping.parent)
        ? messageMap.get(mapping.parent)
        : Constants.NO_PARENT;

    const messageText = formatMessageText(mapping.message);

    const isCreatedByUser = role === 'user';
    let sender = isCreatedByUser ? 'user' : 'GPT-3.5';
    const model = mapping.message.metadata.model_slug || openAISettings.model.default;
    if (model === 'gpt-4') {
      sender = 'GPT-4';
    }

    messages.push({
      messageId: newMessageId,
      parentMessageId,
      text: messageText,
      sender,
      isCreatedByUser,
      model,
      user: requestUserId,
      endpoint: EModelEndpoint.openAI,
    });
  }

  for (const message of messages) {
    importBatchBuilder.saveMessage(message);
  }

  importBatchBuilder.finishConversation(conv.title, new Date(conv.create_time * 1000));
}

/**
 * Processes text content of messages authored by an assistant, inserting citation links as required.
 * Applies citation metadata to construct regex patterns and replacements for inserting links into the text.
 *
 * @param {ChatGPTMessage} messageData - The message data containing metadata about citations.
 * @param {string} messageText - The original text of the message which may be altered by inserting citation links.
 * @returns {string} - The updated message text after processing for citations.
 */
function processAssistantMessage(messageData, messageText) {
  const citations = messageData.metadata.citations ?? [];

  for (const citation of citations) {
    if (
      !citation.metadata ||
      !citation.metadata.extra ||
      !citation.metadata.extra.cited_message_idx ||
      (citation.metadata.type && citation.metadata.type !== 'webpage')
    ) {
      continue;
    }

    const pattern = new RegExp(
      `\\u3010${citation.metadata.extra.cited_message_idx}\\u2020.+?\\u3011`,
      'g',
    );
    const replacement = ` ([${citation.metadata.title}](${citation.metadata.url}))`;
    messageText = messageText.replace(pattern, replacement);
  }

  return messageText;
}

/**
 * Formats the text content of a message based on its content type and author role.
 * @param {ChatGPTMessage} messageData - The message data.
 * @returns {string} - The updated message text after processing.
 */
function formatMessageText(messageData) {
  const isText = messageData.content.content_type === 'text';
  let messageText = '';

  if (isText && messageData.content.parts) {
    messageText = messageData.content.parts.join(' ');
  } else if (messageData.content.content_type === 'code') {
    messageText = `\`\`\`${messageData.content.language}\n${messageData.content.text}\n\`\`\``;
  } else if (messageData.content.content_type === 'execution_output') {
    messageText = `Execution Output:\n> ${messageData.content.text}`;
  } else if (messageData.content.parts) {
    for (const part of messageData.content.parts) {
      if (typeof part === 'string') {
        messageText += part + ' ';
      } else if (typeof part === 'object') {
        messageText = `\`\`\`json\n${JSON.stringify(part, null, 2)}\n\`\`\`\n`;
      }
    }
    messageText = messageText.trim();
  } else {
    messageText = `\`\`\`json\n${JSON.stringify(messageData.content, null, 2)}\n\`\`\``;
  }

  if (isText && messageData.author.role !== 'user') {
    messageText = processAssistantMessage(messageData, messageText);
  }

  return messageText;
}

module.exports = { getImporter };

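As a quick illustration, the dispatch in getImporter keys purely off the shape of the uploaded JSON. The sample payloads below are made up and only carry the fields the checks above inspect.

// Illustrative sketch, not part of the PR.
const { getImporter } = require('./importers');

// ChatGPT exports are a top-level array of conversations.
getImporter([{ title: 'ChatGPT convo', mapping: {} }]); // -> importChatGptConvo

// Chatbot UI v1 exports carry a version number and a history array.
getImporter({ version: 4, history: [], folders: [] }); // -> importChatBotUiConvo

// LibreChat exports carry a conversationId and a messagesTree.
getImporter({ conversationId: 'abc', messagesTree: [] }); // -> importLibreChatConvo

// Anything else throws 'Unsupported import type'.
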
246 api/server/utils/import/importers.spec.js Normal file
@@ -0,0 +1,246 @@
const fs = require('fs');
const path = require('path');
const { EModelEndpoint, Constants } = require('librechat-data-provider');
const { ImportBatchBuilder } = require('./importBatchBuilder');
const { getImporter } = require('./importers');

// Mocking the ImportBatchBuilder class and its methods
jest.mock('./importBatchBuilder', () => {
  return {
    ImportBatchBuilder: jest.fn().mockImplementation(() => {
      return {
        startConversation: jest.fn().mockResolvedValue(undefined),
        addUserMessage: jest.fn().mockResolvedValue(undefined),
        addGptMessage: jest.fn().mockResolvedValue(undefined),
        saveMessage: jest.fn().mockResolvedValue(undefined),
        finishConversation: jest.fn().mockResolvedValue(undefined),
        saveBatch: jest.fn().mockResolvedValue(undefined),
      };
    }),
  };
});

describe('importChatGptConvo', () => {
  it('should import conversation correctly', async () => {
    const expectedNumberOfMessages = 19;
    const expectedNumberOfConversations = 2;
    // Given
    const jsonData = JSON.parse(
      fs.readFileSync(path.join(__dirname, '__data__', 'chatgpt-export.json'), 'utf8'),
    );
    const requestUserId = 'user-123';
    const mockedBuilderFactory = jest.fn().mockReturnValue(new ImportBatchBuilder(requestUserId));

    // When
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId, mockedBuilderFactory);

    // Then
    expect(mockedBuilderFactory).toHaveBeenCalledWith(requestUserId);
    const mockImportBatchBuilder = mockedBuilderFactory.mock.results[0].value;

    expect(mockImportBatchBuilder.startConversation).toHaveBeenCalledWith(EModelEndpoint.openAI);
    expect(mockImportBatchBuilder.saveMessage).toHaveBeenCalledTimes(expectedNumberOfMessages); // Adjust expected number
    expect(mockImportBatchBuilder.finishConversation).toHaveBeenCalledTimes(
      expectedNumberOfConversations,
    ); // Adjust expected number
    expect(mockImportBatchBuilder.saveBatch).toHaveBeenCalled();
  });
  it('should maintain correct message hierarchy (tree parent/children relationship)', async () => {
    // Prepare test data with known hierarchy
    const jsonData = JSON.parse(
      fs.readFileSync(path.join(__dirname, '__data__', 'chatgpt-tree.json'), 'utf8'),
    );

    const requestUserId = 'user-123';
    const mockedBuilderFactory = jest.fn().mockReturnValue(new ImportBatchBuilder(requestUserId));

    // When
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId, mockedBuilderFactory);

    // Then
    expect(mockedBuilderFactory).toHaveBeenCalledWith(requestUserId);
    const mockImportBatchBuilder = mockedBuilderFactory.mock.results[0].value;

    const entries = Object.keys(jsonData[0].mapping);
    // Filter entries that should be processed (not system and have content)
    const messageEntries = entries.filter(
      (id) =>
        jsonData[0].mapping[id].message &&
        jsonData[0].mapping[id].message.author.role !== 'system' &&
        jsonData[0].mapping[id].message.content,
    );

    // Expect the saveMessage to be called for each valid entry
    expect(mockImportBatchBuilder.saveMessage).toHaveBeenCalledTimes(messageEntries.length);

    const idToUUIDMap = new Map();
    // Map original IDs to dynamically generated UUIDs
    mockImportBatchBuilder.saveMessage.mock.calls.forEach((call, index) => {
      const originalId = messageEntries[index];
      idToUUIDMap.set(originalId, call[0].messageId);
    });

    // Validate the UUID map contains all expected entries
    expect(idToUUIDMap.size).toBe(messageEntries.length);

    // Validate correct parent-child relationships
    messageEntries.forEach((id) => {
      const { parent } = jsonData[0].mapping[id];

      const expectedParentId = parent
        ? idToUUIDMap.get(parent) ?? Constants.NO_PARENT
        : Constants.NO_PARENT;

      const actualParentId = idToUUIDMap.get(id)
        ? mockImportBatchBuilder.saveMessage.mock.calls.find(
          (call) => call[0].messageId === idToUUIDMap.get(id),
        )[0].parentMessageId
        : Constants.NO_PARENT;

      expect(actualParentId).toBe(expectedParentId);
    });

    expect(mockImportBatchBuilder.saveBatch).toHaveBeenCalled();
  });
});

describe('importLibreChatConvo', () => {
  it('should import conversation correctly', async () => {
    const expectedNumberOfMessages = 6;
    const expectedNumberOfConversations = 1;

    // Given
    const jsonData = JSON.parse(
      fs.readFileSync(path.join(__dirname, '__data__', 'librechat-export.json'), 'utf8'),
    );
    const requestUserId = 'user-123';
    const mockedBuilderFactory = jest.fn().mockReturnValue(new ImportBatchBuilder(requestUserId));

    // When
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId, mockedBuilderFactory);

    // Then
    const mockImportBatchBuilder = mockedBuilderFactory.mock.results[0].value;
    expect(mockImportBatchBuilder.startConversation).toHaveBeenCalledWith(EModelEndpoint.openAI);
    expect(mockImportBatchBuilder.saveMessage).toHaveBeenCalledTimes(expectedNumberOfMessages); // Adjust expected number
    expect(mockImportBatchBuilder.finishConversation).toHaveBeenCalledTimes(
      expectedNumberOfConversations,
    ); // Adjust expected number
    expect(mockImportBatchBuilder.saveBatch).toHaveBeenCalled();
  });
  it('should maintain correct message hierarchy (tree parent/children relationship)', async () => {
    // Load test data
    const jsonData = JSON.parse(
      fs.readFileSync(path.join(__dirname, '__data__', 'librechat-tree.json'), 'utf8'),
    );
    const requestUserId = 'user-123';
    const mockedBuilderFactory = jest.fn().mockReturnValue(new ImportBatchBuilder(requestUserId));

    // When
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId, mockedBuilderFactory);

    // Then
    const mockImportBatchBuilder = mockedBuilderFactory.mock.results[0].value;

    // Create a map to track original message IDs to new UUIDs
    const idToUUIDMap = new Map();
    mockImportBatchBuilder.saveMessage.mock.calls.forEach((call) => {
      const message = call[0];
      idToUUIDMap.set(message.originalMessageId, message.messageId);
    });

    // Function to recursively check children
    const checkChildren = (children, parentId) => {
      children.forEach((child) => {
        const childUUID = idToUUIDMap.get(child.messageId);
        const expectedParentId = idToUUIDMap.get(parentId) ?? null;
        const messageCall = mockImportBatchBuilder.saveMessage.mock.calls.find(
          (call) => call[0].messageId === childUUID,
        );

        const actualParentId = messageCall[0].parentMessageId;
        expect(actualParentId).toBe(expectedParentId);

        if (child.children && child.children.length > 0) {
          checkChildren(child.children, child.messageId);
        }
      });
    };

    // Start hierarchy validation from root messages
    checkChildren(jsonData.messagesTree, null); // Assuming root messages have no parent

    expect(mockImportBatchBuilder.saveBatch).toHaveBeenCalled();
  });
});

describe('importChatBotUiConvo', () => {
  it('should import custom conversation correctly', async () => {
    // Given
    const jsonData = JSON.parse(
      fs.readFileSync(path.join(__dirname, '__data__', 'chatbotui-export.json'), 'utf8'),
    );
    const requestUserId = 'custom-user-456';
    const mockedBuilderFactory = jest.fn().mockReturnValue(new ImportBatchBuilder(requestUserId));

    // When
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId, mockedBuilderFactory);

    // Then
    const mockImportBatchBuilder = mockedBuilderFactory.mock.results[0].value;
    expect(mockImportBatchBuilder.startConversation).toHaveBeenCalledWith('openAI');

    // User messages
    expect(mockImportBatchBuilder.addUserMessage).toHaveBeenCalledTimes(3);
    expect(mockImportBatchBuilder.addUserMessage).toHaveBeenNthCalledWith(
      1,
      'Hello what are you able to do?',
    );
    expect(mockImportBatchBuilder.addUserMessage).toHaveBeenNthCalledWith(
      3,
      'Give me the code that inverts binary tree in COBOL',
    );

    // GPT messages
    expect(mockImportBatchBuilder.addGptMessage).toHaveBeenCalledTimes(3);
    expect(mockImportBatchBuilder.addGptMessage).toHaveBeenNthCalledWith(
      1,
      expect.stringMatching(/^Hello! As an AI developed by OpenAI/),
      'gpt-4-1106-preview',
    );
    expect(mockImportBatchBuilder.addGptMessage).toHaveBeenNthCalledWith(
      3,
      expect.stringContaining('```cobol'),
      'gpt-3.5-turbo',
    );

    expect(mockImportBatchBuilder.finishConversation).toHaveBeenCalledTimes(2);
    expect(mockImportBatchBuilder.finishConversation).toHaveBeenNthCalledWith(
      1,
      'Hello what are you able to do?',
      expect.any(Date),
    );
    expect(mockImportBatchBuilder.finishConversation).toHaveBeenNthCalledWith(
      2,
      'Give me the code that inverts ...',
      expect.any(Date),
    );

    expect(mockImportBatchBuilder.saveBatch).toHaveBeenCalled();
  });
});

describe('getImporter', () => {
  it('should throw an error if the import type is not supported', () => {
    // Given
    const jsonData = { unsupported: 'data' };

    // When
    expect(() => getImporter(jsonData)).toThrow('Unsupported import type');
  });
});

5 api/server/utils/import/index.js Normal file
@@ -0,0 +1,5 @@
const importers = require('./importers');

module.exports = {
  ...importers,
};

41 api/server/utils/import/jobDefinition.js Normal file
@@ -0,0 +1,41 @@
const fs = require('fs').promises;
const jobScheduler = require('~/server/utils/jobScheduler');
const { getImporter } = require('./importers');
const { indexSync } = require('~/lib/db');
const { logger } = require('~/config');

const IMPORT_CONVERSATION_JOB_NAME = 'import conversation';

/**
 * Job definition for importing a conversation.
 * @param {import('agenda').Job} job - The job object.
 * @param {Function} done - The done function.
 */
const importConversationJob = async (job, done) => {
  const { filepath, requestUserId } = job.attrs.data;
  try {
    logger.debug(`user: ${requestUserId} | Importing conversation(s) from file...`);
    const fileData = await fs.readFile(filepath, 'utf8');
    const jsonData = JSON.parse(fileData);
    const importer = getImporter(jsonData);
    await importer(jsonData, requestUserId);
    // Sync Meilisearch index
    await indexSync();
    logger.debug(`user: ${requestUserId} | Finished importing conversations`);
    done();
  } catch (error) {
    logger.error(`user: ${requestUserId} | Failed to import conversation: `, error);
    done(error);
  } finally {
    try {
      await fs.unlink(filepath);
    } catch (error) {
      logger.error(`user: ${requestUserId} | Failed to delete file: ${filepath}`, error);
    }
  }
};

// Call the jobScheduler.define function at startup
jobScheduler.define(IMPORT_CONVERSATION_JOB_NAME, importConversationJob);

module.exports = { IMPORT_CONVERSATION_JOB_NAME };

99 api/server/utils/jobScheduler.js Normal file
@@ -0,0 +1,99 @@
const Agenda = require('agenda');
const { logger } = require('~/config');
const mongodb = require('mongodb');

/**
 * Class for scheduling and running jobs.
 * The workflow is as follows: start the job scheduler, define a job, and then schedule the job using the defined job name.
 */
class JobScheduler {
  constructor() {
    this.agenda = new Agenda({ db: { address: process.env.MONGO_URI } });
  }

  /**
   * Starts the job scheduler.
   */
  async start() {
    try {
      logger.info('Starting Agenda...');
      await this.agenda.start();
      logger.info('Agenda successfully started and connected to MongoDB.');
    } catch (error) {
      logger.error('Failed to start Agenda:', error);
    }
  }

  /**
   * Schedules a job to start immediately.
   * @param {string} jobName - The name of the job to schedule.
   * @param {string} filepath - The filepath to pass to the job.
   * @param {string} userId - The ID of the user requesting the job.
   * @returns {Promise<{ id: string }>} - A promise that resolves with the ID of the scheduled job.
   * @throws {Error} - If the job fails to schedule.
   */
  async now(jobName, filepath, userId) {
    try {
      const job = await this.agenda.now(jobName, { filepath, requestUserId: userId });
      logger.debug(`Job '${job.attrs.name}' scheduled successfully.`);
      return { id: job.attrs._id.toString() };
    } catch (error) {
      throw new Error(`Failed to schedule job '${jobName}': ${error}`);
    }
  }

  /**
   * Gets the status of a job.
   * @param {string} jobId - The ID of the job to get the status of.
   * @returns {Promise<{ id: string, userId: string, name: string, failReason: string, status: string } | null>} - A promise that resolves with the job status or null if the job is not found.
   * @throws {Error} - If multiple jobs are found.
   */
  async getJobStatus(jobId) {
    const job = await this.agenda.jobs({ _id: new mongodb.ObjectId(jobId) });
    if (!job || job.length === 0) {
      return null;
    }

    if (job.length > 1) {
      // This should never happen
      throw new Error('Multiple jobs found.');
    }

    const jobDetails = {
      id: job[0]._id,
      userId: job[0].attrs.data.requestUserId,
      name: job[0].attrs.name,
      failReason: job[0].attrs.failReason,
      status: !job[0].attrs.lastRunAt
        ? 'scheduled'
        : job[0].attrs.failedAt
          ? 'failed'
          : job[0].attrs.lastFinishedAt
            ? 'completed'
            : 'running',
    };

    return jobDetails;
  }

  /**
   * Defines a new job.
   * @param {string} name - The name of the job.
   * @param {Function} jobFunction - The function to run when the job is executed.
   */
  define(name, jobFunction) {
    this.agenda.define(name, async (job, done) => {
      try {
        await jobFunction(job, done);
      } catch (error) {
        logger.error(`Failed to run job '${name}': ${error}`);
        done(error);
      }
    });
  }
}

const jobScheduler = new JobScheduler();
jobScheduler.start();

module.exports = jobScheduler;

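A rough sketch of how the scheduler and the import job definition fit together from the calling side; this is illustrative only, and the file path and user ID are placeholders (in the application they come from the import upload route and the authenticated request).

// Illustrative sketch, not part of the PR.
const jobScheduler = require('~/server/utils/jobScheduler');
const { IMPORT_CONVERSATION_JOB_NAME } = require('~/server/utils/import/jobDefinition');

async function startImport() {
  // Enqueue the import job for immediate execution; Agenda persists it in MongoDB.
  const { id } = await jobScheduler.now(
    IMPORT_CONVERSATION_JOB_NAME,
    '/tmp/uploads/conversations.json', // placeholder path to the uploaded JSON file
    'user-123', // placeholder user ID
  );

  // The client later polls this status until it reports 'completed' or 'failed'.
  const status = await jobScheduler.getJobStatus(id);
  console.log(status); // { id, userId, name, failReason, status }
}
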
@@ -1,6 +1,7 @@
// See .env.test.example for an example of the '.env.test' file.
require('dotenv').config({ path: './test/.env.test' });

process.env.MONGO_URI = 'mongodb://127.0.0.1:27017/dummy-uri';
process.env.BAN_VIOLATIONS = 'true';
process.env.BAN_DURATION = '7200000';
process.env.BAN_INTERVAL = '20';

@@ -1202,3 +1202,58 @@
 * @property {OllamaModel[]} models - the list of models available.
 * @memberof typedefs
 */

/**
 * @typedef {Object} ChatGPTAuthor
 * @property {string} role - The role of the author (e.g., 'assistant', 'system', 'user').
 * @property {?string} name - The name of the author, if available.
 * @property {Object} metadata - Additional metadata related to the author.
 * @memberof typedefs
 */

/**
 * @typedef {Object} ChatGPTContentPart
 * @property {string} content_type - The type of content (e.g., 'text').
 * @property {string[]} parts - The textual parts of the message.
 * @memberof typedefs
 */

/**
 * @typedef {Object} ChatGPTMetadata
 * @property {boolean} is_visually_hidden_from_conversation - Indicates if the message should be hidden.
 * @property {?Array<Object>} citations - Potential citations included in the message.
 * @memberof typedefs
 */

/**
 * @typedef {Object} ChatGPTMessage
 * @property {string} id - Unique identifier for the message.
 * @property {?ChatGPTAuthor} author - The author of the message.
 * @property {?number} create_time - Creation time as a Unix timestamp.
 * @property {?number} update_time - Last update time as a Unix timestamp.
 * @property {ChatGPTContentPart} content - Content of the message.
 * @property {string} status - Status of the message (e.g., 'finished_successfully').
 * @property {boolean} end_turn - Indicates if it's the end of a conversation turn.
 * @property {number} weight - A numerical value representing the weight/importance of the message.
 * @property {ChatGPTMetadata} metadata - Metadata associated with the message.
 * @property {string} recipient - Intended recipient of the message.
 * @memberof typedefs
 */

/**
 * @typedef {Object} ChatGPTMapping
 * @property {ChatGPTMessage} message - Details of the message.
 * @property {string} id - Identifier of the message.
 * @property {?string} parent - Parent message ID.
 * @property {string[]} children - Child message IDs.
 * @memberof typedefs
 */

/**
 * @typedef {Object} ChatGPTConvo
 * @property {string} title - Title of the conversation.
 * @property {number} create_time - Creation time of the conversation as a Unix timestamp.
 * @property {number} update_time - Last update time of the conversation as a Unix timestamp.
 * @property {Object.<string, ChatGPTMapping>} mapping - Mapping of message nodes within the conversation.
 * @memberof typedefs
 */

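To make these typedefs concrete, a minimal object matching the ChatGPTConvo and ChatGPTMapping shapes might look like the following; this is an invented sample (IDs, timestamps, and text are placeholders), not data from the PR.

// Illustrative sample only.
/** @type {ChatGPTConvo} */
const sampleConvo = {
  title: 'Sample chat',
  create_time: 1713100000,
  update_time: 1713100100,
  mapping: {
    'node-1': {
      id: 'node-1',
      parent: null,
      children: [],
      message: {
        id: 'node-1',
        author: { role: 'user', name: null, metadata: {} },
        create_time: 1713100000,
        update_time: null,
        content: { content_type: 'text', parts: ['Hello!'] },
        status: 'finished_successfully',
        end_turn: true,
        weight: 1,
        metadata: { is_visually_hidden_from_conversation: false, citations: [] },
        recipient: 'all',
      },
    },
  },
};
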
@@ -7,6 +7,7 @@ import { SettingsTabValues } from 'librechat-data-provider';
import React, { useState, useCallback, useRef } from 'react';
import { useOnClickOutside } from '~/hooks';
import DangerButton from '../DangerButton';
import ImportConversations from './ImportConversations';

export const RevokeKeysButton = ({
  showText = true,
@@ -76,6 +77,9 @@ function Data() {
      <div className="border-b pb-3 last-of-type:border-b-0 dark:border-gray-700">
        <RevokeKeysButton all={true} />
      </div>
      <div className="border-b pb-3 last-of-type:border-b-0 dark:border-gray-700">
        <ImportConversations />
      </div>
    </div>
  </Tabs.Content>
);

@@ -0,0 +1,87 @@
import { Import } from 'lucide-react';
import { cn } from '~/utils';
import { useUploadConversationsMutation } from '~/data-provider';
import { useLocalize, useConversations } from '~/hooks';
import { useState } from 'react';
import { useToastContext } from '~/Providers';

function ImportConversations() {
  const localize = useLocalize();

  const { showToast } = useToastContext();
  const [, setErrors] = useState<string[]>([]);
  const setError = (error: string) => setErrors((prevErrors) => [...prevErrors, error]);
  const { refreshConversations } = useConversations();

  const uploadFile = useUploadConversationsMutation({
    onSuccess: () => {
      refreshConversations();
      showToast({ message: localize('com_ui_import_conversation_success') });
    },
    onError: (error) => {
      console.error('Error: ', error);
      setError(
        (error as { response: { data: { message?: string } } })?.response?.data?.message ??
          'An error occurred while uploading the file.',
      );
      if (error?.toString().includes('Unsupported import type')) {
        showToast({
          message: localize('com_ui_import_conversation_file_type_error'),
          status: 'error',
        });
      } else {
        showToast({ message: localize('com_ui_import_conversation_error'), status: 'error' });
      }
    },
  });

  const startUpload = async (file: File) => {
    const formData = new FormData();
    formData.append('file', file, encodeURIComponent(file?.name || 'File'));

    uploadFile.mutate(formData);
  };

  const handleFiles = async (_file: File) => {
    console.log('Handling files...');

    /* Process files */
    try {
      await startUpload(_file);
    } catch (error) {
      console.log('file handling error', error);
      setError('An error occurred while processing the file.');
    }
  };

  const handleFileChange = (event) => {
    console.log('file change');
    const file = event.target.files[0];
    if (file) {
      handleFiles(file);
    }
  };

  return (
    <div className="flex items-center justify-between">
      <span>{localize('com_ui_import_conversation_info')}</span>
      <label
        htmlFor={'import-conversations-file'}
        className="flex h-auto cursor-pointer items-center rounded bg-transparent px-2 py-1 text-xs font-medium font-normal transition-colors hover:bg-gray-100 hover:text-green-700 dark:bg-transparent dark:text-white dark:hover:bg-gray-600 dark:hover:text-green-500"
      >
        <Import className="mr-1 flex w-[22px] items-center stroke-1" />
        <span>{localize('com_ui_import_conversation')}</span>
        <input
          id={'import-conversations-file'}
          value=""
          type="file"
          className={cn('hidden')}
          accept=".json"
          onChange={handleFileChange}
        />
      </label>
    </div>
  );
}

export default ImportConversations;

@@ -6,8 +6,10 @@ import type {
  TFile,
  BatchFile,
  TFileUpload,
  TImportStartResponse,
  AssistantListResponse,
  UploadMutationOptions,
  UploadConversationsMutationOptions,
  DeleteFilesResponse,
  DeleteFilesBody,
  DeleteMutationOptions,
@@ -131,6 +133,89 @@ export const useDeleteConversationMutation = (
  );
};

export const useUploadConversationsMutation = (_options?: UploadConversationsMutationOptions) => {
  const queryClient = useQueryClient();
  const { onSuccess, onError } = _options || {};

  // returns the job status or reason of failure
  const checkJobStatus = async (jobId) => {
    try {
      const response = await dataService.queryImportConversationJobStatus(jobId);
      return response;
    } catch (error) {
      throw new Error('Failed to check job status');
    }
  };

  // Polls the job status until it is completed, failed, or timed out
  const pollJobStatus = (jobId, onSuccess, onError) => {
    let timeElapsed = 0;
    const timeout = 60000; // Timeout after a minute
    const pollInterval = 500; // Poll every 500ms
    const intervalId = setInterval(async () => {
      try {
        const statusResponse = await checkJobStatus(jobId);
        console.log('Polling job status', statusResponse);
        if (statusResponse.status === 'completed' || statusResponse.status === 'failed') {
          clearInterval(intervalId);
          if (statusResponse.status === 'completed') {
            onSuccess && onSuccess(statusResponse);
          } else {
            onError &&
              onError(
                new Error(
                  statusResponse.failReason
                    ? statusResponse.failReason
                    : 'Failed to import conversations',
                ),
              );
          }
        }
        timeElapsed += pollInterval; // Increment time elapsed by polling interval
        if (timeElapsed >= timeout) {
          clearInterval(intervalId);
          onError && onError(new Error('Polling timed out'));
        }
      } catch (error) {
        clearInterval(intervalId);
        onError && onError(error);
      }
    }, pollInterval);
  };
  return useMutation<TImportStartResponse, unknown, FormData>({
    mutationFn: (formData: FormData) => dataService.importConversationsFile(formData),
    onSuccess: (data, variables, context) => {
      queryClient.invalidateQueries([QueryKeys.allConversations]);
      // Assuming the job ID is in the response data
      const jobId = data.jobId;
      if (jobId) {
        // Start polling for job status
        pollJobStatus(
          jobId,
          (statusResponse) => {
            // This is the final success callback when the job is completed
            queryClient.invalidateQueries([QueryKeys.allConversations]); // Optionally refresh conversations query
            if (onSuccess) {
              onSuccess(statusResponse, variables, context);
            }
          },
          (error) => {
            // This is the error callback for job failure or polling errors
            if (onError) {
              onError(error, variables, context);
            }
          },
        );
      }
    },
    onError: (err, variables, context) => {
      if (onError) {
        onError(err, variables, context);
      }
    },
  });
};

export const useUploadFileMutation = (
  _options?: UploadMutationOptions,
): UseMutationResult<

@@ -45,6 +45,10 @@ export default {
  com_ui_clear: 'مسح',
  com_ui_revoke: 'إلغاء',
  com_ui_revoke_info: 'إلغاء جميع بيانات الاعتماد المقدمة من المستخدم.',
  com_ui_import_conversation: 'استيراد',
  com_ui_import_conversation_info: 'استيراد محادثات من ملف JSON',
  com_ui_import_conversation_success: 'تم استيراد المحادثات بنجاح',
  com_ui_import_conversation_error: 'حدث خطأ أثناء استيراد محادثاتك',
  com_ui_confirm_action: 'تأكيد الإجراء',
  com_ui_chats: 'الدردشات',
  com_ui_delete: 'حذف',

@@ -119,6 +119,10 @@ export default {
  com_ui_clear: 'Limpar',
  com_ui_revoke: 'Revogar',
  com_ui_revoke_info: 'Revogar todas as credenciais fornecidas pelo usuário',
  com_ui_import_conversation: 'Importar',
  com_ui_import_conversation_info: 'Importe conversas de um arquivo JSON',
  com_ui_import_conversation_success: 'Conversas importadas com sucesso',
  com_ui_import_conversation_error: 'Houve um erro ao importar suas conversas',
  com_ui_confirm_action: 'Confirmar Ação',
  com_ui_chats: 'conversas',
  com_ui_avatar: 'Avatar',

@@ -125,6 +125,10 @@ export default {
  com_ui_clear: 'Löschen',
  com_ui_revoke: 'Widerrufen',
  com_ui_revoke_info: 'Widerrufe alle vom Benutzer angegebenen Anmeldeinformationen',
  com_ui_import_conversation: 'Importieren',
  com_ui_import_conversation_info: 'Chats aus einer JSON-Datei importieren',
  com_ui_import_conversation_success: 'Chats erfolgreich importiert',
  com_ui_import_conversation_error: 'Beim Importieren Ihrer Chats ist ein Fehler aufgetreten',
  com_ui_confirm_action: 'Bestätige Aktion',
  com_ui_chats: 'Chats',
  com_ui_avatar: 'Avatar',

@@ -133,6 +133,11 @@ export default {
  com_ui_clear: 'Clear',
  com_ui_revoke: 'Revoke',
  com_ui_revoke_info: 'Revoke all user provided credentials',
  com_ui_import_conversation: 'Import',
  com_ui_import_conversation_info: 'Import conversations from a JSON file',
  com_ui_import_conversation_success: 'Conversations imported successfully',
  com_ui_import_conversation_error: 'There was an error importing your conversations',
  com_ui_import_conversation_file_type_error: 'Unsupported import type',
  com_ui_confirm_action: 'Confirm Action',
  com_ui_chats: 'chats',
  com_ui_avatar: 'Avatar',

@@ -121,6 +121,10 @@ export default {
  com_ui_clear: 'Limpiar',
  com_ui_revoke: 'Revocar',
  com_ui_revoke_info: 'Revocar todas las credenciales proporcionadas por el usuario',
  com_ui_import_conversation: 'Importar',
  com_ui_import_conversation_info: 'Importar chats de un archivo JSON',
  com_ui_import_conversation_success: 'Chats importados exitosamente',
  com_ui_import_conversation_error: 'Hubo un error al importar tus chats',
  com_ui_confirm_action: 'Confirmar Acción',
  com_ui_chats: 'conversaciones',
  com_ui_avatar: 'Avatar',

@@ -58,6 +58,11 @@ export default {
  com_ui_revoke: 'Révoquer',
  com_ui_revoke_info:
    'Révoquer toutes les informations d\'identification fournies par l\'utilisateur',
  com_ui_import_conversation: 'Importer',
  com_ui_import_conversation_info: 'Importer des conversations à partir d’un fichier JSON',
  com_ui_import_conversation_success: 'Conversations importées avec succès',
  com_ui_import_conversation_error:
    'Une erreur s’est produite lors de l’importation de vos conversations',
  com_ui_confirm_action: 'Confirmer l\'action',
  com_ui_chats: 'discussions',
  com_ui_delete: 'Supprimer',

@@ -82,6 +82,10 @@ export default {
  com_ui_clear: 'נקה',
  com_ui_revoke: 'בטל',
  com_ui_revoke_info: 'בטל את כל האישורים שסופקו על ידי המשתמש',
  com_ui_import_conversation: 'יבוא',
  com_ui_import_conversation_info: 'ייבא שיחות מקובץ JSON',
  com_ui_import_conversation_success: 'השיחות יובאו בהצלחה',
  com_ui_import_conversation_error: 'אירעה שגיאה בעת ייבוא השיחות שלך',
  com_ui_confirm_action: 'אשר פעולה',
  com_ui_chats: 'צאטים',
  com_ui_assistant: 'סייען',

@@ -55,6 +55,10 @@ export default {
  com_ui_clear: 'Bersihkan',
  com_ui_revoke: 'Cabut',
  com_ui_revoke_info: 'Cabut semua kredensial yang diberikan pengguna',
  com_ui_import_conversation: 'Impor',
  com_ui_import_conversation_info: 'Impor percakapan dari file JSON',
  com_ui_import_conversation_success: 'Percakapan berhasil diimpor',
  com_ui_import_conversation_error: 'Terjadi kesalahan saat mengimpor percakapan Anda',
  com_ui_confirm_action: 'Konfirmasi Aksi',
  com_ui_chats: 'chat',
  com_ui_delete: 'Hapus',

@@ -53,6 +53,11 @@ export default {
  com_ui_clear: 'Pulisci',
  com_ui_revoke: 'Revoca',
  com_ui_revoke_info: 'Revoca tutte le credenziali fornite dall\'utente',
  com_ui_import_conversation: 'Importa',
  com_ui_import_conversation_info: 'Importa conversazioni da un file JSON',
  com_ui_import_conversation_success: 'Conversazioni importate con successo',
  com_ui_import_conversation_error:
    'Si è verificato un errore durante l’importazione delle tue conversazioni',
  com_ui_confirm_action: 'Conferma azione',
  com_ui_chats: 'chat',
  com_ui_delete: 'Elimina',

@@ -130,6 +130,10 @@ export default {
  com_ui_clear: '削除する',
  com_ui_revoke: '無効にする',
  com_ui_revoke_info: 'ユーザへ発行した認証情報をすべて無効にする。',
  com_ui_import_conversation: 'インポート',
  com_ui_import_conversation_info: 'JSONファイルから会話をインポートする',
  com_ui_import_conversation_success: '会話のインポートに成功しました',
  com_ui_import_conversation_error: '会話のインポート時にエラーが発生しました',
  com_ui_confirm_action: '実行する',
  com_ui_chats: 'チャット',
  com_ui_avatar: 'アバター',

@@ -44,6 +44,10 @@ export default {
  com_ui_clear: '지우기',
  com_ui_revoke: '취소',
  com_ui_revoke_info: '사용자가 제공한 자격 증명을 모두 취소합니다.',
  com_ui_import_conversation: '가져오기',
  com_ui_import_conversation_info: 'JSON 파일에서 대화 가져오기',
  com_ui_import_conversation_success: '대화가 성공적으로 가져와졌습니다',
  com_ui_import_conversation_error: '대화를 가져오는 동안 오류가 발생했습니다',
  com_ui_confirm_action: '작업 확인',
  com_ui_chats: '채팅',
  com_ui_delete: '삭제',

@@ -47,6 +47,11 @@ export default {
  com_ui_clear: 'Wissen',
  com_ui_revoke: 'Intrekken',
  com_ui_revoke_info: 'Trek alle door de gebruiker verstrekte referenties in',
  com_ui_import_conversation: 'Importeren',
  com_ui_import_conversation_info: 'Gesprekken importeren vanuit een JSON-bestand',
  com_ui_import_conversation_success: 'Gesprekken succesvol geïmporteerd',
  com_ui_import_conversation_error:
    'Er is een fout opgetreden bij het importeren van je gesprekken',
  com_ui_confirm_action: 'Bevestig actie',
  com_ui_chats: 'chats',
  com_ui_delete: 'Verwijderen',

@@ -205,4 +205,8 @@ export default {
  com_nav_settings: 'Ustawienia',
  com_nav_search_placeholder: 'Szukaj wiadomości',
  com_nav_setting_general: 'Ogólne',
  com_ui_import_conversation: 'Importuj',
  com_ui_import_conversation_info: 'Importuj konwersacje z pliku JSON',
  com_ui_import_conversation_success: 'Konwersacje zostały pomyślnie zaimportowane',
  com_ui_import_conversation_error: 'Wystąpił błąd podczas importowania konwersacji',
};

@@ -56,6 +56,10 @@ export default {
  com_ui_clear: 'Удалить',
  com_ui_revoke: 'Отозвать',
  com_ui_revoke_info: 'Отозвать все предоставленные пользователем учетные данные',
  com_ui_import_conversation: 'Импортировать',
  com_ui_import_conversation_info: 'Импортировать беседы из файла JSON',
  com_ui_import_conversation_success: 'Беседы успешно импортированы',
  com_ui_import_conversation_error: 'При импорте бесед произошла ошибка',
  com_ui_confirm_action: 'Подтвердить действие',
  com_ui_chats: 'чаты',
  com_ui_delete: 'Удалить',

@@ -45,6 +45,10 @@ export default {
  com_ui_clear: 'Rensa',
  com_ui_revoke: 'Återkalla',
  com_ui_revoke_info: 'Återkalla alla användaruppgifter.',
  com_ui_import_conversation: 'Importera',
  com_ui_import_conversation_info: 'Importera konversationer från en JSON-fil',
  com_ui_import_conversation_success: 'Konversationer har importerats framgångsrikt',
  com_ui_import_conversation_error: 'Det uppstod ett fel vid import av dina konversationer',
  com_ui_confirm_action: 'Bekräfta åtgärd',
  com_ui_chats: 'chattar',
  com_ui_delete: 'Radera',

@@ -47,6 +47,10 @@ export default {
  com_ui_clear: 'Temizle',
  com_ui_revoke: 'İptal et',
  com_ui_revoke_info: 'Tüm kullanıcı tarafından verilen kimlik bilgilerini iptal et.',
  com_ui_import_conversation: 'İçe Aktar',
  com_ui_import_conversation_info: 'Bir JSON dosyasından sohbetleri içe aktar',
  com_ui_import_conversation_success: 'Sohbetler başarıyla içe aktarıldı',
  com_ui_import_conversation_error: 'Sohbetlerinizi içe aktarırken bir hata oluştu',
  com_ui_confirm_action: 'İşlemi Onayla',
  com_ui_chats: 'sohbetler',
  com_ui_delete: 'Sil',

@@ -47,6 +47,10 @@ export default {
  com_ui_clear: 'Xóa',
  com_ui_revoke: 'Hủy bỏ',
  com_ui_revoke_info: 'Hủy bỏ tất cả các thông tin xác thực được cung cấp bởi người dùng.',
  com_ui_import_conversation: 'Nhập khẩu',
  com_ui_import_conversation_info: 'Nhập khẩu cuộc trò chuyện từ một tệp JSON',
  com_ui_import_conversation_success: 'Đã nhập khẩu cuộc trò chuyện thành công',
  com_ui_import_conversation_error: 'Đã xảy ra lỗi khi nhập khẩu cuộc trò chuyện của bạn',
  com_ui_confirm_action: 'Xác nhận hành động',
  com_ui_chats: 'cuộc trò chuyện',
  com_ui_delete: 'Xóa',

@@ -13,14 +13,12 @@ export default {
  com_sidepanel_manage_files: '管理文件',
  com_assistants_capabilities: '功能',
  com_assistants_knowledge: '知识',
  com_assistants_knowledge_info:
    '如果您在“知识”中上传文件,与助手的对话可能包括文件内容。',
  com_assistants_knowledge_info: '如果您在“知识”中上传文件,与助手的对话可能包括文件内容。',
  com_assistants_knowledge_disabled:
    '必须创建助手,且启用并保存代码解释器或检索,才能将文件作为知识上传。',
  com_assistants_image_vision: '识图',
  com_assistants_code_interpreter: '代码解释器',
  com_assistants_code_interpreter_files:
    '以下文件仅适用于代码解释器:',
  com_assistants_code_interpreter_files: '以下文件仅适用于代码解释器:',
  com_assistants_retrieval: '检索',
  com_assistants_search_name: '按名称搜索助手',
  com_assistants_tools: '工具',
@@ -50,8 +48,7 @@ export default {
  com_ui_download_error: '下载文件时出错,该文件可能已被删除。',
  com_ui_attach_error_type: '渠道不支持的文件类型:',
  com_ui_attach_error_size: '超出渠道规定的文件大小:',
  com_ui_attach_error:
    '无法附加文件,请创建或选择一个对话,或尝试刷新页面。',
  com_ui_attach_error: '无法附加文件,请创建或选择一个对话,或尝试刷新页面。',
  com_ui_examples: '示例',
  com_ui_new_chat: '创建新对话',
  com_ui_happy_birthday: '这是我的第一个生日!',
@@ -64,8 +61,7 @@ export default {
  com_ui_capability_decline_requests: '限制不当信息',
  com_ui_limitations: '局限性',
  com_ui_limitation_incorrect_info: '可能会不时出现错误信息',
  com_ui_limitation_harmful_biased:
    '可能会提供有害指示或者偏见',
  com_ui_limitation_harmful_biased: '可能会提供有害指示或者偏见',
  com_ui_limitation_limited_2021: '基于2021年以前信息训练',
  com_ui_experimental: '实验性',
  com_ui_ascending: '升序',
@@ -120,6 +116,10 @@ export default {
  com_ui_clear: '清除',
  com_ui_revoke: '撤销',
  com_ui_revoke_info: '撤销所有用户提供的凭据',
  com_ui_import_conversation: '导入',
  com_ui_import_conversation_info: '从JSON文件导入对话',
  com_ui_import_conversation_success: '对话导入成功',
  com_ui_import_conversation_error: '导入对话时发生错误',
  com_ui_confirm_action: '确认执行',
  com_ui_chats: '聊天',
  com_ui_avatar: '头像',
@@ -134,23 +134,17 @@ export default {
  com_ui_create: '创建',
  com_ui_delete_conversation: '删除对话?',
  com_ui_delete_conversation_confirm: '这将删除',
  com_ui_delete_assistant_confirm:
    '确定要删除此助手吗?该操作无法撤销。',
  com_ui_delete_assistant_confirm: '确定要删除此助手吗?该操作无法撤销。',
  com_ui_preview: '预览',
  com_ui_upload: '上传',
  com_ui_connect: '连接',
  com_ui_upload_delay:
    '上传 "{0}" 时比预期花了更长时间。 文件正在进行检索索引,请稍候。',
  com_ui_upload_delay: '上传 "{0}" 时比预期花了更长时间。 文件正在进行检索索引,请稍候。',
  com_ui_privacy_policy: '隐私政策',
  com_ui_terms_of_service: '服务政策',
  com_auth_error_login:
    '无法登录,请确认提供的账户密码正确,并重新尝试。',
  com_auth_error_login_rl:
    '尝试登录次数过多,请稍后再试。',
  com_auth_error_login_ban:
    '根据我们的服务规则,您的帐号被暂时禁用。',
  com_auth_error_login_server:
    '内部服务器错误,请稍后再试。',
  com_auth_error_login: '无法登录,请确认提供的账户密码正确,并重新尝试。',
  com_auth_error_login_rl: '尝试登录次数过多,请稍后再试。',
  com_auth_error_login_ban: '根据我们的服务规则,您的帐号被暂时禁用。',
  com_auth_error_login_server: '内部服务器错误,请稍后再试。',
  com_auth_no_account: '新用户注册',
  com_auth_sign_up: '注册',
  com_auth_sign_in: '登录',
@@ -173,8 +167,7 @@ export default {
  com_auth_password_not_match: '密码不一致',
  com_auth_continue: '继续',
  com_auth_create_account: '创建账号',
  com_auth_error_create:
    '注册账户过程中出现错误,请重试。',
  com_auth_error_create: '注册账户过程中出现错误,请重试。',
  com_auth_full_name: '姓名',
  com_auth_name_required: '姓名为必填项',
  com_auth_name_min_length: '姓名至少3个字符',
@@ -190,10 +183,8 @@ export default {
  com_auth_here: '这里',
  com_auth_to_reset_your_password: '重置密码。',
  com_auth_reset_password_link_sent: '重置密码链接已发送至邮箱',
  com_auth_reset_password_email_sent:
    '重置密码邮件已发送至邮箱',
  com_auth_error_reset_password:
    '重置密码出现错误,未找到对应的邮箱地址,请重新输入。',
  com_auth_reset_password_email_sent: '重置密码邮件已发送至邮箱',
  com_auth_error_reset_password: '重置密码出现错误,未找到对应的邮箱地址,请重新输入。',
  com_auth_reset_password_success: '密码重置成功',
  com_auth_login_with_new_password: '现在你可以使用你的新密码登录。',
  com_auth_error_invalid_reset_token: '重置密码的密钥已失效。',
@@ -247,8 +238,7 @@ export default {
  com_endpoint_max_output_tokens: '最大输出词元数',
  com_endpoint_openai_temp:
    '值越高表示输出越随机,值越低表示输出越确定。建议不要同时改变此值和Top P。',
  com_endpoint_openai_max:
    '最大生成词元数。输入词元长度由模型的上下文长度决定。',
  com_endpoint_openai_max: '最大生成词元数。输入词元长度由模型的上下文长度决定。',
  com_endpoint_openai_topp:
    '相较于随机性的另一个取样方法,称为核采样,模型选取输出词元中大于P值(概率密度在整个概率分布中的比例)的结果。比如 top_p=0.1 表示只有概率占比为前10%的词元才会被考虑作为输出。建议不要同时改变此值和随机性。',
  com_endpoint_openai_freq:
@@ -262,8 +252,7 @@ export default {
  com_endpoint_openai_detail:
    '发送给Vision的图像分辨率。 “Low”更便宜且更快,“High”更详细但更昂贵,“Auto”将基于图像分辨率自动在两者之间进行选择。',
  com_endpoint_openai_custom_name_placeholder: '为ChatGPT设置一个名称',
|
||||
com_endpoint_openai_prompt_prefix_placeholder:
|
||||
'在消息开头添加系统级提示词,默认为空',
|
||||
com_endpoint_openai_prompt_prefix_placeholder: '在消息开头添加系统级提示词,默认为空',
|
||||
com_endpoint_anthropic_temp:
|
||||
'值介于0到1之间。 对于分析性/选择性任务,值应更接近0;对于创造性和生成性任务,值应更接近1。我们建议更改该参数或Top-p,但不要同时更改这两个参数。',
|
||||
com_endpoint_anthropic_topp:
|
||||
|
@ -316,8 +305,7 @@ export default {
|
|||
com_endpoint_use_active_assistant: '使用激活的助手',
|
||||
com_endpoint_assistant_model: '助手模型',
|
||||
com_endpoint_save_as_preset: '保存为预设',
|
||||
com_endpoint_presets_clear_warning:
|
||||
'确定要清除所有预设吗?此操作不可逆。',
|
||||
com_endpoint_presets_clear_warning: '确定要清除所有预设吗?此操作不可逆。',
|
||||
com_endpoint_not_implemented: '未实现功能',
|
||||
com_endpoint_no_presets: '暂无预设,使用设置按钮创建一个。',
|
||||
com_endpoint_not_available: '无可用渠道',
|
||||
|
@ -327,8 +315,7 @@ export default {
|
|||
com_endpoint_agent_model: '代理模型 (推荐: GPT-3.5)',
|
||||
com_endpoint_completion_model: '补全模型 (推荐: GPT-4)',
|
||||
com_endpoint_func_hover: '将插件作为OpenAI函数使用',
|
||||
com_endpoint_skip_hover:
|
||||
'跳过补全步骤, 检查最终答案和生成步骤',
|
||||
com_endpoint_skip_hover: '跳过补全步骤, 检查最终答案和生成步骤',
|
||||
com_endpoint_config_key: '设置API Key',
|
||||
com_endpoint_assistant_placeholder: '请从右侧面板中选择助手',
|
||||
com_endpoint_config_placeholder: '在顶部菜单设置API KEY',
|
||||
|
@ -346,8 +333,7 @@ export default {
|
|||
com_endpoint_config_google_api_info: '获取您的生成式语言API密钥(Gemini),',
|
||||
com_endpoint_config_key_import_json_key: '导入服务账号JSON密钥',
|
||||
com_endpoint_config_key_import_json_key_success: '成功导入服务账号JSON密钥',
|
||||
com_endpoint_config_key_import_json_key_invalid:
|
||||
'无效的服务账号JSON密钥,您是否导入正确的文件?',
|
||||
com_endpoint_config_key_import_json_key_invalid: '无效的服务账号JSON密钥,您是否导入正确的文件?',
|
||||
com_endpoint_config_key_get_edge_key: '为获得Bing访问凭证(Access token),请登录:',
|
||||
com_endpoint_config_key_get_edge_key_dev_tool:
|
||||
'登录网站后,使用开发工具或扩展程序复制 _U cookie 的内容。如果失败,请按照以下步骤操作:',
|
||||
|
@ -383,8 +369,7 @@ export default {
|
|||
com_show_examples: '显示样例',
|
||||
com_nav_plugin_search: '搜索插件',
|
||||
com_nav_tool_search: '搜索工具',
|
||||
com_nav_plugin_auth_error:
|
||||
'尝试验证此插件时出错。请重试。',
|
||||
com_nav_plugin_auth_error: '尝试验证此插件时出错。请重试。',
|
||||
com_nav_export_filename: '文件名',
|
||||
com_nav_export_filename_placeholder: '设置文件名',
|
||||
com_nav_export_type: '类型',
|
||||
|
@ -410,8 +395,7 @@ export default {
|
|||
com_nav_log_out: '注销',
|
||||
com_nav_user: '默认用户',
|
||||
com_nav_clear_conversation: '清空对话',
|
||||
com_nav_clear_conversation_confirm_message:
|
||||
'请是否清空所有对话?该操作无法撤销',
|
||||
com_nav_clear_conversation_confirm_message: '请是否清空所有对话?该操作无法撤销',
|
||||
com_nav_help_faq: '帮助',
|
||||
com_nav_settings: '设置',
|
||||
com_nav_search_placeholder: '搜索对话及对话内容',
|
||||
|
|
|
@ -44,6 +44,10 @@ export default {
|
|||
com_ui_clear: '清除',
|
||||
com_ui_revoke: '撤銷',
|
||||
com_ui_revoke_info: '撤銷所有使用者提供的憑證。',
|
||||
com_ui_import_conversation: '導入',
|
||||
com_ui_import_conversation_info: '從JSON文件導入對話',
|
||||
com_ui_import_conversation_success: '對話導入成功',
|
||||
com_ui_import_conversation_error: '導入對話時發生錯誤',
|
||||
com_ui_confirm_action: '確認操作',
|
||||
com_ui_chats: '對話',
|
||||
com_ui_delete: '刪除',
|
||||
|
|
|
@ -31,7 +31,7 @@ _Note: you will need a credit card or PayPal to sign up. I'm able to use a prepa
|
|||
- [2. Access console](#2-access-your-droplet-console)
|
||||
- [3. Console user setup](#3-once-you-have-logged-in-immediately-create-a-new-non-root-user)
|
||||
- [4. Firewall Setup](#4-firewall-setup)
|
||||
- **[Part II: Installing Docker & Other Dependencies](#part-ii-installing-docker-and-other-dependencies)**
|
||||
- **[Part II: Installing Docker & Other Dependencies](./docker_ubuntu_deploy.md)**
|
||||
|
||||
## Part I: Starting from Zero:
|
||||
|
||||
|
|
|
@ -267,8 +267,8 @@ client:
|
|||
- /etc/letsencrypt/ssl-dhparams.pem:/etc/letsencrypt/ssl-dhparams.pem
|
||||
```
|
||||
|
||||
after you changed them you should follow the instruction from [Part V: Editing the NGINX file](digitalocean.md#part-v-editing-the-nginx-file-for-custom-domains-and-advanced-configs)\*\*
|
||||
in order to update the git and deploy from a rebased branch.
|
||||
After you have changed them, follow the instructions in [Part V: Editing the NGINX file](./docker_ubuntu_deploy.md#part-iv-editing-the-nginx-file-for-custom-domains-and-advanced-configs) to update the git repository and deploy from a rebased branch.
|
||||
|
||||
[TBA: TO ADD HERE a simple explanation based on that explanation]
|
||||
|
||||
#### Option B: Configure NGINX without Basic Authentication on the host
|
||||
|
|
27
docs/features/conversations_import.md
Normal file
|
@ -0,0 +1,27 @@
---
title: 📥 Import conversations from other chats
description: Conversations Import allows users to import conversations exported from other GPT chat applications. Currently, we support importing conversations from ChatGPT, ChatbotUI v1, and LibreChat itself.
weight: -1
---
Conversations Import allows users to import conversations exported from other GPT chat applications. Currently, we support importing conversations from ChatGPT, [ChatbotUI v1](https://github.com/mckaywrigley/chatbot-ui/tree/b865b0555f53957e96727bc0bbb369c9eaecd83b?tab=readme-ov-file#legacy-code), and LibreChat itself.

Import functionality is available in the "Settings" -> "Data Controls" section.



# How to import conversations from ChatGPT

1. Follow the [ChatGPT export instructions](https://help.openai.com/en/articles/7260999-how-do-i-export-my-chatgpt-history-and-data) to export your conversations.
2. You should receive a link to download the archive in your email.
3. Download the archive. It should be a zip file with a random name like: _d119d98bb3711aff7a2c73bcc7ea53d96c984650d8f7e033faef78386a9907-2024-01-01-10-30-00.zip_
4. Extract the content of the zip file.
5. Navigate to LibreChat Settings -> Data Controls.

6. Click the "Import" button and select the `conversations.json` file from the extracted archive. This will start importing the conversations.
7. Shortly after, you will get a notification that the import is complete. If you prefer to script the import rather than use the UI, see the sketch below the screenshot.

## Sharing on Discord
|
||||
|
||||
Join us on [discord](https://discord.librechat.ai) and see our **[#presets ](https://discord.com/channels/1086345563026489514/1093249324797935746)** channel where thousands of presets are shared by users worldwide. Check out pinned posts for popular presets!
|
|
@ -11,6 +11,7 @@ weight: 2
|
|||
* 🤖[Custom Endpoints](../install/configuration/custom_config.md)
|
||||
* 🗃️ [RAG API (Chat with Files)](./rag_api.md)
|
||||
* 🔖 [Presets](./presets.md)
|
||||
* 📥 [Import conversations from other chats](./conversations_import.md)
|
||||
* 🔌[Plugins](./plugins/index.md)
|
||||
* 🔌 [Introduction](./plugins/introduction.md)
|
||||
* 🛠️ [Make Your Own](./plugins/make_your_own.md)
|
||||
|
|
|
@ -30,7 +30,7 @@ Here are the key takeaways for creating your own plugin:
|
|||
|
||||
**3.** [**Define Helper Methods:**](make_your_own.md#step-3-define-helper-methods) Define helper methods within your class to handle specific tasks if needed.
|
||||
|
||||
**4.** [**Implement the `_call` Method:**](make_your_own.md#step-4-implement-the-_call-method) Implement the `_call` method where the main functionality of your plugin is defined. This method is called when the language model decides to use your plugin. It should take an `input` parameter and return a result. If an error occurs, the function should return a string representing an error, rather than throwing an error. If your plugin requires multiple inputs from the LLM, read the [StructuredTools](#StructuredTools) section.
|
||||
**4.** [**Implement the `_call` Method:**](make_your_own.md#step-4-implement-the-_call-method) Implement the `_call` method where the main functionality of your plugin is defined. This method is called when the language model decides to use your plugin. It should take an `input` parameter and return a result. If an error occurs, the function should return a string representing an error, rather than throwing an error. If your plugin requires multiple inputs from the LLM, read the [StructuredTools](#structuredtools) section.
|
||||
|
||||
**5.** [**Export Your Plugin and Import into handleTools.js:**](make_your_own.md#step-5-export-your-plugin-and-import-into-handletoolsjs) Export your plugin and import it into `handleTools.js`. Add your plugin to the `toolConstructors` object in the `loadTools` function. If your plugin requires more advanced initialization, add it to the `customConstructors` object.
|
||||
|
||||
|
@ -132,7 +132,7 @@ class StableDiffusionAPI extends Tool {
|
|||
|
||||
The `_call` method is where the main functionality of your plugin is implemented. This method is called when the language model decides to use your plugin. It should take an `input` parameter and return a result.
|
||||
|
||||
> In a basic Tool, the LLM will generate one string value as an input. If your plugin requires multiple inputs from the LLM, read the **[StructuredTools](#StructuredTools)** section.
|
||||
> In a basic Tool, the LLM will generate one string value as an input. If your plugin requires multiple inputs from the LLM, read the **[StructuredTools](#structuredtools)** section.
|
||||
|
||||
```javascript
|
||||
class StableDiffusionAPI extends Tool {
|
||||
|
|
|
@ -239,7 +239,7 @@ Applying these setup requirements thoughtfully will ensure a correct and efficie
|
|||
|
||||
### Model Deployments
|
||||
|
||||
The list of models available to your users are determined by the model groupings specified in your [`azureOpenAI` endpoint config.](./custom_config.md#models-1)
|
||||
The list of models available to your users is determined by the model groupings specified in your [`azureOpenAI` endpoint config.](./custom_config.md#models_1)
|
||||
|
||||
For example:
|
||||
|
||||
|
@ -408,7 +408,7 @@ endpoints:
|
|||
|
||||
To use Vision (image analysis) with Azure OpenAI, you need to make sure `gpt-4-vision-preview` is a specified model [in one of your groupings](#model-deployments)
|
||||
|
||||
This will work seamlessly as it does with the [OpenAI endpoint](#openai) (no need to select the vision model, it will be switched behind the scenes)
|
||||
This will work seamlessly as it does with the [OpenAI endpoint](./ai_setup.md#openai) (no need to select the vision model, it will be switched behind the scenes)
|
||||
|
||||
### Generate images with Azure OpenAI Service (DALL-E)
|
||||
|
||||
|
@ -639,15 +639,15 @@ In any case, you can adjust the title model as such: `OPENAI_TITLE_MODEL=your-ti
|
|||
|
||||
Currently, the best way to setup Vision is to use your deployment names as the model names, as [shown here](#model-deployments)
|
||||
|
||||
This will work seamlessly as it does with the [OpenAI endpoint](#openai) (no need to select the vision model, it will be switched behind the scenes)
|
||||
This will work seamlessly as it does with the [OpenAI endpoint](./ai_setup.md#openai) (no need to select the vision model, it will be switched behind the scenes)
|
||||
|
||||
Alternatively, you can set the [required variables](#required-variables) to explicitly use your vision deployment, but this may limit you to exclusively using your vision deployment for all Azure chat settings.
|
||||
Alternatively, you can set the [required variables](#required-fields) to explicitly use your vision deployment, but this may limit you to exclusively using your vision deployment for all Azure chat settings.
|
||||
|
||||
|
||||
**Notes:**
|
||||
|
||||
- If using `AZURE_OPENAI_BASEURL`, do not replace the placeholders with specific instance and deployment names, as the vision request will fail.
|
||||
- As of December 18th, 2023, Vision models seem to have degraded performance with Azure OpenAI when compared to [OpenAI](#openai)
|
||||
- As of December 18th, 2023, Vision models seem to have degraded performance with Azure OpenAI when compared to [OpenAI](./ai_setup.md#openai)
|
||||
|
||||

|
||||
|
||||
|
|
|
@ -6,6 +6,10 @@ weight: -10
|
|||
|
||||
# 🖥️ Config Changelog
|
||||
|
||||
## v1.0.9
|
||||
|
||||
- Added `conversationsImport` to [rateLimits](./custom_config.md#ratelimits) along with the [new feature](https://github.com/danny-avila/LibreChat/pull/2355) for importing conversations from LibreChat, ChatGPT, and Chatbot UI.
|
||||
|
||||
## v1.0.8
|
||||
|
||||
- Added additional fields to [interface config](./custom_config.md#interface-object-structure) to toggle access to specific features:
|
||||
|
|
|
@ -112,6 +112,13 @@ docker compose up
|
|||
userMax: 50
|
||||
# Rate limit window for file uploads per user
|
||||
userWindowInMinutes: 60
|
||||
conversationsImport:
|
||||
ipMax: 100
|
||||
# Rate limit window for conversation imports per IP
|
||||
ipWindowInMinutes: 60
|
||||
userMax: 50
|
||||
# Rate limit window for conversation imports per user
|
||||
userWindowInMinutes: 60
|
||||
registration:
|
||||
socialLogins: ["google", "facebook", "github", "discord", "openid"]
|
||||
allowedDomains:
|
||||
|
@ -278,7 +285,7 @@ docker compose up
|
|||
- `fileUploads`
|
||||
- **Type**: Object
|
||||
- **Description**: Configures rate limits specifically for file upload operations.
|
||||
- **Sub-keys:**
|
||||
- <u>**Sub-keys:**</u>
|
||||
- `ipMax`
|
||||
- **Type**: Number
|
||||
- **Description**: Maximum number of uploads allowed per IP address per window.
|
||||
|
@ -291,6 +298,22 @@ docker compose up
|
|||
- `userWindowInMinutes`
|
||||
- **Type**: Number
|
||||
- **Description**: Time window in minutes for the user-based upload limit.
|
||||
- `conversationsImport`
|
||||
- **Type**: Object
|
||||
- **Description**: Configures rate limits specifically for conversation import operations.
|
||||
- <u>**Sub-keys:**</u>
|
||||
- `ipMax`
|
||||
- **Type**: Number
|
||||
- **Description**: Maximum number of imports allowed per IP address per window.
|
||||
- `ipWindowInMinutes`
|
||||
- **Type**: Number
|
||||
- **Description**: Time window in minutes for the IP-based imports limit.
|
||||
- `userMax`
|
||||
- **Type**: Number
|
||||
- **Description**: Maximum number of imports per user per window.
|
||||
- `userWindowInMinutes`
|
||||
- **Type**: Number
|
||||
- **Description**: Time window in minutes for the user-based imports limit.
|
||||
|
||||
- **Example**:
|
||||
```yaml
|
||||
|
@ -300,6 +323,11 @@ docker compose up
|
|||
ipWindowInMinutes: 60
|
||||
userMax: 50
|
||||
userWindowInMinutes: 60
|
||||
conversationsImport:
|
||||
ipMax: 100
|
||||
ipWindowInMinutes: 60
|
||||
userMax: 50
|
||||
userWindowInMinutes: 60
|
||||
```
|
||||
|
||||
### registration
|
||||
|
@ -308,8 +336,8 @@ docker compose up
|
|||
- **Type**: Object
|
||||
- **Description**: Configures registration-related settings for the application.
|
||||
- <u>**Sub-keys:**</u>
|
||||
- `socialLogins`: [More info](#socialLogins)
|
||||
- `allowedDomains`: [More info](#allowedDomains)
|
||||
- `socialLogins`: [More info](#sociallogins)
|
||||
- `allowedDomains`: [More info](#alloweddomains)
|
||||
- [Registration Object Structure](#registration-object-structure)
|
||||
|
||||
### interface
|
||||
|
@ -1015,7 +1043,7 @@ The preset field for a modelSpec list item is made up of a comprehensive configu
|
|||
```yaml
|
||||
socialLogins: ["google", "facebook", "github", "discord", "openid"]
|
||||
```
|
||||
- **Note**: The order of the providers in the list determines their appearance order on the login/registration page. Each provider listed must be [properly configured](./user_auth_system.md#social-authentication-setup-and-configuration) within the system to be active and available for users. This configuration allows for a tailored authentication experience, emphasizing the most relevant or preferred social login options for your user base.
|
||||
- **Note**: The order of the providers in the list determines their appearance order on the login/registration page. Each provider listed must be [properly configured](./user_auth_system.md#social-authentication) within the system to be active and available for users. This configuration allows for a tailored authentication experience, emphasizing the most relevant or preferred social login options for your user base.
|
||||
|
||||
### **allowedDomains**
|
||||
|
||||
|
@ -1656,7 +1684,7 @@ Custom endpoints share logic with the OpenAI endpoint, and thus have default par
|
|||
- `stream`: If set, partial message deltas will be sent, like in ChatGPT. Otherwise, generation will only be available when completed.
|
||||
- `messages`: [OpenAI format for messages](https://platform.openai.com/docs/api-reference/chat/create#chat-create-messages); the `name` field is added to messages with `system` and `assistant` roles when a custom name is specified via preset.
|
||||
|
||||
**Note:** The `max_tokens` field is not sent to use the maximum amount of tokens available, which is default OpenAI API behavior. Some alternate APIs require this field, or it may default to a very low value and your responses may appear cut off; in this case, you should add it to `addParams` field as shown in the [Endpoint Object Structure](#endpoint-object-structure).
|
||||
**Note:** The `max_tokens` field is not sent, in order to use the maximum number of tokens available, which is the default OpenAI API behavior. Some alternate APIs require this field, or it may default to a very low value and your responses may appear cut off; in this case, you should add it to the `addParams` field as shown in the [Custom Endpoint Object Structure](#custom-endpoint-object-structure).
|
||||
|
||||
### Additional Notes
|
||||
|
||||
|
|
|
@ -232,7 +232,7 @@ AZURE_OPENAI_BASEURL=https://gateway.ai.cloudflare.com/v1/ACCOUNT_TAG/GATEWAY/az
|
|||
- Sets the base URL for Azure OpenAI API requests.
|
||||
- Can include `${INSTANCE_NAME}` and `${DEPLOYMENT_NAME}` placeholders or specific credentials.
|
||||
- Example: "https://gateway.ai.cloudflare.com/v1/ACCOUNT_TAG/GATEWAY/azure-openai/${INSTANCE_NAME}/${DEPLOYMENT_NAME}"
|
||||
- [More info about `AZURE_OPENAI_BASEURL` here](./ai_setup.md#using-a-specified-base-url-with-azure)
|
||||
- [More info about `AZURE_OPENAI_BASEURL` here](./azure_openai.md#using-a-specified-base-url-with-azure)
|
||||
|
||||
> Note: as deployment names can't have periods, they will be removed when the endpoint is generated.
|
||||
|
||||
|
@ -412,7 +412,7 @@ ASSISTANTS_BASE_URL=http://your-alt-baseURL:3080/
|
|||
- There is additional, optional configuration, depending on your needs, such as disabling the assistant builder UI, and determining which assistants can be used, that are available via the [`librechat.yaml` custom config file](./custom_config.md#assistants-endpoint-object-structure).
|
||||
|
||||
### OpenRouter
|
||||
See [OpenRouter](./free_ai_apis.md#openrouter-preferred) for more info.
|
||||
See [OpenRouter](./ai_endpoints.md#openrouter) for more info.
|
||||
|
||||
- OpenRouter is a legitimate proxy service to a multitude of LLMs, both closed and open source, including: OpenAI models, Anthropic models, Meta's Llama models, pygmalionai/mythalion-13b and many more open source models. Newer integrations are usually discounted, too!
|
||||
|
||||
|
|
|
@ -31,6 +31,11 @@ registration:
|
|||
# ipWindowInMinutes: 60 # Rate limit window for file uploads per IP
|
||||
# userMax: 50
|
||||
# userWindowInMinutes: 60 # Rate limit window for file uploads per user
|
||||
# conversationsImport:
|
||||
# ipMax: 100
|
||||
# ipWindowInMinutes: 60 # Rate limit window for conversation imports per IP
|
||||
# userMax: 50
|
||||
# userWindowInMinutes: 60 # Rate limit window for conversation imports per user
|
||||
|
||||
# Definition of custom endpoints
|
||||
endpoints:
|
||||
|
|
138
package-lock.json
generated
|
@ -13,6 +13,9 @@
|
|||
"client",
|
||||
"packages/*"
|
||||
],
|
||||
"dependencies": {
|
||||
"agenda": "^5.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.38.1",
|
||||
"@typescript-eslint/eslint-plugin": "^5.62.0",
|
||||
|
@ -10597,6 +10600,73 @@
|
|||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/agenda": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/agenda/-/agenda-5.0.0.tgz",
|
||||
"integrity": "sha512-jOoa7PvARpst/y2PI8h0wph4NmcjYJ/4wzFhQcHUbNgN+Hte/9h/MzKE0ZmHfIwdsSlnv3rhbBQ3Zd/gwFkThg==",
|
||||
"dependencies": {
|
||||
"cron-parser": "^3.5.0",
|
||||
"date.js": "~0.3.3",
|
||||
"debug": "~4.3.4",
|
||||
"human-interval": "~2.0.1",
|
||||
"moment-timezone": "~0.5.37",
|
||||
"mongodb": "^4.11.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/agenda/node_modules/bson": {
|
||||
"version": "4.7.2",
|
||||
"resolved": "https://registry.npmjs.org/bson/-/bson-4.7.2.tgz",
|
||||
"integrity": "sha512-Ry9wCtIZ5kGqkJoi6aD8KjxFZEx78guTQDnpXWiNthsxzrxAK/i8E6pCHAIZTbaEFWcOCvbecMukfK7XUvyLpQ==",
|
||||
"dependencies": {
|
||||
"buffer": "^5.6.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6.9.0"
|
||||
}
|
||||
},
|
||||
"node_modules/agenda/node_modules/buffer": {
|
||||
"version": "5.7.1",
|
||||
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
|
||||
"integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/feross"
|
||||
},
|
||||
{
|
||||
"type": "patreon",
|
||||
"url": "https://www.patreon.com/feross"
|
||||
},
|
||||
{
|
||||
"type": "consulting",
|
||||
"url": "https://feross.org/support"
|
||||
}
|
||||
],
|
||||
"dependencies": {
|
||||
"base64-js": "^1.3.1",
|
||||
"ieee754": "^1.1.13"
|
||||
}
|
||||
},
|
||||
"node_modules/agenda/node_modules/mongodb": {
|
||||
"version": "4.17.2",
|
||||
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-4.17.2.tgz",
|
||||
"integrity": "sha512-mLV7SEiov2LHleRJPMPrK2PMyhXFZt2UQLC4VD4pnth3jMjYKHhtqfwwkkvS/NXuo/Fp3vbhaNcXrIDaLRb9Tg==",
|
||||
"dependencies": {
|
||||
"bson": "^4.7.2",
|
||||
"mongodb-connection-string-url": "^2.6.0",
|
||||
"socks": "^2.7.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.9.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@aws-sdk/credential-providers": "^3.186.0",
|
||||
"@mongodb-js/saslprep": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/agent-base": {
|
||||
"version": "6.0.2",
|
||||
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz",
|
||||
|
@ -12729,6 +12799,18 @@
|
|||
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/cron-parser": {
|
||||
"version": "3.5.0",
|
||||
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-3.5.0.tgz",
|
||||
"integrity": "sha512-wyVZtbRs6qDfFd8ap457w3XVntdvqcwBGxBoTvJQH9KGVKL/fB+h2k3C8AqiVxvUQKN1Ps/Ns46CNViOpVDhfQ==",
|
||||
"dependencies": {
|
||||
"is-nan": "^1.3.2",
|
||||
"luxon": "^1.26.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/cross-env": {
|
||||
"version": "7.0.3",
|
||||
"resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz",
|
||||
|
@ -12979,6 +13061,27 @@
|
|||
"url": "https://github.com/sponsors/kossnocorp"
|
||||
}
|
||||
},
|
||||
"node_modules/date.js": {
|
||||
"version": "0.3.3",
|
||||
"resolved": "https://registry.npmjs.org/date.js/-/date.js-0.3.3.tgz",
|
||||
"integrity": "sha512-HgigOS3h3k6HnW011nAb43c5xx5rBXk8P2v/WIT9Zv4koIaVXiH2BURguI78VVp+5Qc076T7OR378JViCnZtBw==",
|
||||
"dependencies": {
|
||||
"debug": "~3.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/date.js/node_modules/debug": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz",
|
||||
"integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==",
|
||||
"dependencies": {
|
||||
"ms": "2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/date.js/node_modules/ms": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
|
||||
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.3.4",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
|
||||
|
@ -13138,7 +13241,6 @@
|
|||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz",
|
||||
"integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"define-data-property": "^1.0.1",
|
||||
"has-property-descriptors": "^1.0.0",
|
||||
|
@ -16294,6 +16396,14 @@
|
|||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/human-interval": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/human-interval/-/human-interval-2.0.1.tgz",
|
||||
"integrity": "sha512-r4Aotzf+OtKIGQCB3odUowy4GfUDTy3aTWTfLd7ZF2gBCy3XW3v/dJLRefZnOFFnjqs5B1TypvS8WarpBkYUNQ==",
|
||||
"dependencies": {
|
||||
"numbered": "^1.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/human-signals": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz",
|
||||
|
@ -17103,7 +17213,6 @@
|
|||
"version": "1.3.2",
|
||||
"resolved": "https://registry.npmjs.org/is-nan/-/is-nan-1.3.2.tgz",
|
||||
"integrity": "sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.0",
|
||||
"define-properties": "^1.1.3"
|
||||
|
@ -19460,6 +19569,14 @@
|
|||
"react": "^16.5.1 || ^17.0.0 || ^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/luxon": {
|
||||
"version": "1.28.1",
|
||||
"resolved": "https://registry.npmjs.org/luxon/-/luxon-1.28.1.tgz",
|
||||
"integrity": "sha512-gYHAa180mKrNIUJCbwpmD0aTu9kV0dREDrwNnuyFAsO1Wt0EVYSZelPnJlbj9HplzXX/YWXHFTL45kvZ53M0pw==",
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/lz-string": {
|
||||
"version": "1.5.0",
|
||||
"resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz",
|
||||
|
@ -20613,6 +20730,17 @@
|
|||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/moment-timezone": {
|
||||
"version": "0.5.45",
|
||||
"resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.45.tgz",
|
||||
"integrity": "sha512-HIWmqA86KcmCAhnMAN0wuDOARV/525R2+lOLotuGFzn4HO+FH+/645z2wx0Dt3iDv6/p61SIvKnDstISainhLQ==",
|
||||
"dependencies": {
|
||||
"moment": "^2.29.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/mongodb": {
|
||||
"version": "5.9.2",
|
||||
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-5.9.2.tgz",
|
||||
|
@ -21260,6 +21388,11 @@
|
|||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/numbered": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/numbered/-/numbered-1.1.0.tgz",
|
||||
"integrity": "sha512-pv/ue2Odr7IfYOO0byC1KgBI10wo5YDauLhxY6/saNzAdAs0r1SotGCPzzCLNPL0xtrAwWRialLu23AAu9xO1g=="
|
||||
},
|
||||
"node_modules/nwsapi": {
|
||||
"version": "2.2.7",
|
||||
"resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.7.tgz",
|
||||
|
@ -21315,7 +21448,6 @@
|
|||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
|
||||
"integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
|
|
|
@ -100,5 +100,8 @@
|
|||
"admin/",
|
||||
"packages/"
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"agenda": "^5.0.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "librechat-data-provider",
|
||||
"version": "0.5.8",
|
||||
"version": "0.5.9",
|
||||
"description": "data services for librechat apps",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.es.js",
|
||||
|
|
|
@ -19,15 +19,23 @@ export const revokeAllUserKeys = () => `${keysEndpoint}?all=true`;
|
|||
|
||||
export const abortRequest = (endpoint: string) => `/api/ask/${endpoint}/abort`;
|
||||
|
||||
export const conversations = (pageNumber: string) => `/api/convos?pageNumber=${pageNumber}`;
|
||||
export const conversationsRoot = '/api/convos';
|
||||
|
||||
export const conversationById = (id: string) => `/api/convos/${id}`;
|
||||
export const conversations = (pageNumber: string) =>
|
||||
`${conversationsRoot}?pageNumber=${pageNumber}`;
|
||||
|
||||
export const genTitle = () => '/api/convos/gen_title';
|
||||
export const conversationById = (id: string) => `${conversationsRoot}/${id}`;
|
||||
|
||||
export const updateConversation = () => '/api/convos/update';
|
||||
export const genTitle = () => `${conversationsRoot}/gen_title`;
|
||||
|
||||
export const deleteConversation = () => '/api/convos/clear';
|
||||
export const updateConversation = () => `${conversationsRoot}/update`;
|
||||
|
||||
export const deleteConversation = () => `${conversationsRoot}/clear`;
|
||||
|
||||
export const importConversation = () => `${conversationsRoot}/import`;
|
||||
|
||||
export const importConversationJobStatus = (jobId: string) =>
|
||||
`${conversationsRoot}/import/jobs/${jobId}`;
|
||||
|
||||
export const search = (q: string, pageNumber: string) =>
|
||||
`/api/search?q=${q}&pageNumber=${pageNumber}`;
|
||||
|
|
|
@ -224,6 +224,14 @@ export const rateLimitSchema = z.object({
|
|||
userWindowInMinutes: z.number().optional(),
|
||||
})
|
||||
.optional(),
|
||||
conversationsImport: z
|
||||
.object({
|
||||
ipMax: z.number().optional(),
|
||||
ipWindowInMinutes: z.number().optional(),
|
||||
userMax: z.number().optional(),
|
||||
userWindowInMinutes: z.number().optional(),
|
||||
})
|
||||
.optional(),
|
||||
});
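For a concrete sense of what the schema above accepts, here is a small sketch validating a `conversationsImport` block with it. The package import path is an assumption for illustration; inside the repo the schema lives in the data-provider config module.

```typescript
import { rateLimitSchema } from 'librechat-data-provider';

// Sketch: every sub-key is an optional number, and each block is itself optional.
const result = rateLimitSchema.safeParse({
  conversationsImport: {
    ipMax: 100,
    ipWindowInMinutes: 60,
    userMax: 50,
    userWindowInMinutes: 60,
  },
});

console.log(result.success); // true
```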
|
||||
|
||||
export enum EImageOutputType {
|
||||
|
@ -660,7 +668,7 @@ export enum Constants {
|
|||
/** Key for the app's version. */
|
||||
VERSION = 'v0.7.1',
|
||||
/** Key for the Custom Config's version (librechat.yaml). */
|
||||
CONFIG_VERSION = '1.0.8',
|
||||
CONFIG_VERSION = '1.0.9',
|
||||
/** Standard value for the first message's `parentMessageId` value, to indicate no parent exists. */
|
||||
NO_PARENT = '00000000-0000-0000-0000-000000000000',
|
||||
/** Fixed, encoded domain length for Azure OpenAI Assistants Function name parsing. */
|
||||
|
|
|
@ -191,6 +191,28 @@ export const uploadFile = (data: FormData): Promise<f.TFileUpload> => {
|
|||
return request.postMultiPart(endpoints.files(), data);
|
||||
};
|
||||
|
||||
/**
|
||||
* Imports a conversations file.
|
||||
*
|
||||
* @param data - The FormData containing the file to import.
|
||||
* @returns A Promise that resolves to the import start response.
|
||||
*/
|
||||
export const importConversationsFile = (data: FormData): Promise<t.TImportStartResponse> => {
|
||||
return request.postMultiPart(endpoints.importConversation(), data);
|
||||
};
|
||||
|
||||
/**
|
||||
* Retrieves the status of an import conversation job.
|
||||
*
|
||||
* @param jobId - The ID of the import conversation job.
|
||||
* @returns A promise that resolves to the import job status.
|
||||
*/
|
||||
export const queryImportConversationJobStatus = async (
|
||||
jobId: string,
|
||||
): Promise<t.TImportJobStatus> => {
|
||||
return request.get(endpoints.importConversationJobStatus(jobId));
|
||||
};
|
||||
|
||||
export const uploadAvatar = (data: FormData): Promise<f.AvatarUploadResponse> => {
|
||||
return request.postMultiPart(endpoints.avatar(), data);
|
||||
};
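A minimal usage sketch for the two import helpers above, from the perspective of client code that already has a `File` from an `<input type="file">`. The relative import path and the multipart field name `file` are assumptions for illustration:

```typescript
import { importConversationsFile } from './data-service';

// Sketch: build the FormData and start the import; resolves with the job id.
// Assumption: the server's upload route expects the field to be named "file".
export async function startImport(file: File): Promise<string> {
  const formData = new FormData();
  formData.append('file', file, file.name);

  const { message, jobId } = await importConversationsFile(formData);
  console.log(message); // acknowledgement that the import job was scheduled
  return jobId;
}
```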
|
||||
|
|
|
@ -247,3 +247,43 @@ export type TRequestPasswordResetResponse = {
|
|||
link?: string;
|
||||
message?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Represents the response from the import endpoint.
|
||||
*/
|
||||
export type TImportStartResponse = {
|
||||
/**
|
||||
* The message associated with the response.
|
||||
*/
|
||||
message: string;
|
||||
|
||||
/**
|
||||
* The ID of the job associated with the import.
|
||||
*/
|
||||
jobId: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Represents the status of an import job.
|
||||
*/
|
||||
export type TImportJobStatus = {
|
||||
/**
|
||||
* The name of the job.
|
||||
*/
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* The ID of the job.
|
||||
*/
|
||||
id: string;
|
||||
|
||||
/**
|
||||
* The status of the job.
|
||||
*/
|
||||
status: 'scheduled' | 'running' | 'completed' | 'failed';
|
||||
|
||||
/**
|
||||
* The reason the job failed, if applicable.
|
||||
*/
|
||||
failReason?: string;
|
||||
};
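As a closing illustration, the `status` union above is what a polling loop keys off of. The PR wires this into the frontend with a polling timeout; the standalone sketch below only shows the shape of that loop, with the relative import paths and the 2-second interval as assumptions:

```typescript
import { queryImportConversationJobStatus } from './data-service';
import type { TImportJobStatus } from './types';

// Sketch: poll the import job until it reaches a terminal state.
export async function waitForImport(jobId: string): Promise<TImportJobStatus> {
  for (;;) {
    const job = await queryImportConversationJobStatus(jobId);
    if (job.status === 'completed' || job.status === 'failed') {
      return job; // inspect job.failReason when status === 'failed'
    }
    await new Promise((resolve) => setTimeout(resolve, 2000));
  }
}
```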