Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-09-22 06:00:56 +02:00)
🛡️ fix: Minor Vulnerabilities (#4543)
* fix: ReDoS in ChatGPT Import
* ci: should correctly process citations from real ChatGPT data
* ci: Add ReDoS vulnerability test for processAssistantMessage
* refactor: Update thread management and citation handling
* refactor(validateImageRequest): robust validation
* refactor(Prompt.js): update name search regex to escape special characters
* refactor(Preset): exclude user from preset update to prevent mass assignment
* refactor(files.js): Improve file deletion process
* ci: updated validateImageRequest.spec.js
* a11y: plugin pagination
* refactor(CreatePromptForm.tsx): Improve input field styling
* chore(Prompts): typing and accessibility
* fix: prompt creation access role check
* chore: remove duplicate jsdocs
Parent: 094a40dbb0
Commit: 3f3b5929e9
15 changed files with 698 additions and 53 deletions
@@ -38,7 +38,8 @@ module.exports = {
   savePreset: async (user, { presetId, newPresetId, defaultPreset, ...preset }) => {
     try {
       const setter = { $set: {} };
-      const update = { presetId, ...preset };
+      const { user: _, ...cleanPreset } = preset;
+      const update = { presetId, ...cleanPreset };
       if (preset.tools && Array.isArray(preset.tools)) {
         update.tools =
           preset.tools
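Excluding `user` from the spread is a guard against mass assignment: a request body that smuggles a `user` field into the preset payload can no longer reassign the record to another account. A minimal standalone sketch of the idea (values are made up for illustration):

// Client-controlled payload attempting to overwrite the owner field
const preset = { title: 'My preset', user: 'someone-elses-id' };

// Strip any client-supplied `user` before building the database update
const { user: _, ...cleanPreset } = preset;
const update = { presetId: 'example-preset-id', ...cleanPreset };

console.log(update); // { presetId: 'example-preset-id', title: 'My preset' } with no user key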
@@ -7,6 +7,7 @@ const {
   removeGroupFromAllProjects,
 } = require('./Project');
 const { Prompt, PromptGroup } = require('./schema/promptSchema');
+const { escapeRegExp } = require('~/server/utils');
 const { logger } = require('~/config');

 /**
@@ -106,7 +107,7 @@ const getAllPromptGroups = async (req, filter) => {
   let searchShared = true;
   let searchSharedOnly = false;
   if (name) {
-    query.name = new RegExp(name, 'i');
+    query.name = new RegExp(escapeRegExp(name), 'i');
   }
   if (!query.category) {
     delete query.category;
@@ -159,7 +160,7 @@ const getPromptGroups = async (req, filter) => {
   let searchShared = true;
   let searchSharedOnly = false;
   if (name) {
-    query.name = new RegExp(name, 'i');
+    query.name = new RegExp(escapeRegExp(name), 'i');
   }
   if (!query.category) {
     delete query.category;
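Both name searches previously passed user input straight into new RegExp(...), so a prompt name such as `(a+)+$` would be interpreted as a pattern, which can cause catastrophic backtracking (ReDoS) or unintended matches. The escapeRegExp helper introduced by this commit neutralizes the metacharacters; a quick standalone illustration:

function escapeRegExp(string) {
  return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}

const userInput = '(a+)+$';
console.log(escapeRegExp(userInput)); // \(a\+\)\+\$
// The escaped pattern only matches the literal text instead of acting as a nested quantifier
console.log(new RegExp(escapeRegExp(userInput), 'i').test('my (a+)+$ prompt')); // true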
@@ -3,6 +3,7 @@ const validateImageRequest = require('~/server/middleware/validateImageRequest')

 describe('validateImageRequest middleware', () => {
   let req, res, next;
+  const validObjectId = '65cfb246f7ecadb8b1e8036b';

   beforeEach(() => {
     req = {
@@ -43,7 +44,7 @@ describe('validateImageRequest middleware', () => {

   test('should return 403 if refresh token is expired', () => {
     const expiredToken = jwt.sign(
-      { id: '123', exp: Math.floor(Date.now() / 1000) - 3600 },
+      { id: validObjectId, exp: Math.floor(Date.now() / 1000) - 3600 },
       process.env.JWT_REFRESH_SECRET,
     );
     req.headers.cookie = `refreshToken=${expiredToken}`;
@@ -54,22 +55,34 @@ describe('validateImageRequest middleware', () => {

   test('should call next() for valid image path', () => {
     const validToken = jwt.sign(
-      { id: '123', exp: Math.floor(Date.now() / 1000) + 3600 },
+      { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
       process.env.JWT_REFRESH_SECRET,
     );
     req.headers.cookie = `refreshToken=${validToken}`;
-    req.originalUrl = '/images/123/example.jpg';
+    req.originalUrl = `/images/${validObjectId}/example.jpg`;
     validateImageRequest(req, res, next);
     expect(next).toHaveBeenCalled();
   });

   test('should return 403 for invalid image path', () => {
     const validToken = jwt.sign(
-      { id: '123', exp: Math.floor(Date.now() / 1000) + 3600 },
+      { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
       process.env.JWT_REFRESH_SECRET,
     );
     req.headers.cookie = `refreshToken=${validToken}`;
-    req.originalUrl = '/images/456/example.jpg';
+    req.originalUrl = '/images/65cfb246f7ecadb8b1e8036c/example.jpg'; // Different ObjectId
     validateImageRequest(req, res, next);
     expect(res.status).toHaveBeenCalledWith(403);
     expect(res.send).toHaveBeenCalledWith('Access Denied');
   });
+
+  test('should return 403 for invalid ObjectId format', () => {
+    const validToken = jwt.sign(
+      { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
+      process.env.JWT_REFRESH_SECRET,
+    );
+    req.headers.cookie = `refreshToken=${validToken}`;
+    req.originalUrl = '/images/123/example.jpg'; // Invalid ObjectId
+    validateImageRequest(req, res, next);
+    expect(res.status).toHaveBeenCalledWith(403);
+    expect(res.send).toHaveBeenCalledWith('Access Denied');
@@ -78,16 +91,16 @@ describe('validateImageRequest middleware', () => {
   // File traversal tests
   test('should prevent file traversal attempts', () => {
     const validToken = jwt.sign(
-      { id: '123', exp: Math.floor(Date.now() / 1000) + 3600 },
+      { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
       process.env.JWT_REFRESH_SECRET,
     );
     req.headers.cookie = `refreshToken=${validToken}`;

     const traversalAttempts = [
-      '/images/123/../../../etc/passwd',
-      '/images/123/..%2F..%2F..%2Fetc%2Fpasswd',
-      '/images/123/image.jpg/../../../etc/passwd',
-      '/images/123/%2e%2e%2f%2e%2e%2f%2e%2e%2fetc%2fpasswd',
+      `/images/${validObjectId}/../../../etc/passwd`,
+      `/images/${validObjectId}/..%2F..%2F..%2Fetc%2Fpasswd`,
+      `/images/${validObjectId}/image.jpg/../../../etc/passwd`,
+      `/images/${validObjectId}/%2e%2e%2f%2e%2e%2f%2e%2e%2fetc%2fpasswd`,
     ];

     traversalAttempts.forEach((attempt) => {
@@ -101,11 +114,11 @@ describe('validateImageRequest middleware', () => {

   test('should handle URL encoded characters in valid paths', () => {
     const validToken = jwt.sign(
-      { id: '123', exp: Math.floor(Date.now() / 1000) + 3600 },
+      { id: validObjectId, exp: Math.floor(Date.now() / 1000) + 3600 },
       process.env.JWT_REFRESH_SECRET,
     );
     req.headers.cookie = `refreshToken=${validToken}`;
-    req.originalUrl = '/images/123/image%20with%20spaces.jpg';
+    req.originalUrl = `/images/${validObjectId}/image%20with%20spaces.jpg`;
     validateImageRequest(req, res, next);
     expect(next).toHaveBeenCalled();
   });
@@ -2,6 +2,24 @@ const cookies = require('cookie');
 const jwt = require('jsonwebtoken');
 const { logger } = require('~/config');

+const OBJECT_ID_LENGTH = 24;
+const OBJECT_ID_PATTERN = /^[0-9a-f]{24}$/i;
+
+/**
+ * Validates if a string is a valid MongoDB ObjectId
+ * @param {string} id - String to validate
+ * @returns {boolean} - Whether string is a valid ObjectId format
+ */
+function isValidObjectId(id) {
+  if (typeof id !== 'string') {
+    return false;
+  }
+  if (id.length !== OBJECT_ID_LENGTH) {
+    return false;
+  }
+  return OBJECT_ID_PATTERN.test(id);
+}
+
 /**
  * Middleware to validate image request.
  * Must be set by `secureImageLinks` via custom config file.
@@ -25,6 +43,11 @@ function validateImageRequest(req, res, next) {
     return res.status(403).send('Access Denied');
   }

+  if (!isValidObjectId(payload.id)) {
+    logger.warn('[validateImageRequest] Invalid User ID');
+    return res.status(403).send('Access Denied');
+  }
+
   const currentTimeInSeconds = Math.floor(Date.now() / 1000);
   if (payload.exp < currentTimeInSeconds) {
     logger.warn('[validateImageRequest] Refresh token expired');
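Constraining `payload.id` to a 24-character hex string means the user id that the middleware later compares against the requested image path (see the spec changes above) can never contain `/`, `..`, or percent-encoded traversal sequences. A standalone check mirroring the helper:

const OBJECT_ID_PATTERN = /^[0-9a-f]{24}$/i;
const isValidObjectId = (id) => typeof id === 'string' && OBJECT_ID_PATTERN.test(id);

console.log(isValidObjectId('65cfb246f7ecadb8b1e8036b')); // true
console.log(isValidObjectId('123'));                      // false: too short
console.log(isValidObjectId('../../etc/passwd12345678')); // false: non-hex characters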
@@ -66,7 +66,13 @@ router.delete('/', async (req, res) => {
       return;
     }

-    await processDeleteRequest({ req, files });
+    const fileIds = files.map((file) => file.file_id);
+    const userFiles = await getFiles({ file_id: { $in: fileIds }, user: req.user.id });
+    if (userFiles.length !== files.length) {
+      return res.status(403).json({ message: 'You can only delete your own files' });
+    }
+
+    await processDeleteRequest({ req, files: userFiles });

     logger.debug(
       `[/files] Files deleted successfully: ${files
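The deletion route now resolves the requested `file_id`s scoped to `req.user.id` and refuses the whole batch if anything is missing, so a request that mixes in another user's file is rejected instead of partially processed. A toy standalone version of the same gate (data and helper are mocked, not the repo's implementations):

// Pretend this is what a user-scoped lookup like getFiles({ file_id, user }) would return
const ownedFileIds = new Set(['file-a', 'file-b']);
const requested = [{ file_id: 'file-a' }, { file_id: 'someone-elses-file' }];

const userFiles = requested.filter((file) => ownedFileIds.has(file.file_id));

if (userFiles.length !== requested.length) {
  console.log('403: You can only delete your own files'); // the mixed batch is refused outright
} else {
  console.log('proceed to delete', userFiles);
}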
@@ -134,7 +134,7 @@ const createPrompt = async (req, res) => {
   }
 };

-router.post('/', createPrompt);
+router.post('/', checkPromptCreate, createPrompt);

 /**
  * Updates a prompt group
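checkPromptCreate is defined elsewhere in the repository and is not shown in this diff; it gates prompt creation on the caller's access role. Purely as a hypothetical sketch of the middleware pattern such a guard follows (none of these names are LibreChat's actual implementation):

// Hypothetical permission-gate middleware, for illustration only
function requireAccess(predicate) {
  return (req, res, next) => {
    if (!predicate(req.user)) {
      return res.status(403).json({ message: 'Forbidden' });
    }
    next();
  };
}

// Usage mirrors the route change above; the predicate itself is invented for the example
// router.post('/', requireAccess((user) => user != null && user.canCreatePrompts === true), createPrompt);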
@@ -8,9 +8,9 @@ const {
 } = require('librechat-data-provider');
 const { retrieveAndProcessFile } = require('~/server/services/Files/process');
 const { recordMessage, getMessages } = require('~/models/Message');
+const { countTokens, escapeRegExp } = require('~/server/utils');
 const { spendTokens } = require('~/models/spendTokens');
 const { saveConvo } = require('~/models/Conversation');
-const { countTokens } = require('~/server/utils');

 /**
  * Initializes a new thread or adds messages to an existing thread.
@@ -518,14 +518,6 @@ const recordUsage = async ({
 const uniqueCitationStart = '^====||===';
 const uniqueCitationEnd = '==|||||^';

-/** Helper function to escape special characters in regex
- * @param {string} string - The string to escape.
- * @returns {string} The escaped string.
- */
-function escapeRegExp(string) {
-  return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
-}
-
 /**
  * Sorts, processes, and flattens messages to a single string.
  *
@@ -1,6 +1,14 @@
 const citationRegex = /\[\^\d+?\^\]/g;
 const regex = / \[.*?]\(.*?\)/g;

+/** Helper function to escape special characters in regex
+ * @param {string} string - The string to escape.
+ * @returns {string} The escaped string.
+ */
+function escapeRegExp(string) {
+  return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
+}
+
 const getCitations = (res) => {
   const adaptiveCards = res.details.adaptiveCards;
   const textBlocks = adaptiveCards && adaptiveCards[0].body;
@@ -47,4 +55,4 @@ const citeText = (res, noLinks = false) => {
   return result;
 };

-module.exports = { getCitations, citeText };
+module.exports = { getCitations, citeText, escapeRegExp };
api/server/utils/import/__data__/chatgpt-citations.json: new file, 345 lines added (diff suppressed because one or more lines are too long)
@@ -277,34 +277,39 @@ function processConversation(conv, importBatchBuilder, requestUserId) {

 /**
  * Processes text content of messages authored by an assistant, inserting citation links as required.
- * Applies citation metadata to construct regex patterns and replacements for inserting links into the text.
+ * Uses citation start and end indices to place links at the correct positions.
  *
  * @param {ChatGPTMessage} messageData - The message data containing metadata about citations.
  * @param {string} messageText - The original text of the message which may be altered by inserting citation links.
  * @returns {string} - The updated message text after processing for citations.
  */
 function processAssistantMessage(messageData, messageText) {
-  const citations = messageData.metadata.citations ?? [];
   if (!messageText) {
     return messageText;
   }
-  for (const citation of citations) {
+  const citations = messageData.metadata?.citations ?? [];
+
+  const sortedCitations = [...citations].sort((a, b) => b.start_ix - a.start_ix);
+
+  let result = messageText;
+  for (const citation of sortedCitations) {
     if (
       !citation.metadata ||
       !citation.metadata.extra ||
-      !citation.metadata.extra.cited_message_idx ||
-      (citation.metadata.type && citation.metadata.type !== 'webpage')
+      !citation.metadata?.type ||
+      citation.metadata.type !== 'webpage' ||
+      typeof citation.start_ix !== 'number' ||
+      typeof citation.end_ix !== 'number' ||
+      citation.start_ix >= citation.end_ix
     ) {
       continue;
     }
-    const pattern = new RegExp(
-      `\\u3010${citation.metadata.extra.cited_message_idx}\\u2020.+?\\u3011`,
-      'g',
-    );
+
     const replacement = ` ([${citation.metadata.title}](${citation.metadata.url}))`;
-    messageText = messageText.replace(pattern, replacement);
+    result = result.slice(0, citation.start_ix) + replacement + result.slice(citation.end_ix);
   }
-  return messageText;
+
+  return result;
 }

 /**
@@ -342,4 +347,4 @@ function formatMessageText(messageData) {
   return messageText;
 }

-module.exports = { getImporter };
+module.exports = { getImporter, processAssistantMessage };
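The previous implementation interpolated `cited_message_idx`, a value read straight from the imported JSON, into new RegExp(...), so a crafted export could smuggle in a pattern like `(a+)+` and stall the import (the ReDoS fixed here). The rewrite never compiles a regex from the data: it sorts citations by `start_ix` in descending order and splices each replacement in by index, which also keeps the earlier offsets valid while the string grows. A standalone miniature of the index-splicing step (the citation objects are simplified relative to the real ChatGPT export format):

// Replace each [start_ix, end_ix) span with a markdown link, working from the end of the string
function spliceCitations(text, citations) {
  const sorted = [...citations].sort((a, b) => b.start_ix - a.start_ix);
  let result = text;
  for (const { start_ix, end_ix, title, url } of sorted) {
    result = result.slice(0, start_ix) + ` ([${title}](${url}))` + result.slice(end_ix);
  }
  return result;
}

const text = 'A claim【3:0†source】 and another【3:1†source】';
const citations = [
  { start_ix: 7, end_ix: 19, title: 'First', url: 'https://example.com/a' },
  { start_ix: 31, end_ix: 43, title: 'Second', url: 'https://example.com/b' },
];
console.log(spliceCitations(text, citations));
// A claim ([First](https://example.com/a)) and another ([Second](https://example.com/b))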
@@ -2,10 +2,10 @@ const fs = require('fs');
 const path = require('path');
 const { EModelEndpoint, Constants, openAISettings } = require('librechat-data-provider');
 const { bulkSaveConvos: _bulkSaveConvos } = require('~/models/Conversation');
+const { getImporter, processAssistantMessage } = require('./importers');
 const { ImportBatchBuilder } = require('./importBatchBuilder');
 const { bulkSaveMessages } = require('~/models/Message');
 const getLogStores = require('~/cache/getLogStores');
-const { getImporter } = require('./importers');

 jest.mock('~/cache/getLogStores');
 const mockedCacheGet = jest.fn();
@@ -404,3 +404,234 @@ describe('getImporter', () => {
     expect(() => getImporter(jsonData)).toThrow('Unsupported import type');
   });
 });
+
+describe('processAssistantMessage', () => {
+  const testMessage = 'This is a test citation 【3:0†source】【3:1†source】';
+
+  const messageData = {
+    metadata: {
+      citations: [
+        {
+          start_ix: 23, // Position of first "【3:0†source】"
+          end_ix: 36, // End of first citation (including closing bracket)
+          citation_format_type: 'tether_og',
+          metadata: {
+            type: 'webpage',
+            title: 'Signal Sciences - Crunchbase Company Profile & Funding',
+            url: 'https://www.crunchbase.com/organization/signal-sciences',
+            text: '',
+            pub_date: null,
+            extra: {
+              evidence_text: 'source',
+              cited_message_idx: 3,
+              search_result_idx: 0,
+            },
+          },
+        },
+        {
+          start_ix: 36, // Position of second "【3:1†source】"
+          end_ix: 49, // End of second citation (including closing bracket)
+          citation_format_type: 'tether_og',
+          metadata: {
+            type: 'webpage',
+            title: 'Demand More from Your WAF - Signal Sciences now part of Fastly',
+            url: 'https://www.signalsciences.com/',
+            text: '',
+            pub_date: null,
+            extra: {
+              evidence_text: 'source',
+              cited_message_idx: 3,
+              search_result_idx: 1,
+            },
+          },
+        },
+      ],
+    },
+  };
+
+  const messageText = testMessage;
+  const expectedOutput =
+    'This is a test citation ([Signal Sciences - Crunchbase Company Profile & Funding](https://www.crunchbase.com/organization/signal-sciences)) ([Demand More from Your WAF - Signal Sciences now part of Fastly](https://www.signalsciences.com/))';
+
+  test('should correctly process citations and replace them with markdown links', () => {
+    const result = processAssistantMessage(messageData, messageText);
+    expect(result).toBe(expectedOutput);
+  });
+
+  test('should handle message with no citations', () => {
+    const messageWithNoCitations = {
+      metadata: {},
+    };
+    const result = processAssistantMessage(messageWithNoCitations, messageText);
+    expect(result).toBe(messageText);
+  });
+
+  test('should handle citations with missing metadata', () => {
+    const messageWithBadCitation = {
+      metadata: {
+        citations: [
+          {
+            start_ix: 85,
+            end_ix: 97,
+          },
+        ],
+      },
+    };
+    const result = processAssistantMessage(messageWithBadCitation, messageText);
+    expect(result).toBe(messageText);
+  });
+
+  test('should handle citations with non-webpage type', () => {
+    const messageWithNonWebpage = {
+      metadata: {
+        citations: [
+          {
+            start_ix: 85,
+            end_ix: 97,
+            metadata: {
+              type: 'other',
+              title: 'Test',
+              url: 'http://test.com',
+            },
+          },
+        ],
+      },
+    };
+    const result = processAssistantMessage(messageWithNonWebpage, messageText);
+    expect(result).toBe(messageText);
+  });
+
+  test('should handle empty message text', () => {
+    const result = processAssistantMessage(messageData, '');
+    expect(result).toBe('');
+  });
+
+  test('should handle undefined message text', () => {
+    const result = processAssistantMessage(messageData, undefined);
+    expect(result).toBe(undefined);
+  });
+
+  test('should handle invalid citation indices', () => {
+    const messageWithBadIndices = {
+      metadata: {
+        citations: [
+          {
+            start_ix: 100,
+            end_ix: 90, // end before start
+            metadata: {
+              type: 'webpage',
+              title: 'Test',
+              url: 'http://test.com',
+            },
+          },
+        ],
+      },
+    };
+    const result = processAssistantMessage(messageWithBadIndices, messageText);
+    expect(result).toBe(messageText);
+  });
+
+  test('should correctly process citations from real ChatGPT data', () => {
+    const jsonData = JSON.parse(
+      fs.readFileSync(path.join(__dirname, '__data__', 'chatgpt-citations.json'), 'utf8'),
+    );
+
+    // Get the message containing citations from the JSON data
+    const assistantMessage = jsonData[0].mapping['4b3aec6b-5146-4bad-ae8e-204fdb6accda'].message;
+
+    const messageText = assistantMessage.content.parts[0];
+    const citations = assistantMessage.metadata.citations;
+
+    // Expected output should have all citations replaced with markdown links
+    const expectedOutput =
+      'Signal Sciences is a web application security company that was founded on March 10, 2014, by Andrew Peterson, Nick Galbreath, and Zane Lackey. It operates as a for-profit company with its legal name being Signal Sciences Corp. The company has achieved significant growth and is recognized as the fastest-growing web application security company in the world. Signal Sciences developed a next-gen web application firewall (NGWAF) and runtime application self-protection (RASP) technologies designed to increase security and maintain reliability without compromising the performance of modern web applications distributed across cloud, on-premise, edge, or hybrid environments ([Signal Sciences - Crunchbase Company Profile & Funding](https://www.crunchbase.com/organization/signal-sciences)) ([Demand More from Your WAF - Signal Sciences now part of Fastly](https://www.signalsciences.com/)).\n\nIn a major development, Fastly, Inc., a provider of an edge cloud platform, announced the completion of its acquisition of Signal Sciences on October 1, 2020. This acquisition was valued at approximately $775 million in cash and stock. By integrating Signal Sciences\' powerful web application and API security solutions with Fastly\'s edge cloud platform and existing security offerings, they aimed to form a unified suite of security solutions. The merger was aimed at expanding Fastly\'s security portfolio, particularly at a time when digital security has become paramount for businesses operating online ([Fastly Completes Acquisition of Signal Sciences | Fastly](https://www.fastly.com/press/press-releases/fastly-completes-acquisition-signal-sciences)) ([Fastly Agrees to Acquire Signal Sciences for $775 Million - Cooley](https://www.cooley.com/news/coverage/2020/2020-08-27-fastly-agrees-to-acquire-signal-sciences-for-775-million)).';
+
+    const result = processAssistantMessage(assistantMessage, messageText);
+    expect(result).toBe(expectedOutput);
+
+    // Additional checks to verify citation processing
+    citations.forEach((citation) => {
+      // Verify each citation was replaced
+      const markdownLink = `([${citation.metadata.title}](${citation.metadata.url}))`;
+      expect(result).toContain(markdownLink);
+
+      // Verify original citation format is not present
+      const originalCitation = messageText.slice(citation.start_ix, citation.end_ix);
+      expect(result).not.toContain(originalCitation);
+    });
+  });
+
+  test('should handle potential ReDoS attack payloads', () => {
+    // Test with increasing input sizes to check for exponential behavior
+    const sizes = [32, 33, 34]; // Adding more sizes would increase test time
+    const regExp = '(a+)+';
+    const results = [];
+
+    sizes.forEach((size) => {
+      const startTime = process.hrtime();
+
+      const maliciousMessageData = {
+        metadata: {
+          citations: [
+            {
+              start_ix: 0,
+              end_ix: size,
+              citation_format_type: 'tether_og',
+              metadata: {
+                type: 'webpage',
+                title: 'Test',
+                url: 'http://test.com',
+                extra: {
+                  cited_message_idx: regExp,
+                },
+              },
+            },
+          ],
+        },
+      };
+
+      const maliciousText = '【' + 'a'.repeat(size) + '】';
+
+      processAssistantMessage(maliciousMessageData, maliciousText);
+
+      const endTime = process.hrtime(startTime);
+      const duration = endTime[0] * 1000 + endTime[1] / 1000000; // Convert to milliseconds
+      results.push(duration);
+    });
+
+    // Check if processing time increases exponentially
+    // In a ReDoS vulnerability, time would roughly double with each size increase
+    for (let i = 1; i < results.length; i++) {
+      const ratio = results[i] / results[i - 1];
+      expect(ratio).toBeLessThan(2); // Processing time should not double
+      console.log(`Size ${sizes[i]} processing time ratio: ${ratio}`);
+    }
+
+    // Also test with the exact payload from the security report
+    const maliciousPayload = {
+      metadata: {
+        citations: [
+          {
+            metadata: {
+              extra: {
+                cited_message_idx: '(a+)+',
+              },
+              type: 'webpage',
+              title: '1',
+              url: '2',
+            },
+          },
+        ],
+      },
+    };
+
+    const text = '【' + 'a'.repeat(32);
+    const startTime = process.hrtime();
+    processAssistantMessage(maliciousPayload, text);
+    const endTime = process.hrtime(startTime);
+    const duration = endTime[0] * 1000 + endTime[1] / 1000000;
+
+    // The processing should complete quickly (under 100ms)
+    expect(duration).toBeLessThan(100);
+  });
+});
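The `(a+)+` payload used above is the classic catastrophic-backtracking shape: each additional `a` before a failing character roughly doubles the work for a backtracking regex engine. The timing assertions stay flat now because the import path no longer compiles untrusted input into a RegExp at all. For context, a small standalone demonstration of the behavior the old code path risked (keep n small if you run it):

// Nested quantifiers plus a failing suffix make the match time grow exponentially with n
function timeMatch(n) {
  const pattern = new RegExp('^(a+)+$'); // analogous to compiling a pattern from untrusted input
  const input = 'a'.repeat(n) + '!';     // the trailing '!' forces the match to fail
  const start = process.hrtime.bigint();
  pattern.test(input);
  const end = process.hrtime.bigint();
  return Number(end - start) / 1e6; // milliseconds
}

[20, 22, 24].forEach((n) => console.log(`n=${n}: ${timeMatch(n).toFixed(2)} ms`));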
@@ -1,4 +1,5 @@
 import React from 'react';
+import { useLocalize } from '~/hooks';

 type TPluginPaginationProps = {
   currentPage: number;
@@ -11,6 +12,7 @@ const PluginPagination: React.FC<TPluginPaginationProps> = ({
   maxPage,
   onChangePage,
 }) => {
+  const localize = useLocalize();
   const pages = [...Array(maxPage).keys()].map((i) => i + 1);

   const handlePageChange = (page: number) => {
@@ -24,8 +26,14 @@ const PluginPagination: React.FC<TPluginPaginationProps> = ({
     <div className="flex gap-2 text-sm text-black/60 dark:text-white/70">
       <div
         role="button"
+        tabIndex={0}
+        aria-label="Previous page"
         onClick={() => handlePageChange(currentPage - 1)}
+        onKeyDown={(e) => {
+          if (e.key === 'Enter' || e.key === ' ') {
+            onChangePage(currentPage - 1);
+          }
+        }}
         className={`flex cursor-default items-center text-sm ${
           currentPage === 1
             ? 'text-black/70 opacity-50 dark:text-white/70'
@@ -45,14 +53,15 @@ const PluginPagination: React.FC<TPluginPaginationProps> = ({
           width="1em"
           xmlns="http://www.w3.org/2000/svg"
         >
-          <polyline points="15 18 9 12 15 6"></polyline>
+          <polyline points="15 18 9 12 15 6" />
         </svg>
-        Prev
+        {localize('com_ui_prev')}
       </div>
       {pages.map((page) => (
         <div
+          role="button"
           key={page}
+          tabIndex={0}
           className={`flex h-5 w-5 items-center justify-center text-sm ${
             currentPage === page
               ? 'text-blue-600 hover:text-blue-600 dark:text-blue-600 dark:hover:text-blue-600'
@@ -60,6 +69,11 @@ const PluginPagination: React.FC<TPluginPaginationProps> = ({
           }`}
           style={{ userSelect: 'none' }}
           onClick={() => onChangePage(page)}
+          onKeyDown={(e) => {
+            if (e.key === 'Enter' || e.key === ' ') {
+              onChangePage(page);
+            }
+          }}
         >
           {page}
         </div>
@@ -67,7 +81,13 @@ const PluginPagination: React.FC<TPluginPaginationProps> = ({
       <div
         role="button"
+        aria-label="Next page"
+        tabIndex={0}
         onClick={() => handlePageChange(currentPage + 1)}
+        onKeyDown={(e) => {
+          if (e.key === 'Enter' || e.key === ' ') {
+            onChangePage(currentPage + 1);
+          }
+        }}
         className={`flex cursor-default items-center text-sm ${
           currentPage === maxPage
             ? 'text-black/70 opacity-50 dark:text-white/70'
@@ -75,7 +95,7 @@ const PluginPagination: React.FC<TPluginPaginationProps> = ({
         }`}
         style={{ userSelect: 'none' }}
       >
-        Next
+        {localize('com_ui_next')}
        <svg
          stroke="currentColor"
          fill="none"
@@ -88,7 +108,7 @@ const PluginPagination: React.FC<TPluginPaginationProps> = ({
           width="1em"
           xmlns="http://www.w3.org/2000/svg"
         >
-          <polyline points="9 18 15 12 9 6"></polyline>
+          <polyline points="9 18 15 12 9 6" />
         </svg>
       </div>
     </div>
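Because the pagination controls are plain divs, the changes add what a native button would provide by default: role="button", tabIndex={0} so the element is focusable, an aria-label, and an onKeyDown handler that activates on Enter or Space. A framework-free sketch of the same keyboard-activation pattern (plain DOM, not the React component above):

// Give a non-button element the keyboard behavior the diff adds to the pagination divs
const el = document.createElement('div');
el.setAttribute('role', 'button');
el.setAttribute('tabindex', '0');
el.setAttribute('aria-label', 'Next page');

const activate = () => console.log('go to next page');
el.addEventListener('click', activate);
el.addEventListener('keydown', (event) => {
  if (event.key === 'Enter' || event.key === ' ') {
    event.preventDefault(); // keep Space from scrolling; the component above omits this
    activate();
  }
});
document.body.appendChild(el);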
@@ -81,10 +81,10 @@ const CreatePromptForm = ({
       CreateFormValues,
       'name' | 'category' | 'oneliner' | 'command'
     >;
-    if ((oneliner?.length || 0) > 0) {
+    if ((oneliner?.length ?? 0) > 0) {
       groupData.oneliner = oneliner;
     }
-    if ((command?.length || 0) > 0) {
+    if ((command?.length ?? 0) > 0) {
       groupData.command = command;
     }
     createPromptMutation.mutate({
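The switch from || to ?? only changes behavior for falsy-but-defined values: ?? falls back solely on null or undefined. Here `oneliner?.length` is either a number or undefined, so the nullish form states the intent more precisely without changing the outcome of the > 0 check. The difference in three lines:

console.log(0 || 5);         // 5: || also replaces legitimate falsy values such as 0 or ''
console.log(0 ?? 5);         // 0: ?? only falls back for null/undefined
console.log(undefined ?? 5); // 5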
@@ -111,7 +111,7 @@ const CreatePromptForm = ({
               <Input
                 {...field}
                 type="text"
-                className="mr-2 w-full border border-border-medium p-2 text-2xl placeholder:text-text-tertiary dark:placeholder:text-text-secondary"
+                className="mr-2 w-full border border-border-medium p-2 text-2xl text-text-primary placeholder:text-text-tertiary dark:placeholder:text-text-secondary"
                 placeholder={`${localize('com_ui_prompt_name')}*`}
                 tabIndex={0}
               />
@@ -35,7 +35,7 @@ export function FilterItem({
     >
       {icon}
       <span>{label}</span>
-      {isActive && (
+      {isActive === true && (
        <span className="absolute bottom-0 right-0 top-0 flex items-center">
          <Dot />
        </span>
@@ -122,7 +122,7 @@ export default function FilterPrompts({
        return setCategory('');
      }
      setCategory(category);
-      if (icon && React.isValidElement(icon)) {
+      if (icon != null && React.isValidElement(icon)) {
        setSelectedIcon(icon);
      }
    },
@@ -130,7 +130,7 @@ export default function FilterPrompts({
   );

   return (
-    <div className={cn('flex gap-2', className)}>
+    <div className={cn('flex gap-2 text-text-primary', className)}>
       <DropdownMenu>
         <DropdownMenuTrigger asChild>
           <Button
@@ -7,7 +7,7 @@ const loadingCategories = [
     label: 'Loading...',
     value: '',
   },
-];
+] as undefined | { label: string; value: string }[];

 const emptyCategory = {
   label: '-',