🔍 refactor: OpenID Fetch Handling and Logging (#7790)

* feat: Enhance OpenID Strategy with Debug Logging and Header Management

- Added detailed logging for OpenID requests and responses when debug mode is enabled.
- Introduced helper functions for safely logging sensitive data and headers.
- Updated OpenID strategy to handle non-standard WWW-Authenticate headers in responses.
- Refactored proxy configuration handling for improved clarity and logging.

* refactor: MemoryViewer Layout with Conditional Justification

- Updated the MemoryViewer component to conditionally apply justification styles based on memory data and access permissions.
- Introduced utility function `cn` for cleaner class name management in the component.

* refactor: Update OpenID Strategy to use Global Fetch

* refactor: Add undici for customFetch request handling in OpenID strategy

* fix: Export 'files' module in utils index

* chore: Add node-fetch dependency for openid image download

* ci: Add comprehensive tests for multer configuration and file handling

- Introduced a new test suite for multer configuration, covering storage destination and filename generation.
- Implemented tests for file filtering, ensuring only valid JSON files are accepted.
- Added error handling tests for edge cases and vulnerabilities, including handling empty field names and malformed filenames.
- Integrated real configuration testing with actual fileConfig and custom endpoints.
- Enhanced UUID generation tests to ensure uniqueness and cryptographic security.

* chore: Improve proxy configuration logging in customFetch function

* fix: Improve logging for non-standard WWW-Authenticate header in customFetch function
This commit is contained in:
Danny Avila 2025-06-09 11:27:23 -04:00 committed by GitHub
parent b0054c775a
commit 272522452a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 719 additions and 9 deletions

View file

@@ -110,6 +110,7 @@
"tiktoken": "^1.0.15", "tiktoken": "^1.0.15",
"traverse": "^0.6.7", "traverse": "^0.6.7",
"ua-parser-js": "^1.0.36", "ua-parser-js": "^1.0.36",
"undici": "^7.10.0",
"winston": "^3.11.0", "winston": "^3.11.0",
"winston-daily-rotate-file": "^5.0.0", "winston-daily-rotate-file": "^5.0.0",
"youtube-transcript": "^1.2.1", "youtube-transcript": "^1.2.1",

View file

@@ -0,0 +1,571 @@
/* eslint-disable no-unused-vars */
/* eslint-disable jest/no-done-callback */
const fs = require('fs');
const os = require('os');
const path = require('path');
const crypto = require('crypto');
const { createMulterInstance, storage, importFileFilter } = require('./multer');
// Mock only the config service that requires external dependencies
jest.mock('~/server/services/Config', () => ({
getCustomConfig: jest.fn(() =>
Promise.resolve({
fileConfig: {
endpoints: {
openAI: {
supportedMimeTypes: ['image/jpeg', 'image/png', 'application/pdf'],
},
default: {
supportedMimeTypes: ['image/jpeg', 'image/png', 'text/plain'],
},
},
serverFileSizeLimit: 10000000, // 10MB
},
}),
),
}));
describe('Multer Configuration', () => {
let tempDir;
let mockReq;
let mockFile;
beforeEach(() => {
// Create a temporary directory for each test
tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'multer-test-'));
mockReq = {
user: { id: 'test-user-123' },
app: {
locals: {
paths: {
uploads: tempDir,
},
},
},
body: {},
originalUrl: '/api/files/upload',
};
mockFile = {
originalname: 'test-file.jpg',
mimetype: 'image/jpeg',
size: 1024,
};
// Clear mocks
jest.clearAllMocks();
});
afterEach(() => {
// Clean up temporary directory
if (fs.existsSync(tempDir)) {
fs.rmSync(tempDir, { recursive: true, force: true });
}
});
describe('Storage Configuration', () => {
describe('destination function', () => {
it('should create the correct destination path', (done) => {
const cb = jest.fn((err, destination) => {
expect(err).toBeNull();
expect(destination).toBe(path.join(tempDir, 'temp', 'test-user-123'));
expect(fs.existsSync(destination)).toBe(true);
done();
});
storage.getDestination(mockReq, mockFile, cb);
});
it("should create directory recursively if it doesn't exist", (done) => {
const deepPath = path.join(tempDir, 'deep', 'nested', 'path');
mockReq.app.locals.paths.uploads = deepPath;
const cb = jest.fn((err, destination) => {
expect(err).toBeNull();
expect(destination).toBe(path.join(deepPath, 'temp', 'test-user-123'));
expect(fs.existsSync(destination)).toBe(true);
done();
});
storage.getDestination(mockReq, mockFile, cb);
});
});
describe('filename function', () => {
it('should generate a UUID for req.file_id', (done) => {
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(mockReq.file_id).toBeDefined();
expect(mockReq.file_id).toMatch(
/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i,
);
done();
});
storage.getFilename(mockReq, mockFile, cb);
});
it('should decode URI components in filename', (done) => {
const encodedFile = {
...mockFile,
originalname: encodeURIComponent('test file with spaces.jpg'),
};
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(encodedFile.originalname).toBe('test file with spaces.jpg');
done();
});
storage.getFilename(mockReq, encodedFile, cb);
});
it('should call real sanitizeFilename with properly encoded filename', (done) => {
// Test with a properly URI-encoded filename that needs sanitization
const unsafeFile = {
...mockFile,
originalname: encodeURIComponent('test@#$%^&*()file with spaces!.jpg'),
};
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
// The actual sanitizeFilename should have cleaned this up after decoding
expect(filename).not.toContain('@');
expect(filename).not.toContain('#');
expect(filename).not.toContain('*');
expect(filename).not.toContain('!');
// Should still preserve dots and hyphens
expect(filename).toContain('.jpg');
done();
});
storage.getFilename(mockReq, unsafeFile, cb);
});
it('should handle very long filenames with actual crypto', (done) => {
const longFile = {
...mockFile,
originalname: 'a'.repeat(300) + '.jpg',
};
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(filename.length).toBeLessThanOrEqual(255);
expect(filename).toMatch(/\.jpg$/); // Should still end with .jpg
// Should contain a hex suffix if truncated
if (filename.length === 255) {
expect(filename).toMatch(/-[a-f0-9]{6}\.jpg$/);
}
done();
});
storage.getFilename(mockReq, longFile, cb);
});
it('should generate unique file_id for each call', (done) => {
let firstFileId;
const firstCb = jest.fn((err, filename) => {
expect(err).toBeNull();
firstFileId = mockReq.file_id;
// Reset req for second call
delete mockReq.file_id;
const secondCb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(mockReq.file_id).toBeDefined();
expect(mockReq.file_id).not.toBe(firstFileId);
done();
});
storage.getFilename(mockReq, mockFile, secondCb);
});
storage.getFilename(mockReq, mockFile, firstCb);
});
});
});
describe('Import File Filter', () => {
it('should accept JSON files by mimetype', (done) => {
const jsonFile = {
...mockFile,
mimetype: 'application/json',
originalname: 'data.json',
};
const cb = jest.fn((err, result) => {
expect(err).toBeNull();
expect(result).toBe(true);
done();
});
importFileFilter(mockReq, jsonFile, cb);
});
it('should accept files with .json extension', (done) => {
const jsonFile = {
...mockFile,
mimetype: 'text/plain',
originalname: 'data.json',
};
const cb = jest.fn((err, result) => {
expect(err).toBeNull();
expect(result).toBe(true);
done();
});
importFileFilter(mockReq, jsonFile, cb);
});
it('should reject non-JSON files', (done) => {
const textFile = {
...mockFile,
mimetype: 'text/plain',
originalname: 'document.txt',
};
const cb = jest.fn((err, result) => {
expect(err).toBeInstanceOf(Error);
expect(err.message).toBe('Only JSON files are allowed');
expect(result).toBe(false);
done();
});
importFileFilter(mockReq, textFile, cb);
});
it('should handle files with uppercase .JSON extension', (done) => {
const jsonFile = {
...mockFile,
mimetype: 'text/plain',
originalname: 'DATA.JSON',
};
const cb = jest.fn((err, result) => {
expect(err).toBeNull();
expect(result).toBe(true);
done();
});
importFileFilter(mockReq, jsonFile, cb);
});
});
describe('File Filter with Real defaultFileConfig', () => {
it('should use real fileConfig.checkType for validation', async () => {
// Test with actual librechat-data-provider functions
const {
fileConfig,
imageMimeTypes,
applicationMimeTypes,
} = require('librechat-data-provider');
// Test that the real checkType function works with regex patterns
expect(fileConfig.checkType('image/jpeg', [imageMimeTypes])).toBe(true);
expect(fileConfig.checkType('video/mp4', [imageMimeTypes])).toBe(false);
expect(fileConfig.checkType('application/pdf', [applicationMimeTypes])).toBe(true);
expect(fileConfig.checkType('application/pdf', [])).toBe(false);
});
it('should handle audio files for speech-to-text endpoint with real config', async () => {
mockReq.originalUrl = '/api/speech/stt';
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
expect(typeof multerInstance.single).toBe('function');
});
it('should reject unsupported file types using real config', async () => {
// Mock defaultFileConfig for this specific test
const originalCheckType = require('librechat-data-provider').fileConfig.checkType;
const mockCheckType = jest.fn().mockReturnValue(false);
require('librechat-data-provider').fileConfig.checkType = mockCheckType;
try {
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
// Test the actual file filter behavior would reject unsupported files
expect(mockCheckType).toBeDefined();
} finally {
// Restore original function
require('librechat-data-provider').fileConfig.checkType = originalCheckType;
}
});
it('should use real mergeFileConfig function', async () => {
const { mergeFileConfig, mbToBytes } = require('librechat-data-provider');
// Test with actual merge function - note that it converts MB to bytes
const testConfig = {
serverFileSizeLimit: 5, // 5 MB
endpoints: {
custom: {
supportedMimeTypes: ['text/plain'],
},
},
};
const result = mergeFileConfig(testConfig);
// The function converts MB to bytes, so 5 MB becomes 5 * 1024 * 1024 bytes
expect(result.serverFileSizeLimit).toBe(mbToBytes(5));
expect(result.endpoints.custom.supportedMimeTypes).toBeDefined();
// Should still have the default endpoints
expect(result.endpoints.default).toBeDefined();
});
});
describe('createMulterInstance with Real Functions', () => {
it('should create a multer instance with correct configuration', async () => {
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
expect(typeof multerInstance.single).toBe('function');
expect(typeof multerInstance.array).toBe('function');
expect(typeof multerInstance.fields).toBe('function');
});
it('should use real config merging', async () => {
const { getCustomConfig } = require('~/server/services/Config');
const multerInstance = await createMulterInstance();
expect(getCustomConfig).toHaveBeenCalled();
expect(multerInstance).toBeDefined();
});
it('should create multer instance with expected interface', async () => {
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
expect(typeof multerInstance.single).toBe('function');
expect(typeof multerInstance.array).toBe('function');
expect(typeof multerInstance.fields).toBe('function');
});
});
describe('Real Crypto Integration', () => {
it('should use actual crypto.randomUUID()', (done) => {
// Spy on crypto.randomUUID to ensure it's called
const uuidSpy = jest.spyOn(crypto, 'randomUUID');
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(uuidSpy).toHaveBeenCalled();
expect(mockReq.file_id).toMatch(
/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i,
);
uuidSpy.mockRestore();
done();
});
storage.getFilename(mockReq, mockFile, cb);
});
it('should generate different UUIDs on subsequent calls', (done) => {
const uuids = [];
let callCount = 0;
const totalCalls = 5;
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
uuids.push(mockReq.file_id);
callCount++;
if (callCount === totalCalls) {
// Check that all UUIDs are unique
const uniqueUuids = new Set(uuids);
expect(uniqueUuids.size).toBe(totalCalls);
done();
} else {
// Reset for next call
delete mockReq.file_id;
storage.getFilename(mockReq, mockFile, cb);
}
});
// Start the chain
storage.getFilename(mockReq, mockFile, cb);
});
it('should generate cryptographically secure UUIDs', (done) => {
const generatedUuids = new Set();
let callCount = 0;
const totalCalls = 10;
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
// Verify UUID format and uniqueness
expect(mockReq.file_id).toMatch(
/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i,
);
generatedUuids.add(mockReq.file_id);
callCount++;
if (callCount === totalCalls) {
// All UUIDs should be unique
expect(generatedUuids.size).toBe(totalCalls);
done();
} else {
// Reset for next call
delete mockReq.file_id;
storage.getFilename(mockReq, mockFile, cb);
}
});
// Start the chain
storage.getFilename(mockReq, mockFile, cb);
});
});
describe('Error Handling', () => {
it('should handle CVE-2024-28870: empty field name DoS vulnerability', async () => {
// Test for the CVE where empty field name could cause unhandled exception
const multerInstance = await createMulterInstance();
// Create a mock request with empty field name (the vulnerability scenario)
const mockReqWithEmptyField = {
...mockReq,
headers: {
'content-type': 'multipart/form-data',
},
};
const mockRes = {
status: jest.fn().mockReturnThis(),
json: jest.fn(),
end: jest.fn(),
};
// This should not crash or throw unhandled exceptions
const uploadMiddleware = multerInstance.single(''); // Empty field name
const mockNext = jest.fn((err) => {
// If there's an error, it should be handled gracefully, not crash
if (err) {
expect(err).toBeInstanceOf(Error);
// The error should be handled, not crash the process
}
});
// This should complete without crashing the process
expect(() => {
uploadMiddleware(mockReqWithEmptyField, mockRes, mockNext);
}).not.toThrow();
});
it('should handle file system errors when directory creation fails', (done) => {
// Test with a non-existent parent directory to simulate fs issues
const invalidPath = '/nonexistent/path/that/should/not/exist';
mockReq.app.locals.paths.uploads = invalidPath;
try {
// Call getDestination which should fail due to permission/path issues
storage.getDestination(mockReq, mockFile, (err, destination) => {
// If callback is reached, we didn't get the expected error
done(new Error('Expected mkdirSync to throw an error but callback was called'));
});
// If we get here without throwing, something unexpected happened
done(new Error('Expected mkdirSync to throw an error but no error was thrown'));
} catch (error) {
// This is the expected behavior - mkdirSync throws synchronously for invalid paths
expect(error.code).toBe('EACCES');
done();
}
});
it('should handle malformed filenames with real sanitization', (done) => {
const malformedFile = {
...mockFile,
originalname: null, // This should be handled gracefully
};
const cb = jest.fn((err, filename) => {
// The function should handle this gracefully
expect(typeof err === 'object' || err === null).toBe(true);
done();
});
try {
storage.getFilename(mockReq, malformedFile, cb);
} catch (error) {
// If it throws, that's also acceptable behavior
done();
}
});
it('should handle edge cases in filename sanitization', (done) => {
const edgeCaseFiles = [
{ originalname: '', expected: /_/ },
{ originalname: '.hidden', expected: /^_\.hidden/ },
{ originalname: '../../../etc/passwd', expected: /passwd/ },
{ originalname: 'file\x00name.txt', expected: /file_name\.txt/ },
];
let testCount = 0;
const testNextFile = (fileData) => {
const fileToTest = { ...mockFile, originalname: fileData.originalname };
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(filename).toMatch(fileData.expected);
testCount++;
if (testCount === edgeCaseFiles.length) {
done();
} else {
testNextFile(edgeCaseFiles[testCount]);
}
});
storage.getFilename(mockReq, fileToTest, cb);
};
testNextFile(edgeCaseFiles[0]);
});
});
describe('Real Configuration Testing', () => {
it('should handle missing custom config gracefully with real mergeFileConfig', async () => {
const { getCustomConfig } = require('~/server/services/Config');
// Mock getCustomConfig to return undefined
getCustomConfig.mockResolvedValueOnce(undefined);
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
expect(typeof multerInstance.single).toBe('function');
});
it('should properly integrate real fileConfig with custom endpoints', async () => {
const { getCustomConfig } = require('~/server/services/Config');
// Mock a custom config with additional endpoints
getCustomConfig.mockResolvedValueOnce({
fileConfig: {
endpoints: {
anthropic: {
supportedMimeTypes: ['text/plain', 'image/png'],
},
},
serverFileSizeLimit: 20, // 20 MB
},
});
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
// Verify that getCustomConfig was called (we can't spy on the actual merge function easily)
expect(getCustomConfig).toHaveBeenCalled();
});
});
});

View file

@@ -1,3 +1,4 @@
const undici = require('undici');
const fetch = require('node-fetch'); const fetch = require('node-fetch');
const passport = require('passport'); const passport = require('passport');
const client = require('openid-client'); const client = require('openid-client');
@@ -6,17 +7,87 @@ const { CacheKeys } = require('librechat-data-provider');
const { HttpsProxyAgent } = require('https-proxy-agent'); const { HttpsProxyAgent } = require('https-proxy-agent');
const { hashToken, logger } = require('@librechat/data-schemas'); const { hashToken, logger } = require('@librechat/data-schemas');
const { Strategy: OpenIDStrategy } = require('openid-client/passport'); const { Strategy: OpenIDStrategy } = require('openid-client/passport');
const { isEnabled, safeStringify, logHeaders } = require('@librechat/api');
const { getStrategyFunctions } = require('~/server/services/Files/strategies'); const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { findUser, createUser, updateUser } = require('~/models'); const { findUser, createUser, updateUser } = require('~/models');
const { getBalanceConfig } = require('~/server/services/Config'); const { getBalanceConfig } = require('~/server/services/Config');
const getLogStores = require('~/cache/getLogStores'); const getLogStores = require('~/cache/getLogStores');
const { isEnabled } = require('~/server/utils');
/** /**
* @typedef {import('openid-client').ClientMetadata} ClientMetadata * @typedef {import('openid-client').ClientMetadata} ClientMetadata
* @typedef {import('openid-client').Configuration} Configuration * @typedef {import('openid-client').Configuration} Configuration
**/ **/
/**
* @param {string} url
* @param {client.CustomFetchOptions} options
*/
async function customFetch(url, options) {
const urlStr = url.toString();
logger.debug(`[openidStrategy] Request to: ${urlStr}`);
const debugOpenId = isEnabled(process.env.DEBUG_OPENID_REQUESTS);
if (debugOpenId) {
logger.debug(`[openidStrategy] Request method: ${options.method || 'GET'}`);
logger.debug(`[openidStrategy] Request headers: ${logHeaders(options.headers)}`);
if (options.body) {
let bodyForLogging = '';
if (options.body instanceof URLSearchParams) {
bodyForLogging = options.body.toString();
} else if (typeof options.body === 'string') {
bodyForLogging = options.body;
} else {
bodyForLogging = safeStringify(options.body);
}
logger.debug(`[openidStrategy] Request body: ${bodyForLogging}`);
}
}
try {
/** @type {undici.RequestInit} */
let fetchOptions = options;
if (process.env.PROXY) {
logger.info(`[openidStrategy] proxy agent configured: ${process.env.PROXY}`);
fetchOptions = {
...options,
dispatcher: new HttpsProxyAgent(process.env.PROXY),
};
}
const response = await undici.fetch(url, fetchOptions);
if (debugOpenId) {
logger.debug(`[openidStrategy] Response status: ${response.status} ${response.statusText}`);
logger.debug(`[openidStrategy] Response headers: ${logHeaders(response.headers)}`);
}
if (response.status === 200 && response.headers.has('www-authenticate')) {
const wwwAuth = response.headers.get('www-authenticate');
logger.warn(`[openidStrategy] Non-standard WWW-Authenticate header found in successful response (200 OK): ${wwwAuth}.
This violates RFC 7235 and may cause issues with strict OAuth clients. Removing header for compatibility.`);
/** Cloned response without the WWW-Authenticate header */
const responseBody = await response.arrayBuffer();
const newHeaders = new Headers();
for (const [key, value] of response.headers.entries()) {
if (key.toLowerCase() !== 'www-authenticate') {
newHeaders.append(key, value);
}
}
return new Response(responseBody, {
status: response.status,
statusText: response.statusText,
headers: newHeaders,
});
}
return response;
} catch (error) {
logger.error(`[openidStrategy] Fetch error: ${error.message}`);
throw error;
}
}
/** @typedef {Configuration | null} */ /** @typedef {Configuration | null} */
let openidConfig = null; let openidConfig = null;
@@ -208,14 +279,12 @@ async function setupOpenId() {
new URL(process.env.OPENID_ISSUER), new URL(process.env.OPENID_ISSUER),
process.env.OPENID_CLIENT_ID, process.env.OPENID_CLIENT_ID,
clientMetadata, clientMetadata,
undefined,
{
[client.customFetch]: customFetch,
},
); );
if (process.env.PROXY) {
const proxyAgent = new HttpsProxyAgent(process.env.PROXY);
openidConfig[client.customFetch] = (...args) => {
return fetch(args[0], { ...args[1], agent: proxyAgent });
};
logger.info(`[openidStrategy] proxy agent added: ${process.env.PROXY}`);
}
const requiredRole = process.env.OPENID_REQUIRED_ROLE; const requiredRole = process.env.OPENID_REQUIRED_ROLE;
const requiredRoleParameterPath = process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH; const requiredRoleParameterPath = process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH;
const requiredRoleTokenKind = process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND; const requiredRoleTokenKind = process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND;

View file

@@ -34,6 +34,7 @@ import MemoryCreateDialog from './MemoryCreateDialog';
import MemoryEditDialog from './MemoryEditDialog'; import MemoryEditDialog from './MemoryEditDialog';
import { useToastContext } from '~/Providers'; import { useToastContext } from '~/Providers';
import AdminSettings from './AdminSettings'; import AdminSettings from './AdminSettings';
import { cn } from '~/utils';
export default function MemoryViewer() { export default function MemoryViewer() {
const localize = useLocalize(); const localize = useLocalize();
@@ -251,7 +252,12 @@
</div> </div>
{/* Memory Usage and Toggle Display */} {/* Memory Usage and Toggle Display */}
{(memData?.tokenLimit || hasOptOutAccess) && ( {(memData?.tokenLimit || hasOptOutAccess) && (
<div className="flex items-center justify-between rounded-lg"> <div
className={cn(
'flex items-center rounded-lg',
memData?.tokenLimit != null && hasOptOutAccess ? 'justify-between' : 'justify-end',
)}
>
{/* Usage Display */} {/* Usage Display */}
{memData?.tokenLimit && ( {memData?.tokenLimit && (
<div className="flex items-center gap-2"> <div className="flex items-center gap-2">

10
package-lock.json generated
View file

@@ -126,6 +126,7 @@
"tiktoken": "^1.0.15", "tiktoken": "^1.0.15",
"traverse": "^0.6.7", "traverse": "^0.6.7",
"ua-parser-js": "^1.0.36", "ua-parser-js": "^1.0.36",
"undici": "^7.10.0",
"winston": "^3.11.0", "winston": "^3.11.0",
"winston-daily-rotate-file": "^5.0.0", "winston-daily-rotate-file": "^5.0.0",
"youtube-transcript": "^1.2.1", "youtube-transcript": "^1.2.1",
@@ -2403,6 +2404,15 @@
"@img/sharp-win32-x64": "0.33.5" "@img/sharp-win32-x64": "0.33.5"
} }
}, },
"api/node_modules/undici": {
"version": "7.10.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-7.10.0.tgz",
"integrity": "sha512-u5otvFBOBZvmdjWLVW+5DAc9Nkq8f24g0O9oY7qw2JVIF1VocIFoyz9JFkuVOS2j41AufeO0xnlweJ2RLT8nGw==",
"license": "MIT",
"engines": {
"node": ">=20.18.1"
}
},
"api/node_modules/uuid": { "api/node_modules/uuid": {
"version": "10.0.0", "version": "10.0.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz",

View file

@@ -1,5 +1,7 @@
export * from './azure'; export * from './azure';
export * from './common'; export * from './common';
export * from './events'; export * from './events';
export * from './files';
export * from './generators'; export * from './generators';
export * from './openid';
export { default as Tokenizer } from './tokenizer'; export { default as Tokenizer } from './tokenizer';

View file

@@ -0,0 +1,51 @@
/**
* Helper function to safely log sensitive data when debug mode is enabled
* @param obj - Object to stringify
* @param maxLength - Maximum length of the stringified output
* @returns Stringified object with sensitive data masked
*/
export function safeStringify(obj: unknown, maxLength = 1000): string {
try {
const str = JSON.stringify(obj, (key, value) => {
// Mask sensitive values
if (
key === 'client_secret' ||
key === 'Authorization' ||
key.toLowerCase().includes('token') ||
key.toLowerCase().includes('password')
) {
return typeof value === 'string' && value.length > 6
? `${value.substring(0, 3)}...${value.substring(value.length - 3)}`
: '***MASKED***';
}
return value;
});
if (str && str.length > maxLength) {
return `${str.substring(0, maxLength)}... (truncated)`;
}
return str;
} catch (error) {
return `[Error stringifying object: ${(error as Error).message}]`;
}
}
/**
* Helper to log headers without revealing sensitive information
* @param headers - Headers object to log
* @returns Stringified headers with sensitive data masked
*/
export function logHeaders(headers: Headers | undefined | null): string {
const headerObj: Record<string, string> = {};
if (!headers || typeof headers.entries !== 'function') {
return 'No headers available';
}
for (const [key, value] of headers.entries()) {
if (key.toLowerCase() === 'authorization' || key.toLowerCase().includes('secret')) {
headerObj[key] = '***MASKED***';
} else {
headerObj[key] = value;
}
}
return safeStringify(headerObj);
}