mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-02-22 18:34:08 +01:00
feat: implement Anthropic native PDF support with document preservation
- Add comprehensive debug logging throughout PDF processing pipeline
- Refactor attachment processing to separate image and document handling
- Create distinct addImageURLs(), addDocuments(), and processAttachments() methods
- Fix critical bugs in stream handling and parameter passing
- Add streamToBuffer utility for proper stream-to-buffer conversion
- Remove api/agents submodule from repository

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
parent
007570b5c6
commit
6605b6c800
53 changed files with 630 additions and 145 deletions
166
api/server/services/Files/documents/encode.js
Normal file
166
api/server/services/Files/documents/encode.js
Normal file
|
|
@ -0,0 +1,166 @@
|
|||
const { EModelEndpoint } = require('librechat-data-provider');
|
||||
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
||||
const { validateAnthropicPdf } = require('../validation/pdfValidator');
|
||||
|
||||
/**
 * Converts a readable stream to a buffer.
 *
 * Collects every chunk via async iteration and concatenates them once the
 * stream ends. The stream is destroyed afterwards in all cases (success or
 * failure) so underlying file handles/sockets are released.
 *
 * @param {NodeJS.ReadableStream} stream - The readable stream to convert.
 * @returns {Promise<Buffer>} - Promise resolving to the buffer.
 */
async function streamToBuffer(stream) {
  const chunks = [];
  try {
    // `for await` resolves on 'end' and rejects on 'error', replacing the
    // hand-rolled `new Promise` wrapper with manual listener bookkeeping.
    for await (const chunk of stream) {
      chunks.push(chunk);
    }
    return Buffer.concat(chunks);
  } finally {
    // Release underlying resources even if iteration threw.
    if (typeof stream.destroy === 'function') {
      stream.destroy();
    }
  }
}
|
||||
|
||||
/**
 * Processes and encodes document files for various endpoints.
 *
 * Currently only Anthropic PDF documents are encoded; every other file type
 * is skipped. Each PDF is downloaded through its storage strategy, base64
 * encoded, validated against Anthropic's limits, and emitted as a `document`
 * content part. Files whose download fails are still reported in
 * `result.files` (without a document part) so callers can track them.
 *
 * @param {Express.Request} req - Express request object
 * @param {MongoFile[]} files - Array of file objects to process
 * @param {string} endpoint - The endpoint identifier (e.g., EModelEndpoint.anthropic)
 * @returns {Promise<{documents: MessageContentDocument[], files: MongoFile[]}>}
 * @throws {Error} When a downloaded PDF fails Anthropic validation.
 */
async function encodeAndFormatDocuments(req, files, endpoint) {
  /** @type {Record<FileSources, Pick<ReturnType<typeof getStrategyFunctions>, 'prepareDocumentPayload' | 'getDownloadStream'>>} */
  const encodingMethods = {};
  /** @type {{ documents: MessageContentDocument[]; files: MongoFile[] }} */
  const result = {
    documents: [],
    files: [],
  };

  if (!files || !files.length) {
    return result;
  }

  // Filter for document files only; 'application/pdf' is already covered by
  // the prefix check (future: support for other document types).
  const documentFiles = files.filter((file) => file.type?.startsWith('application/'));

  if (!documentFiles.length) {
    return result;
  }

  /** [file, metadata] pairs queued for download — not promises yet. */
  const entries = [];

  for (const file of documentFiles) {
    /** @type {FileSources} */
    const source = file.source ?? 'local';

    // Only process PDFs for Anthropic for now
    if (file.type !== 'application/pdf' || endpoint !== EModelEndpoint.anthropic) {
      continue;
    }

    // Cache strategy functions per storage source
    if (!encodingMethods[source]) {
      encodingMethods[source] = getStrategyFunctions(source);
    }

    // Metadata returned to the caller alongside the document part.
    // NOTE(review): `source` is intentionally the raw `file.source` (may be
    // undefined), matching what downstream consumers received before.
    const fileMetadata = {
      file_id: file.file_id || file._id,
      temp_file_id: file.temp_file_id,
      filepath: file.filepath,
      source: file.source,
      filename: file.filename,
      type: file.type,
    };

    entries.push([file, fileMetadata]);
  }

  // Download + encode all documents concurrently; allSettled tolerates
  // per-file failures without aborting the batch.
  const results = await Promise.allSettled(
    entries.map(async ([file, fileMetadata]) => {
      if (!file || !fileMetadata) {
        return { file: null, content: null, metadata: fileMetadata };
      }

      try {
        const source = file.source ?? 'local';
        const { getDownloadStream } = encodingMethods[source];

        const stream = await getDownloadStream(req, file.filepath);
        const buffer = await streamToBuffer(stream);
        const documentContent = buffer.toString('base64');

        return {
          file,
          content: documentContent,
          metadata: fileMetadata,
        };
      } catch (error) {
        console.error(`Error processing document ${file.filename}:`, error);
        return { file, content: null, metadata: fileMetadata };
      }
    }),
  );

  for (const settledResult of results) {
    if (settledResult.status === 'rejected') {
      console.error('Document processing failed:', settledResult.reason);
      continue;
    }

    const { file, content, metadata } = settledResult.value;

    // Download failed: report the file without a document part
    if (!content || !file) {
      if (metadata) {
        result.files.push(metadata);
      }
      continue;
    }

    if (file.type === 'application/pdf' && endpoint === EModelEndpoint.anthropic) {
      const pdfBuffer = Buffer.from(content, 'base64');
      const validation = await validateAnthropicPdf(pdfBuffer, pdfBuffer.length);

      // Fail loudly so the user learns the PDF was rejected
      // (size / page count / encryption limits).
      if (!validation.isValid) {
        throw new Error(`PDF validation failed: ${validation.error}`);
      }

      // Anthropic native document content part (base64-encoded PDF)
      const documentPart = {
        type: 'document',
        source: {
          type: 'base64',
          media_type: 'application/pdf',
          data: content,
        },
      };

      result.documents.push(documentPart);
      result.files.push(metadata);
    }
  }

  return result;
}
|
||||
|
||||
// Public API: document encoding for provider-native document support.
module.exports = { encodeAndFormatDocuments };
|
||||
5
api/server/services/Files/documents/index.js
Normal file
5
api/server/services/Files/documents/index.js
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
// Barrel file: re-export the document encoding utilities.
const { encodeAndFormatDocuments } = require('./encode');

module.exports = { encodeAndFormatDocuments };
|
||||
|
|
@ -391,7 +391,17 @@ const processFileUpload = async ({ req, res, metadata }) => {
|
|||
const isAssistantUpload = isAssistantsEndpoint(metadata.endpoint);
|
||||
const assistantSource =
|
||||
metadata.endpoint === EModelEndpoint.azureAssistants ? FileSources.azure : FileSources.openai;
|
||||
const source = isAssistantUpload ? assistantSource : FileSources.vectordb;
|
||||
|
||||
// Use local storage for Anthropic native PDF support, vectordb for others
|
||||
const isAnthropicUpload = metadata.endpoint === EModelEndpoint.anthropic;
|
||||
let source;
|
||||
if (isAssistantUpload) {
|
||||
source = assistantSource;
|
||||
} else if (isAnthropicUpload) {
|
||||
source = FileSources.local;
|
||||
} else {
|
||||
source = FileSources.vectordb;
|
||||
}
|
||||
const { handleFileUpload } = getStrategyFunctions(source);
|
||||
const { file_id, temp_file_id } = metadata;
|
||||
|
||||
|
|
|
|||
77
api/server/services/Files/validation/pdfValidator.js
Normal file
77
api/server/services/Files/validation/pdfValidator.js
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
const { logger } = require('~/config');
|
||||
const { anthropicPdfSizeLimit } = require('librechat-data-provider');
|
||||
|
||||
/**
 * Validates if a PDF meets Anthropic's requirements.
 *
 * Checks, in order: file size (32MB limit), minimum length, the `%PDF-`
 * magic header, heuristic markers of encryption/password protection, and a
 * rough page-count estimate against Anthropic's 100-page limit. Any
 * unexpected error is caught and reported as an invalid result rather than
 * thrown.
 *
 * @param {Buffer} pdfBuffer - The PDF file as a buffer
 * @param {number} fileSize - The file size in bytes
 * @returns {Promise<{isValid: boolean, error?: string}>}
 */
async function validateAnthropicPdf(pdfBuffer, fileSize) {
  try {
    // Check file size (32MB limit)
    if (fileSize > anthropicPdfSizeLimit) {
      return {
        isValid: false,
        error: `PDF file size (${Math.round(fileSize / (1024 * 1024))}MB) exceeds Anthropic's 32MB limit`,
      };
    }

    // Basic PDF header validation (need at least the 5-byte magic)
    if (!pdfBuffer || pdfBuffer.length < 5) {
      return {
        isValid: false,
        error: 'Invalid PDF file: too small or corrupted',
      };
    }

    // Check PDF magic bytes
    const pdfHeader = pdfBuffer.subarray(0, 5).toString();
    if (!pdfHeader.startsWith('%PDF-')) {
      return {
        isValid: false,
        error: 'Invalid PDF file: missing PDF header',
      };
    }

    // Check for password protection/encryption.
    // NOTE(review): heuristic — `/Encrypt `, `/U (`, `/O (` appear in
    // encryption dictionaries but could false-positive inside raw content
    // streams; acceptable for a pre-flight check.
    const pdfContent = pdfBuffer.toString('binary');
    if (
      pdfContent.includes('/Encrypt ') ||
      pdfContent.includes('/U (') ||
      pdfContent.includes('/O (')
    ) {
      return {
        isValid: false,
        error: 'PDF is password-protected or encrypted. Anthropic requires unencrypted PDFs.',
      };
    }

    // Estimate page count (rough). The negative lookahead excludes
    // `/Type /Pages` (page-tree nodes) and other longer names, while still
    // matching `/Type/Page` even as the final token of the buffer — the old
    // `[^s]` pattern required a trailing character and missed that case.
    const pageMatches = pdfContent.match(/\/Type\s*\/Page(?![a-zA-Z])/g);
    const estimatedPages = pageMatches ? pageMatches.length : 1;

    if (estimatedPages > 100) {
      return {
        isValid: false,
        error: `PDF has approximately ${estimatedPages} pages, exceeding Anthropic's 100-page limit`,
      };
    }

    logger.debug(
      `PDF validation passed: ${Math.round(fileSize / 1024)}KB, ~${estimatedPages} pages`,
    );

    return { isValid: true };
  } catch (error) {
    logger.error('PDF validation error:', error);
    return {
      isValid: false,
      error: 'Failed to validate PDF file',
    };
  }
}
|
||||
|
||||
// Public API: Anthropic PDF pre-flight validation.
module.exports = { validateAnthropicPdf };
|
||||
Loading…
Add table
Add a link
Reference in a new issue