mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-01-02 16:48:50 +01:00
📑 feat: Anthropic Direct Provider Upload (#9072)
* feat: implement Anthropic native PDF support with document preservation - Add comprehensive debug logging throughout PDF processing pipeline - Refactor attachment processing to separate image and document handling - Create distinct addImageURLs(), addDocuments(), and processAttachments() methods - Fix critical bugs in stream handling and parameter passing - Add streamToBuffer utility for proper stream-to-buffer conversion - Remove api/agents submodule from repository 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com> * chore: remove out of scope formatting changes * fix: stop duplication of file in chat on end of response stream * chore: bring back file search and ocr options * chore: localize upload to provider string in file menu * refactor: change createMenuItems args to fit new pattern introduced by anthropic-native-pdf-support * feat: add cache point for pdfs processed by anthropic endpoint since they are unlikely to change and should benefit from caching * feat: combine Upload Image into Upload to Provider since they both perform direct upload and change provider upload icon to reflect multimodal upload * feat: add citations support according to docs * refactor: remove redundant 'document' check since documents are handled properly by formatMessage in the agents repo now * refactor: change upload logic so anthropic endpoint isn't exempted from normal upload path using Agents for consistency with the rest of the upload logic * fix: include width and height in return from uploadLocalFile so images are correctly identified when going through an AgentUpload in addImageURLs * chore: remove client specific handling since the direct provider stuff is handled by the agent client * feat: handle documents in AgentClient so no need for change to agents repo * chore: removed unused changes * chore: remove auto generated comments from OG commit * feat: add logic for agents to use direct to provider uploads if supported 
(currently just anthropic) * fix: reintroduce role check to fix render error because of undefined value for Content Part * fix: actually fix render bug by using proper isCreatedByUser check and making sure our mutation of formattedMessage.content is consistent --------- Co-authored-by: Andres Restrepo <andres@thelinuxkid.com> Co-authored-by: Claude <noreply@anthropic.com>
This commit is contained in:
parent
48f6f8f2f8
commit
89843262b2
14 changed files with 398 additions and 14 deletions
164
api/server/services/Files/documents/encode.js
Normal file
164
api/server/services/Files/documents/encode.js
Normal file
|
|
@ -0,0 +1,164 @@
|
|||
const { EModelEndpoint } = require('librechat-data-provider');
|
||||
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
||||
const { validateAnthropicPdf } = require('../validation/pdfValidator');
|
||||
|
||||
/**
 * Converts a readable stream to a Buffer by collecting its data chunks.
 *
 * The source stream is destroyed once the promise settles (success or
 * failure) so the underlying resource is released promptly.
 *
 * @param {NodeJS.ReadableStream} stream - The readable stream to convert.
 * @returns {Promise<Buffer>} - Promise resolving to the concatenated buffer.
 */
function streamToBuffer(stream) {
  // `new Promise` is appropriate here: we are adapting an event-based API.
  // The redundant `async` keyword was removed — the function already returns
  // a Promise explicitly, so `async` only double-wrapped it.
  return new Promise((resolve, reject) => {
    /** @type {Buffer[]} */
    const chunks = [];

    stream.on('data', (chunk) => {
      chunks.push(chunk);
    });

    stream.on('end', () => {
      try {
        const buffer = Buffer.concat(chunks);
        // Drop chunk references early to help garbage collection.
        chunks.length = 0;
        resolve(buffer);
      } catch (err) {
        reject(err);
      }
    });

    stream.on('error', (error) => {
      chunks.length = 0;
      reject(error);
    });
  }).finally(() => {
    // Clean up the stream regardless of outcome; the optional chain also
    // guards against a null/undefined stream reference.
    if (typeof stream?.destroy === 'function') {
      stream.destroy();
    }
  });
}
|
||||
|
||||
/**
 * Processes and encodes document files for direct-to-provider upload.
 *
 * Currently only Anthropic PDF documents are handled: each qualifying PDF is
 * downloaded via its storage strategy, base64-encoded, validated, and
 * formatted as an Anthropic `document` content part with prompt caching and
 * citations enabled. Files that fail to download/encode are skipped but their
 * metadata is still returned so the caller can track the attachment.
 *
 * @param {Express.Request} req - Express request object
 * @param {MongoFile[]} files - Array of file objects to process
 * @param {string} endpoint - The endpoint identifier (e.g., EModelEndpoint.anthropic)
 * @returns {Promise<{documents: MessageContentDocument[], files: MongoFile[]}>}
 * @throws {Error} When a PDF fails Anthropic validation.
 */
async function encodeAndFormatDocuments(req, files, endpoint) {
  /** @type {Record<FileSources, Pick<ReturnType<typeof getStrategyFunctions>, 'prepareDocumentPayload' | 'getDownloadStream'>>} */
  const encodingMethods = {};
  /** @type {{ documents: MessageContentDocument[]; files: MongoFile[] }} */
  const result = {
    documents: [],
    files: [],
  };

  if (!files || !files.length) {
    return result;
  }

  // Only Anthropic supports direct document upload today. The endpoint check
  // is loop-invariant, so bail out early instead of re-testing it per file
  // (every file would be skipped and the empty result returned anyway).
  if (endpoint !== EModelEndpoint.anthropic) {
    return result;
  }

  const documentFiles = files.filter(
    (file) => file.type === 'application/pdf' || file.type?.startsWith('application/'), // Future: support for other document types
  );

  if (!documentFiles.length) {
    return result;
  }

  /** Pairs of `[file, metadata]` queued for download + encoding.
   * (Renamed from `promises` — it never held promises, only tuples.)
   * @type {Array<[MongoFile, object]>} */
  const queued = [];

  for (const file of documentFiles) {
    // Only PDFs are encodable for now; other application/* types fall through.
    if (file.type !== 'application/pdf') {
      continue;
    }

    /** @type {FileSources} */
    const source = file.source ?? 'local';

    // Cache the strategy functions per storage source.
    if (!encodingMethods[source]) {
      encodingMethods[source] = getStrategyFunctions(source);
    }

    const fileMetadata = {
      file_id: file.file_id || file._id,
      temp_file_id: file.temp_file_id,
      filepath: file.filepath,
      source: file.source,
      filename: file.filename,
      type: file.type,
    };

    queued.push([file, fileMetadata]);
  }

  const results = await Promise.allSettled(
    queued.map(async ([file, fileMetadata]) => {
      if (!file || !fileMetadata) {
        return { file: null, content: null, metadata: fileMetadata };
      }

      try {
        const source = file.source ?? 'local';
        const { getDownloadStream } = encodingMethods[source];

        const stream = await getDownloadStream(req, file.filepath);
        const buffer = await streamToBuffer(stream);
        const documentContent = buffer.toString('base64');

        return {
          file,
          content: documentContent,
          metadata: fileMetadata,
        };
      } catch (error) {
        // Best-effort: a failed download/encode skips the document but keeps
        // the file metadata (handled below) so the attachment is not lost.
        console.error(`Error processing document ${file.filename}:`, error);
        return { file, content: null, metadata: fileMetadata };
      }
    }),
  );

  for (const settledResult of results) {
    if (settledResult.status === 'rejected') {
      console.error('Document processing failed:', settledResult.reason);
      continue;
    }

    const { file, content, metadata } = settledResult.value;

    if (!content || !file) {
      // Encoding failed; still surface the file metadata to the caller.
      if (metadata) {
        result.files.push(metadata);
      }
      continue;
    }

    if (file.type === 'application/pdf') {
      const pdfBuffer = Buffer.from(content, 'base64');
      const validation = await validateAnthropicPdf(pdfBuffer, pdfBuffer.length);

      // Unlike download errors (skipped above), validation failure aborts the
      // whole request so the user sees why the PDF was rejected.
      if (!validation.isValid) {
        throw new Error(`PDF validation failed: ${validation.error}`);
      }

      const documentPart = {
        type: 'document',
        source: {
          type: 'base64',
          media_type: 'application/pdf',
          data: content,
        },
        // Uploaded PDFs are immutable, so cache the block to reduce token cost.
        cache_control: { type: 'ephemeral' },
        citations: { enabled: true },
      };

      result.documents.push(documentPart);
      result.files.push(metadata);
    }
  }

  return result;
}
|
||||
|
||||
// Public API: document encoding for direct-to-provider uploads.
// `streamToBuffer` is intentionally kept private to this module.
module.exports = {
  encodeAndFormatDocuments,
};
|
||||
Loading…
Add table
Add a link
Reference in a new issue