📁 feat: Send Attachments Directly to Provider (OpenAI) (#9098)
* refactor: change references from "direct upload" to "direct attach" to better reflect functionality. Since we now use a base64 encoding strategy rather than the Files/File API to send attachments directly to the provider, the "upload" nomenclature no longer makes sense; "direct_attach" better describes the different methods of sending attachments to providers, even if we later introduce direct upload support
* feat: add "Upload to Provider" option for OpenAI (and agent) UI
* chore: move the Anthropic PDF validator over to packages/api
* feat: simple PDF validation according to OpenAI docs
* feat: add provider-agnostic validatePdf logic to start handling multiple endpoints
* feat: add handling for OpenAI-specific documentPart formatting
* refactor: move require statement to its proper place at the top of the file
* chore: add the openAI endpoint to the rest of the document handling logic
* feat: add direct attach support for the azureOpenAI endpoint and agents
* feat: add PDF validation for the azureOpenAI endpoint
* refactor: unify all endpoint checks with isDocumentSupportedEndpoint
* refactor: consolidate "Upload to Provider" vs "Upload image" logic for clarity
* refactor: remove anthropic from the anthropic_multimodal fileType since we now support multiple providers
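The unified check mentioned in the last refactor bullet can be pictured as a simple membership test; a minimal sketch, assuming `isDocumentSupportedEndpoint` covers the three endpoints this commit touches (the actual export in `librechat-data-provider` may differ):

```js
// Minimal sketch of the unified endpoint check (assumed implementation).
const { EModelEndpoint } = require('librechat-data-provider');

// Endpoints this commit wires up for base64 "direct attach" of PDFs.
const documentSupportedEndpoints = new Set([
  EModelEndpoint.anthropic,
  EModelEndpoint.openAI,
  EModelEndpoint.azureOpenAI,
]);

function isDocumentSupportedEndpoint(endpoint) {
  return documentSupportedEndpoints.has(endpoint);
}
```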
parent 89843262b2
commit b5aadf1302
10 changed files with 122 additions and 64 deletions
```diff
@@ -33,7 +33,16 @@ const {
   AgentCapabilities,
   bedrockInputSchema,
   removeNullishValues,
+  isDocumentSupportedEndpoint,
 } = require('librechat-data-provider');
+const {
+  findPluginAuthsByKeys,
+  getFormattedMemories,
+  deleteMemory,
+  setMemory,
+} = require('~/models');
+const { getMCPAuthMap, checkCapability, hasCustomUserVars } = require('~/server/services/Config');
+const { encodeAndFormatDocuments } = require('~/server/services/Files/documents/encode');
 const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
 const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
 const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
```
```diff
@@ -223,12 +232,11 @@ class AgentClient extends BaseClient {
   }
 
   async addDocuments(message, attachments) {
-    const documentResult =
-      await require('~/server/services/Files/documents').encodeAndFormatDocuments(
-        this.options.req,
-        attachments,
-        this.options.agent.provider,
-      );
+    const documentResult = await encodeAndFormatDocuments(
+      this.options.req,
+      attachments,
+      this.options.agent.provider,
+    );
     message.documents =
       documentResult.documents && documentResult.documents.length
         ? documentResult.documents
```
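A sketch of the shape flowing through this call, inferred from the service diff further down (field lists are assumptions, not the authoritative type):

```js
// What encodeAndFormatDocuments appears to return, based on this commit's
// service diff (documents = provider-formatted parts, files = metadata):
const documentResult = {
  documents: [
    { type: 'document', source: { type: 'base64' /* ... */ } }, // anthropic
    { type: 'input_file', filename: 'report.pdf' /* ... */ },   // openAI
  ],
  files: [{ filename: 'report.pdf' /* other metadata fields assumed */ }],
};
```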
```diff
@@ -318,7 +326,7 @@ class AgentClient extends BaseClient {
       message.documents &&
       message.documents.length > 0 &&
       message.isCreatedByUser &&
-      this.options.agent.provider === EModelEndpoint.anthropic
+      isDocumentSupportedEndpoint(this.options.agent.provider)
     ) {
       const contentParts = [];
       contentParts.push(...message.documents);
```
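With the guard widened beyond Anthropic, the same content-parts path now serves any document-capable provider; a rough sketch of the assembly this block performs, with the steps after the shown lines assumed:

```js
// Sketch: documents are prepended to the user turn's content, followed by
// the message text (the text part's exact shape is an assumption here).
const contentParts = [];
contentParts.push(...message.documents);
contentParts.push({ type: 'text', text: message.text });
message.content = contentParts; // assumed assignment; not shown in this hunk
```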
```diff
@@ -1,6 +1,6 @@
-const { EModelEndpoint } = require('librechat-data-provider');
+const { EModelEndpoint, isDocumentSupportedEndpoint } = require('librechat-data-provider');
 const { getStrategyFunctions } = require('~/server/services/Files/strategies');
-const { validateAnthropicPdf } = require('../validation/pdfValidator');
+const { validatePdf } = require('@librechat/api');
 
 /**
  * Converts a readable stream to a buffer.
```
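The new `validatePdf` from `@librechat/api` takes the endpoint as a third argument (see its call site two hunks down); a plausible dispatcher, sketched on the assumption that it routes to per-provider validators like the Anthropic one deleted at the end of this diff — `validateOpenAIPdf` is hypothetical:

```js
// Hypothetical internals of @librechat/api's validatePdf (sketch only).
const { EModelEndpoint } = require('librechat-data-provider');

async function validatePdf(pdfBuffer, fileSize, endpoint) {
  switch (endpoint) {
    case EModelEndpoint.anthropic:
      return validateAnthropicPdf(pdfBuffer, fileSize);
    case EModelEndpoint.openAI:
    case EModelEndpoint.azureOpenAI:
      return validateOpenAIPdf(pdfBuffer, fileSize); // hypothetical helper
    default:
      // Callers gate on isDocumentSupportedEndpoint first, so this is a
      // defensive fallback.
      return { isValid: true };
  }
}
```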
```diff
@@ -71,7 +71,7 @@ async function encodeAndFormatDocuments(req, files, endpoint) {
     /** @type {FileSources} */
     const source = file.source ?? 'local';
 
-    if (file.type !== 'application/pdf' || endpoint !== EModelEndpoint.anthropic) {
+    if (file.type !== 'application/pdf' || !isDocumentSupportedEndpoint(endpoint)) {
       continue;
     }
 
```
```diff
@@ -132,26 +132,35 @@ async function encodeAndFormatDocuments(req, files, endpoint) {
       continue;
     }
 
-    if (file.type === 'application/pdf' && endpoint === EModelEndpoint.anthropic) {
+    if (file.type === 'application/pdf' && isDocumentSupportedEndpoint(endpoint)) {
       const pdfBuffer = Buffer.from(content, 'base64');
-      const validation = await validateAnthropicPdf(pdfBuffer, pdfBuffer.length);
+      const validation = await validatePdf(pdfBuffer, pdfBuffer.length, endpoint);
 
       if (!validation.isValid) {
         throw new Error(`PDF validation failed: ${validation.error}`);
       }
 
-      const documentPart = {
-        type: 'document',
-        source: {
-          type: 'base64',
-          media_type: 'application/pdf',
-          data: content,
-        },
-        cache_control: { type: 'ephemeral' },
-        citations: { enabled: true },
-      };
+      if (endpoint === EModelEndpoint.anthropic) {
+        const documentPart = {
+          type: 'document',
+          source: {
+            type: 'base64',
+            media_type: 'application/pdf',
+            data: content,
+          },
+          cache_control: { type: 'ephemeral' },
+          citations: { enabled: true },
+        };
+        result.documents.push(documentPart);
+      } else if (endpoint === EModelEndpoint.openAI) {
+        const documentPart = {
+          type: 'input_file',
+          filename: file.filename,
+          file_data: `data:application/pdf;base64,${content}`,
+        };
+        result.documents.push(documentPart);
+      }
 
-      result.documents.push(documentPart);
       result.files.push(metadata);
     }
   }
```
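For the same PDF, the two branches above yield differently shaped parts; here is roughly where each one lands in the outgoing request (surrounding message shapes follow the providers' public APIs; filename and text are placeholders):

```js
// Anthropic Messages API: the document block sits in a user message's content.
const anthropicMessage = {
  role: 'user',
  content: [
    {
      type: 'document',
      source: { type: 'base64', media_type: 'application/pdf', data: '<base64>' },
      cache_control: { type: 'ephemeral' },
      citations: { enabled: true },
    },
    { type: 'text', text: 'Summarize this PDF.' },
  ],
};

// OpenAI Responses API: input_file carries the filename and a data URL.
const openAIMessage = {
  role: 'user',
  content: [
    {
      type: 'input_file',
      filename: 'report.pdf',
      file_data: 'data:application/pdf;base64,<base64>',
    },
    { type: 'input_text', text: 'Summarize this PDF.' },
  ],
};
```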
```diff
@@ -1,72 +0,0 @@
-const { logger } = require('~/config');
-const { anthropicPdfSizeLimit } = require('librechat-data-provider');
-
-/**
- * Validates if a PDF meets Anthropic's requirements
- * @param {Buffer} pdfBuffer - The PDF file as a buffer
- * @param {number} fileSize - The file size in bytes
- * @returns {Promise<{isValid: boolean, error?: string}>}
- */
-async function validateAnthropicPdf(pdfBuffer, fileSize) {
-  try {
-    if (fileSize > anthropicPdfSizeLimit) {
-      return {
-        isValid: false,
-        error: `PDF file size (${Math.round(fileSize / (1024 * 1024))}MB) exceeds Anthropic's 32MB limit`,
-      };
-    }
-
-    if (!pdfBuffer || pdfBuffer.length < 5) {
-      return {
-        isValid: false,
-        error: 'Invalid PDF file: too small or corrupted',
-      };
-    }
-
-    const pdfHeader = pdfBuffer.subarray(0, 5).toString();
-    if (!pdfHeader.startsWith('%PDF-')) {
-      return {
-        isValid: false,
-        error: 'Invalid PDF file: missing PDF header',
-      };
-    }
-
-    const pdfContent = pdfBuffer.toString('binary');
-    if (
-      pdfContent.includes('/Encrypt ') ||
-      pdfContent.includes('/U (') ||
-      pdfContent.includes('/O (')
-    ) {
-      return {
-        isValid: false,
-        error: 'PDF is password-protected or encrypted. Anthropic requires unencrypted PDFs.',
-      };
-    }
-
-    const pageMatches = pdfContent.match(/\/Type[\s]*\/Page[^s]/g);
-    const estimatedPages = pageMatches ? pageMatches.length : 1;
-
-    if (estimatedPages > 100) {
-      return {
-        isValid: false,
-        error: `PDF has approximately ${estimatedPages} pages, exceeding Anthropic's 100-page limit`,
-      };
-    }
-
-    logger.debug(
-      `PDF validation passed: ${Math.round(fileSize / 1024)}KB, ~${estimatedPages} pages`,
-    );
-
-    return { isValid: true };
-  } catch (error) {
-    logger.error('PDF validation error:', error);
-    return {
-      isValid: false,
-      error: 'Failed to validate PDF file',
-    };
-  }
-}
-
-module.exports = {
-  validateAnthropicPdf,
-};
```
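Per the commit message's "simple PDF validation according to OpenAI docs", the deleted validator above presumably gains an OpenAI-side counterpart inside packages/api; a hedged sketch that mirrors the removed checks — the function name and the size/page constants are assumptions, not the shipped code:

```js
// Hypothetical OpenAI-side counterpart to the removed Anthropic validator.
const OPENAI_PDF_SIZE_LIMIT = 32 * 1024 * 1024; // assumed byte cap
const OPENAI_PDF_PAGE_LIMIT = 100; // assumed page cap

async function validateOpenAIPdf(pdfBuffer, fileSize) {
  if (fileSize > OPENAI_PDF_SIZE_LIMIT) {
    return {
      isValid: false,
      error: `PDF file size (${Math.round(fileSize / (1024 * 1024))}MB) exceeds the size limit`,
    };
  }
  if (!pdfBuffer || pdfBuffer.length < 5 || !pdfBuffer.subarray(0, 5).toString().startsWith('%PDF-')) {
    return { isValid: false, error: 'Invalid PDF file: missing PDF header' };
  }
  // Same rough page estimate the Anthropic validator used.
  const pageMatches = pdfBuffer.toString('binary').match(/\/Type[\s]*\/Page[^s]/g);
  const estimatedPages = pageMatches ? pageMatches.length : 1;
  if (estimatedPages > OPENAI_PDF_PAGE_LIMIT) {
    return {
      isValid: false,
      error: `PDF has approximately ${estimatedPages} pages, exceeding the page limit`,
    };
  }
  return { isValid: true };
}
```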