Mirror of https://github.com/danny-avila/LibreChat.git, synced 2026-01-02 08:38:51 +01:00
📑 feat: Anthropic Direct Provider Upload (#9072)
* feat: implement Anthropic native PDF support with document preservation
  - Add comprehensive debug logging throughout the PDF processing pipeline
  - Refactor attachment processing to separate image and document handling
  - Create distinct addImageURLs(), addDocuments(), and processAttachments() methods
  - Fix critical bugs in stream handling and parameter passing
  - Add streamToBuffer utility for proper stream-to-buffer conversion
  - Remove api/agents submodule from repository

  🤖 Generated with [Claude Code](https://claude.ai/code)
  Co-Authored-By: Claude <noreply@anthropic.com>

* chore: remove out-of-scope formatting changes
* fix: stop duplication of file in chat at end of response stream
* chore: bring back file search and OCR options
* chore: localize "Upload to Provider" string in file menu
* refactor: change createMenuItems args to fit the new pattern introduced by anthropic-native-pdf-support
* feat: add cache point for PDFs processed by the Anthropic endpoint, since they are unlikely to change and should benefit from caching
* feat: combine "Upload Image" into "Upload to Provider" since both perform direct upload, and change the provider upload icon to reflect multimodal upload
* feat: add citations support according to the docs
* refactor: remove redundant 'document' check since documents are now handled properly by formatMessage in the agents repo
* refactor: change upload logic so the Anthropic endpoint isn't exempted from the normal upload path, using Agents for consistency with the rest of the upload logic
* fix: include width and height in the return from uploadLocalFile so images are correctly identified when going through an AgentUpload in addImageURLs
* chore: remove client-specific handling since the direct provider handling lives in the agent client
* feat: handle documents in AgentClient so no changes to the agents repo are needed
* chore: remove unused changes
* chore: remove auto-generated comments from the original commit
* feat: add logic for agents to use direct-to-provider uploads if supported (currently just Anthropic)
* fix: reintroduce role check to fix render error caused by an undefined value for a Content Part
* fix: actually fix the render bug by using a proper isCreatedByUser check and making the mutation of formattedMessage.content consistent

---------

Co-authored-by: Andres Restrepo <andres@thelinuxkid.com>
Co-authored-by: Claude <noreply@anthropic.com>
parent: 48f6f8f2f8
commit: 89843262b2
14 changed files with 398 additions and 14 deletions
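To make the net effect concrete before the per-file changes: a user's PDF attachment on the Anthropic endpoint is streamed to a buffer, base64-encoded, validated, and sent as a native Anthropic document content part, placed ahead of any images and the message text, with prompt caching and citations enabled. A minimal sketch of the resulting message content; the variables base64Pdf, imageParts, and userText are illustrative, not names from the diff:

const documentPart = {
  type: 'document',
  source: {
    type: 'base64',
    media_type: 'application/pdf',
    data: base64Pdf, // hypothetical: base64-encoded PDF bytes
  },
  cache_control: { type: 'ephemeral' }, // PDFs rarely change between turns, so cache them
  citations: { enabled: true },
};

// Documents first, then images, then the text part:
formattedMessage.content = [documentPart, ...imageParts, { type: 'text', text: userText }];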
@@ -222,6 +222,42 @@ class AgentClient extends BaseClient {
    return files;
  }

+  async addDocuments(message, attachments) {
+    const documentResult =
+      await require('~/server/services/Files/documents').encodeAndFormatDocuments(
+        this.options.req,
+        attachments,
+        this.options.agent.provider,
+      );
+    message.documents =
+      documentResult.documents && documentResult.documents.length
+        ? documentResult.documents
+        : undefined;
+    return documentResult.files;
+  }
+
+  async processAttachments(message, attachments) {
+    const [imageFiles, documentFiles] = await Promise.all([
+      this.addImageURLs(message, attachments),
+      this.addDocuments(message, attachments),
+    ]);
+
+    const allFiles = [...imageFiles, ...documentFiles];
+    const seenFileIds = new Set();
+    const uniqueFiles = [];
+
+    for (const file of allFiles) {
+      if (file.file_id && !seenFileIds.has(file.file_id)) {
+        seenFileIds.add(file.file_id);
+        uniqueFiles.push(file);
+      } else if (!file.file_id) {
+        uniqueFiles.push(file);
+      }
+    }
+
+    return uniqueFiles;
+  }
+
  async buildMessages(
    messages,
    parentMessageId,
@@ -255,7 +291,7 @@ class AgentClient extends BaseClient {
      };
    }

-    const files = await this.addImageURLs(
+    const files = await this.processAttachments(
      orderedMessages[orderedMessages.length - 1],
      attachments,
    );
@@ -278,6 +314,27 @@ class AgentClient extends BaseClient {
        assistantName: this.options?.modelLabel,
      });

+      if (
+        message.documents &&
+        message.documents.length > 0 &&
+        message.isCreatedByUser &&
+        this.options.agent.provider === EModelEndpoint.anthropic
+      ) {
+        const contentParts = [];
+        contentParts.push(...message.documents);
+        if (message.image_urls && message.image_urls.length > 0) {
+          contentParts.push(...message.image_urls);
+        }
+
+        if (typeof formattedMessage.content === 'string') {
+          contentParts.push({ type: 'text', text: formattedMessage.content });
+        } else {
+          const textPart = formattedMessage.content.find((part) => part.type === 'text');
+          contentParts.push(textPart);
+        }
+        formattedMessage.content = contentParts;
+      }
+
      if (message.ocr && i !== orderedMessages.length - 1) {
        if (typeof formattedMessage.content === 'string') {
          formattedMessage.content = message.ocr + '\n' + formattedMessage.content;
@@ -793,6 +850,7 @@ class AgentClient extends BaseClient {
      };

+      const toolSet = new Set((this.options.agent.tools ?? []).map((tool) => tool && tool.name));

      let { messages: initialMessages, indexTokenCountMap } = formatAgentMessages(
        payload,
        this.indexTokenCountMap,
@@ -4,6 +4,7 @@ const axios = require('axios');
const { logger } = require('@librechat/data-schemas');
const { EModelEndpoint } = require('librechat-data-provider');
const { generateShortLivedToken } = require('@librechat/api');
+const { resizeImageBuffer } = require('~/server/services/Files/images/resize');
const { getBufferMetadata } = require('~/server/utils');
const paths = require('~/config/paths');
@@ -286,7 +287,18 @@ async function uploadLocalFile({ req, file, file_id }) {
  await fs.promises.writeFile(newPath, inputBuffer);
  const filepath = path.posix.join('/', 'uploads', req.user.id, path.basename(newPath));

-  return { filepath, bytes };
+  let height, width;
+  if (file.mimetype && file.mimetype.startsWith('image/')) {
+    try {
+      const { width: imgWidth, height: imgHeight } = await resizeImageBuffer(inputBuffer, 'high');
+      height = imgHeight;
+      width = imgWidth;
+    } catch (error) {
+      logger.warn('[uploadLocalFile] Could not get image dimensions:', error.message);
+    }
+  }
+
+  return { filepath, bytes, height, width };
}

/**
api/server/services/Files/documents/encode.js (new file, 164 lines)
@@ -0,0 +1,164 @@
const { EModelEndpoint } = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { validateAnthropicPdf } = require('../validation/pdfValidator');

/**
 * Converts a readable stream to a buffer.
 *
 * @param {NodeJS.ReadableStream} stream - The readable stream to convert.
 * @returns {Promise<Buffer>} - Promise resolving to the buffer.
 */
async function streamToBuffer(stream) {
  return new Promise((resolve, reject) => {
    const chunks = [];

    stream.on('data', (chunk) => {
      chunks.push(chunk);
    });

    stream.on('end', () => {
      try {
        const buffer = Buffer.concat(chunks);
        chunks.length = 0;
        resolve(buffer);
      } catch (err) {
        reject(err);
      }
    });

    stream.on('error', (error) => {
      chunks.length = 0;
      reject(error);
    });
  }).finally(() => {
    if (stream.destroy && typeof stream.destroy === 'function') {
      stream.destroy();
    }
  });
}

/**
 * Processes and encodes document files for various endpoints
 *
 * @param {Express.Request} req - Express request object
 * @param {MongoFile[]} files - Array of file objects to process
 * @param {string} endpoint - The endpoint identifier (e.g., EModelEndpoint.anthropic)
 * @returns {Promise<{documents: MessageContentDocument[], files: MongoFile[]}>}
 */
async function encodeAndFormatDocuments(req, files, endpoint) {
  const promises = [];
  /** @type {Record<FileSources, Pick<ReturnType<typeof getStrategyFunctions>, 'prepareDocumentPayload' | 'getDownloadStream'>>} */
  const encodingMethods = {};
  /** @type {{ documents: MessageContentDocument[]; files: MongoFile[] }} */
  const result = {
    documents: [],
    files: [],
  };

  if (!files || !files.length) {
    return result;
  }

  const documentFiles = files.filter(
    (file) => file.type === 'application/pdf' || file.type?.startsWith('application/'), // Future: support for other document types
  );

  if (!documentFiles.length) {
    return result;
  }

  for (let file of documentFiles) {
    /** @type {FileSources} */
    const source = file.source ?? 'local';

    if (file.type !== 'application/pdf' || endpoint !== EModelEndpoint.anthropic) {
      continue;
    }

    if (!encodingMethods[source]) {
      encodingMethods[source] = getStrategyFunctions(source);
    }

    const fileMetadata = {
      file_id: file.file_id || file._id,
      temp_file_id: file.temp_file_id,
      filepath: file.filepath,
      source: file.source,
      filename: file.filename,
      type: file.type,
    };

    promises.push([file, fileMetadata]);
  }

  const results = await Promise.allSettled(
    promises.map(async ([file, fileMetadata]) => {
      if (!file || !fileMetadata) {
        return { file: null, content: null, metadata: fileMetadata };
      }

      try {
        const source = file.source ?? 'local';
        const { getDownloadStream } = encodingMethods[source];

        const stream = await getDownloadStream(req, file.filepath);
        const buffer = await streamToBuffer(stream);
        const documentContent = buffer.toString('base64');

        return {
          file,
          content: documentContent,
          metadata: fileMetadata,
        };
      } catch (error) {
        console.error(`Error processing document ${file.filename}:`, error);
        return { file, content: null, metadata: fileMetadata };
      }
    }),
  );

  for (const settledResult of results) {
    if (settledResult.status === 'rejected') {
      console.error('Document processing failed:', settledResult.reason);
      continue;
    }

    const { file, content, metadata } = settledResult.value;

    if (!content || !file) {
      if (metadata) {
        result.files.push(metadata);
      }
      continue;
    }

    if (file.type === 'application/pdf' && endpoint === EModelEndpoint.anthropic) {
      const pdfBuffer = Buffer.from(content, 'base64');
      const validation = await validateAnthropicPdf(pdfBuffer, pdfBuffer.length);

      if (!validation.isValid) {
        throw new Error(`PDF validation failed: ${validation.error}`);
      }

      const documentPart = {
        type: 'document',
        source: {
          type: 'base64',
          media_type: 'application/pdf',
          data: content,
        },
        cache_control: { type: 'ephemeral' },
        citations: { enabled: true },
      };

      result.documents.push(documentPart);
      result.files.push(metadata);
    }
  }

  return result;
}

module.exports = {
  encodeAndFormatDocuments,
};
api/server/services/Files/documents/index.js (new file, 5 lines)
@@ -0,0 +1,5 @@
const { encodeAndFormatDocuments } = require('./encode');

module.exports = {
  encodeAndFormatDocuments,
};
@@ -419,11 +419,11 @@ const processFileUpload = async ({ req, res, metadata }) => {
  const isAssistantUpload = isAssistantsEndpoint(metadata.endpoint);
  const assistantSource =
    metadata.endpoint === EModelEndpoint.azureAssistants ? FileSources.azure : FileSources.openai;

  // Use the configured file strategy for regular file uploads (not vectordb)
  const source = isAssistantUpload ? assistantSource : appConfig.fileStrategy;
  const { handleFileUpload } = getStrategyFunctions(source);
  const { file_id, temp_file_id = null } = metadata;

  /** @type {OpenAI | undefined} */
  let openai;
  if (checkOpenAIStorage(source)) {
api/server/services/Files/validation/pdfValidator.js (new file, 72 lines)
@@ -0,0 +1,72 @@
const { logger } = require('~/config');
const { anthropicPdfSizeLimit } = require('librechat-data-provider');

/**
 * Validates if a PDF meets Anthropic's requirements
 * @param {Buffer} pdfBuffer - The PDF file as a buffer
 * @param {number} fileSize - The file size in bytes
 * @returns {Promise<{isValid: boolean, error?: string}>}
 */
async function validateAnthropicPdf(pdfBuffer, fileSize) {
  try {
    if (fileSize > anthropicPdfSizeLimit) {
      return {
        isValid: false,
        error: `PDF file size (${Math.round(fileSize / (1024 * 1024))}MB) exceeds Anthropic's 32MB limit`,
      };
    }

    if (!pdfBuffer || pdfBuffer.length < 5) {
      return {
        isValid: false,
        error: 'Invalid PDF file: too small or corrupted',
      };
    }

    const pdfHeader = pdfBuffer.subarray(0, 5).toString();
    if (!pdfHeader.startsWith('%PDF-')) {
      return {
        isValid: false,
        error: 'Invalid PDF file: missing PDF header',
      };
    }

    const pdfContent = pdfBuffer.toString('binary');
    if (
      pdfContent.includes('/Encrypt ') ||
      pdfContent.includes('/U (') ||
      pdfContent.includes('/O (')
    ) {
      return {
        isValid: false,
        error: 'PDF is password-protected or encrypted. Anthropic requires unencrypted PDFs.',
      };
    }

    const pageMatches = pdfContent.match(/\/Type[\s]*\/Page[^s]/g);
    const estimatedPages = pageMatches ? pageMatches.length : 1;

    if (estimatedPages > 100) {
      return {
        isValid: false,
        error: `PDF has approximately ${estimatedPages} pages, exceeding Anthropic's 100-page limit`,
      };
    }

    logger.debug(
      `PDF validation passed: ${Math.round(fileSize / 1024)}KB, ~${estimatedPages} pages`,
    );

    return { isValid: true };
  } catch (error) {
    logger.error('PDF validation error:', error);
    return {
      isValid: false,
      error: 'Failed to validate PDF file',
    };
  }
}

module.exports = {
  validateAnthropicPdf,
};
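A minimal sketch of exercising the validator on its own; the checkPdf helper is hypothetical, and the require path uses the project's `~` alias (inside LibreChat the validator is only called from encodeAndFormatDocuments above):

const fs = require('fs');
const { validateAnthropicPdf } = require('~/server/services/Files/validation/pdfValidator');

async function checkPdf(filePath) {
  // The validator inspects the raw bytes, so read the whole file into memory.
  const pdfBuffer = await fs.promises.readFile(filePath);
  const { isValid, error } = await validateAnthropicPdf(pdfBuffer, pdfBuffer.length);
  if (!isValid) {
    throw new Error(`Rejected ${filePath}: ${error}`);
  }
  return pdfBuffer;
}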
@@ -21,6 +21,7 @@ export type TAgentCapabilities = {
  [AgentCapabilities.execute_code]: boolean;
  [AgentCapabilities.end_after_tools]?: boolean;
  [AgentCapabilities.hide_sequential_outputs]?: boolean;
+  [AgentCapabilities.direct_upload]?: boolean;
};

export type AgentForm = {
@@ -36,6 +36,7 @@ function AttachFileChat({ disableInputs }: { disableInputs: boolean }) {
      disabled={disableInputs}
      conversationId={conversationId}
      endpointFileConfig={endpointFileConfig}
+      endpoint={endpoint}
    />
  );
}
@@ -1,7 +1,13 @@
import React, { useRef, useState, useMemo } from 'react';
import * as Ariakit from '@ariakit/react';
import { useSetRecoilState } from 'recoil';
-import { FileSearch, ImageUpIcon, TerminalSquareIcon, FileType2Icon } from 'lucide-react';
+import {
+  FileSearch,
+  ImageUpIcon,
+  TerminalSquareIcon,
+  FileType2Icon,
+  FileImageIcon,
+} from 'lucide-react';
import { EToolResources, EModelEndpoint, defaultAgentCapabilities } from 'librechat-data-provider';
import {
  FileUpload,
@@ -14,8 +20,9 @@ import type { EndpointFileConfig } from 'librechat-data-provider';
import { useLocalize, useGetAgentsConfig, useFileHandling, useAgentCapabilities } from '~/hooks';
import useSharePointFileHandling from '~/hooks/Files/useSharePointFileHandling';
import { SharePointPickerDialog } from '~/components/SharePoint';
-import { useGetStartupConfig } from '~/data-provider';
+import { useGetStartupConfig, useGetAgentByIdQuery } from '~/data-provider';
import { ephemeralAgentByConvoId } from '~/store';
+import { useChatContext } from '~/Providers/ChatContext';
import { MenuItemProps } from '~/common';
import { cn } from '~/utils';
@@ -23,9 +30,15 @@ interface AttachFileMenuProps {
  conversationId: string;
  disabled?: boolean | null;
  endpointFileConfig?: EndpointFileConfig;
+  endpoint?: string | null;
}

-const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: AttachFileMenuProps) => {
+const AttachFileMenu = ({
+  disabled,
+  conversationId,
+  endpointFileConfig,
+  endpoint,
+}: AttachFileMenuProps) => {
  const localize = useLocalize();
  const isUploadDisabled = disabled ?? false;
  const inputRef = useRef<HTMLInputElement>(null);
@@ -46,34 +59,68 @@ const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: Attach

  const [isSharePointDialogOpen, setIsSharePointDialogOpen] = useState(false);
  const { agentsConfig } = useGetAgentsConfig();
+  const { conversation } = useChatContext();
+
+  // Get agent details if using an agent
+  const { data: agent } = useGetAgentByIdQuery(conversation?.agent_id ?? '', {
+    enabled: !!conversation?.agent_id && conversation?.agent_id !== 'ephemeral',
+  });
+
  /** TODO: Ephemeral Agent Capabilities
   * Allow defining agent capabilities on a per-endpoint basis
   * Use definition for agents endpoint for ephemeral agents
   * */
  const capabilities = useAgentCapabilities(agentsConfig?.capabilities ?? defaultAgentCapabilities);

-  const handleUploadClick = (isImage?: boolean) => {
+  const handleUploadClick = (fileType?: 'image' | 'document' | 'anthropic_multimodal') => {
    if (!inputRef.current) {
      return;
    }
    inputRef.current.value = '';
-    inputRef.current.accept = isImage === true ? 'image/*' : '';
+    if (fileType === 'image') {
+      inputRef.current.accept = 'image/*';
+    } else if (fileType === 'document') {
+      inputRef.current.accept = '.pdf,application/pdf';
+    } else if (fileType === 'anthropic_multimodal') {
+      inputRef.current.accept = 'image/*,.pdf,application/pdf';
+    } else {
+      inputRef.current.accept = '';
+    }
    inputRef.current.click();
+    inputRef.current.accept = '';
  };

  const dropdownItems = useMemo(() => {
-    const createMenuItems = (onAction: (isImage?: boolean) => void) => {
-      const items: MenuItemProps[] = [
-        {
+    const createMenuItems = (
+      onAction: (fileType?: 'image' | 'document' | 'anthropic_multimodal') => void,
+    ) => {
+      const items: MenuItemProps[] = [];
+
+      // this is temporary until i add direct upload support for the other providers and can make a more robust solution
+      const isAnthropicAgent = agent?.provider === 'anthropic';
+      const shouldShowDirectUpload = endpoint === EModelEndpoint.anthropic || isAnthropicAgent;
+
+      if (!shouldShowDirectUpload) {
+        items.push({
          label: localize('com_ui_upload_image_input'),
          onClick: () => {
            setToolResource(undefined);
-            onAction(true);
+            onAction('image');
          },
          icon: <ImageUpIcon className="icon-md" />,
-        },
-      ];
+        });
+      }
+
+      if (shouldShowDirectUpload) {
+        items.push({
+          label: localize('com_ui_upload_provider'),
+          onClick: () => {
+            setToolResource(EToolResources.direct_upload);
+            onAction('anthropic_multimodal');
+          },
+          icon: <FileImageIcon className="icon-md" />,
+        });
+      }

      if (capabilities.ocrEnabled) {
        items.push({
@@ -139,6 +186,7 @@ const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: Attach
    setEphemeralAgent,
    sharePointEnabled,
    setIsSharePointDialogOpen,
+    endpoint,
  ]);

  const menuTrigger = (
@@ -9,6 +9,7 @@ interface AgentCapabilitiesResult {
  fileSearchEnabled: boolean;
  webSearchEnabled: boolean;
  codeEnabled: boolean;
+  directUploadEnabled: boolean;
}

export default function useAgentCapabilities(
@@ -49,6 +50,11 @@ export default function useAgentCapabilities(
    [capabilities],
  );

+  const directUploadEnabled = useMemo(
+    () => capabilities?.includes(AgentCapabilities.direct_upload) ?? false,
+    [capabilities],
+  );
+
  return {
    ocrEnabled,
    codeEnabled,
@@ -57,5 +63,6 @@ export default function useAgentCapabilities(
    artifactsEnabled,
    webSearchEnabled,
    fileSearchEnabled,
+    directUploadEnabled,
  };
}
@@ -1218,6 +1218,7 @@
  "com_ui_upload_invalid": "Invalid file for upload. Must be an image not exceeding the limit",
  "com_ui_upload_invalid_var": "Invalid file for upload. Must be an image not exceeding {{0}} MB",
  "com_ui_upload_ocr_text": "Upload as Text",
+  "com_ui_upload_provider": "Upload to Provider",
  "com_ui_upload_success": "Successfully uploaded file",
  "com_ui_upload_type": "Select Upload Type",
  "com_ui_usage": "Usage",
@@ -175,6 +175,7 @@ export enum Capabilities {
export enum AgentCapabilities {
  hide_sequential_outputs = 'hide_sequential_outputs',
  end_after_tools = 'end_after_tools',
+  direct_upload = 'direct_upload',
  execute_code = 'execute_code',
  file_search = 'file_search',
  web_search = 'web_search',
@@ -256,6 +257,7 @@ export const defaultAgentCapabilities = [
  AgentCapabilities.tools,
  AgentCapabilities.chain,
  AgentCapabilities.ocr,
+  AgentCapabilities.direct_upload,
];

export const agentsEndpointSchema = baseEndpointSchema
@@ -186,6 +186,10 @@ export const mbToBytes = (mb: number): number => mb * megabyte;

const defaultSizeLimit = mbToBytes(512);
const defaultTokenLimit = 100000;

+// Anthropic PDF limits: 32MB max, 100 pages max
+export const anthropicPdfSizeLimit = mbToBytes(32);
+
const assistantsFileConfig = {
  fileLimit: 10,
  fileSizeLimit: defaultSizeLimit,
@@ -199,6 +203,14 @@ export const fileConfig = {
  [EModelEndpoint.assistants]: assistantsFileConfig,
  [EModelEndpoint.azureAssistants]: assistantsFileConfig,
  [EModelEndpoint.agents]: assistantsFileConfig,
+  [EModelEndpoint.anthropic]: {
+    fileLimit: 10,
+    fileSizeLimit: defaultSizeLimit,
+    totalSizeLimit: defaultSizeLimit,
+    supportedMimeTypes,
+    disabled: false,
+    pdfSizeLimit: anthropicPdfSizeLimit,
+  },
  default: {
    fileLimit: 10,
    fileSizeLimit: defaultSizeLimit,
@@ -27,6 +27,7 @@ export enum Tools {

export enum EToolResources {
  code_interpreter = 'code_interpreter',
+  direct_upload = 'direct_upload',
  execute_code = 'execute_code',
  file_search = 'file_search',
  image_edit = 'image_edit',