diff --git a/api/app/clients/prompts/createContextHandlers.js b/api/app/clients/prompts/createContextHandlers.js index b3ea9164e..87c48bcf9 100644 --- a/api/app/clients/prompts/createContextHandlers.js +++ b/api/app/clients/prompts/createContextHandlers.js @@ -1,7 +1,6 @@ const axios = require('axios'); -const { isEnabled } = require('@librechat/api'); const { logger } = require('@librechat/data-schemas'); -const { generateShortLivedToken } = require('~/server/services/AuthService'); +const { isEnabled, generateShortLivedToken } = require('@librechat/api'); const footer = `Use the context as your learned knowledge to better answer the user. diff --git a/api/app/clients/tools/util/fileSearch.js b/api/app/clients/tools/util/fileSearch.js index c7e7c8833..00fa46d43 100644 --- a/api/app/clients/tools/util/fileSearch.js +++ b/api/app/clients/tools/util/fileSearch.js @@ -2,9 +2,9 @@ const { z } = require('zod'); const axios = require('axios'); const { tool } = require('@langchain/core/tools'); const { logger } = require('@librechat/data-schemas'); +const { generateShortLivedToken } = require('@librechat/api'); const { Tools, EToolResources } = require('librechat-data-provider'); const { filterFilesByAgentAccess } = require('~/server/services/Files/permissions'); -const { generateShortLivedToken } = require('~/server/services/AuthService'); const { getFiles } = require('~/models/File'); /** diff --git a/api/server/routes/files/files.js b/api/server/routes/files/files.js index 611abf9ba..af55eb6d6 100644 --- a/api/server/routes/files/files.js +++ b/api/server/routes/files/files.js @@ -387,7 +387,8 @@ router.post('/', async (req, res) => { if ( error.message?.includes('Invalid file format') || - error.message?.includes('No OCR result') + error.message?.includes('No OCR result') || + error.message?.includes('exceeds token limit') ) { message = error.message; } diff --git a/api/server/routes/files/images.js b/api/server/routes/files/images.js index a6a5369bc..b8be413f4 100644 --- a/api/server/routes/files/images.js +++ b/api/server/routes/files/images.js @@ -29,6 +29,17 @@ router.post('/', async (req, res) => { } catch (error) { // TODO: delete remote file if it exists logger.error('[/files/images] Error processing file:', error); + + let message = 'Error processing file'; + + if ( + error.message?.includes('Invalid file format') || + error.message?.includes('No OCR result') || + error.message?.includes('exceeds token limit') + ) { + message = error.message; + } + try { const filepath = path.join( appConfig.paths.imageOutput, @@ -39,7 +50,7 @@ router.post('/', async (req, res) => { } catch (error) { logger.error('[/files/images] Error deleting file:', error); } - res.status(500).json({ message: 'Error processing file' }); + res.status(500).json({ message }); } finally { try { await fs.unlink(req.file.path); diff --git a/api/server/services/AuthService.js b/api/server/services/AuthService.js index 5f9474847..8546b221b 100644 --- a/api/server/services/AuthService.js +++ b/api/server/services/AuthService.js @@ -500,18 +500,6 @@ const resendVerificationEmail = async (req) => { }; } }; -/** - * Generate a short-lived JWT token - * @param {String} userId - The ID of the user - * @param {String} [expireIn='5m'] - The expiration time for the token (default is 5 minutes) - * @returns {String} - The generated JWT token - */ -const generateShortLivedToken = (userId, expireIn = '5m') => { - return jwt.sign({ id: userId }, process.env.JWT_SECRET, { - expiresIn: expireIn, - algorithm: 'HS256', - }); -}; 
module.exports = { logoutUser, @@ -522,5 +510,4 @@ module.exports = { setOpenIDAuthTokens, requestPasswordReset, resendVerificationEmail, - generateShortLivedToken, }; diff --git a/api/server/services/Endpoints/anthropic/build.js b/api/server/services/Endpoints/anthropic/build.js index 2deab4b97..1d2c29d81 100644 --- a/api/server/services/Endpoints/anthropic/build.js +++ b/api/server/services/Endpoints/anthropic/build.js @@ -6,6 +6,7 @@ const buildOptions = (endpoint, parsedBody) => { modelLabel, promptPrefix, maxContextTokens, + fileTokenLimit, resendFiles = anthropicSettings.resendFiles.default, promptCache = anthropicSettings.promptCache.default, thinking = anthropicSettings.thinking.default, @@ -29,6 +30,7 @@ const buildOptions = (endpoint, parsedBody) => { greeting, spec, maxContextTokens, + fileTokenLimit, modelOptions, }); diff --git a/api/server/services/Endpoints/bedrock/build.js b/api/server/services/Endpoints/bedrock/build.js index f5228160f..b9f281bd9 100644 --- a/api/server/services/Endpoints/bedrock/build.js +++ b/api/server/services/Endpoints/bedrock/build.js @@ -6,6 +6,7 @@ const buildOptions = (endpoint, parsedBody) => { modelLabel: name, promptPrefix, maxContextTokens, + fileTokenLimit, resendFiles = true, imageDetail, iconURL, @@ -24,6 +25,7 @@ const buildOptions = (endpoint, parsedBody) => { spec, promptPrefix, maxContextTokens, + fileTokenLimit, model_parameters, }); diff --git a/api/server/services/Endpoints/custom/build.js b/api/server/services/Endpoints/custom/build.js index add78470f..b1839ee03 100644 --- a/api/server/services/Endpoints/custom/build.js +++ b/api/server/services/Endpoints/custom/build.js @@ -7,6 +7,7 @@ const buildOptions = (endpoint, parsedBody, endpointType) => { chatGptLabel, promptPrefix, maxContextTokens, + fileTokenLimit, resendFiles = true, imageDetail, iconURL, @@ -27,6 +28,7 @@ const buildOptions = (endpoint, parsedBody, endpointType) => { greeting, spec, maxContextTokens, + fileTokenLimit, modelOptions, }); diff --git a/api/server/services/Endpoints/google/build.js b/api/server/services/Endpoints/google/build.js index 11b048694..3ac6b167c 100644 --- a/api/server/services/Endpoints/google/build.js +++ b/api/server/services/Endpoints/google/build.js @@ -12,6 +12,7 @@ const buildOptions = (endpoint, parsedBody) => { spec, artifacts, maxContextTokens, + fileTokenLimit, ...modelOptions } = parsedBody; const endpointOption = removeNullishValues({ @@ -24,6 +25,7 @@ const buildOptions = (endpoint, parsedBody) => { greeting, spec, maxContextTokens, + fileTokenLimit, modelOptions, }); diff --git a/api/server/services/Endpoints/openAI/build.js b/api/server/services/Endpoints/openAI/build.js index ff9cc484e..611546a54 100644 --- a/api/server/services/Endpoints/openAI/build.js +++ b/api/server/services/Endpoints/openAI/build.js @@ -7,6 +7,7 @@ const buildOptions = (endpoint, parsedBody) => { chatGptLabel, promptPrefix, maxContextTokens, + fileTokenLimit, resendFiles = true, imageDetail, iconURL, @@ -27,6 +28,7 @@ const buildOptions = (endpoint, parsedBody) => { greeting, spec, maxContextTokens, + fileTokenLimit, modelOptions, }); diff --git a/api/server/services/Files/Audio/STTService.js b/api/server/services/Files/Audio/STTService.js index 2b6fa1a39..9255ba30c 100644 --- a/api/server/services/Files/Audio/STTService.js +++ b/api/server/services/Files/Audio/STTService.js @@ -319,4 +319,4 @@ async function speechToText(req, res) { await sttService.processSpeechToText(req, res); } -module.exports = { speechToText }; +module.exports = { STTService, 
speechToText }; diff --git a/api/server/services/Files/Local/crud.js b/api/server/services/Files/Local/crud.js index f6c9ddcf3..16e75ba20 100644 --- a/api/server/services/Files/Local/crud.js +++ b/api/server/services/Files/Local/crud.js @@ -3,7 +3,7 @@ const path = require('path'); const axios = require('axios'); const { logger } = require('@librechat/data-schemas'); const { EModelEndpoint } = require('librechat-data-provider'); -const { generateShortLivedToken } = require('~/server/services/AuthService'); +const { generateShortLivedToken } = require('@librechat/api'); const { getBufferMetadata } = require('~/server/utils'); const paths = require('~/config/paths'); diff --git a/api/server/services/Files/VectorDB/crud.js b/api/server/services/Files/VectorDB/crud.js index 5e00e71b5..4eea1169c 100644 --- a/api/server/services/Files/VectorDB/crud.js +++ b/api/server/services/Files/VectorDB/crud.js @@ -1,10 +1,9 @@ const fs = require('fs'); const axios = require('axios'); const FormData = require('form-data'); -const { logAxiosError } = require('@librechat/api'); const { logger } = require('@librechat/data-schemas'); const { FileSources } = require('librechat-data-provider'); -const { generateShortLivedToken } = require('~/server/services/AuthService'); +const { logAxiosError, generateShortLivedToken } = require('@librechat/api'); /** * Deletes a file from the vector database. This function takes a file object, constructs the full path, and diff --git a/api/server/services/Files/images/encode.js b/api/server/services/Files/images/encode.js index b7f6325d3..34128e315 100644 --- a/api/server/services/Files/images/encode.js +++ b/api/server/services/Files/images/encode.js @@ -1,13 +1,16 @@ const axios = require('axios'); -const { logAxiosError } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); +const { logAxiosError, processTextWithTokenLimit } = require('@librechat/api'); const { FileSources, VisionModes, ImageDetail, ContentTypes, EModelEndpoint, + mergeFileConfig, } = require('librechat-data-provider'); const { getStrategyFunctions } = require('~/server/services/Files/strategies'); +const countTokens = require('~/server/utils/countTokens'); /** * Converts a readable stream to a base64 encoded string. @@ -102,11 +105,28 @@ async function encodeAndFormat(req, files, endpoint, mode) { return result; } + const fileTokenLimit = + req.body?.fileTokenLimit ?? mergeFileConfig(req.config?.fileConfig).fileTokenLimit; + for (let file of files) { /** @type {FileSources} */ const source = file.source ?? FileSources.local; if (source === FileSources.text && file.text) { - result.text += `${!result.text ? 'Attached document(s):\n```md' : '\n\n---\n\n'}# "${file.filename}"\n${file.text}\n`; + let fileText = file.text; + + const { text: limitedText, wasTruncated } = await processTextWithTokenLimit({ + text: fileText, + tokenLimit: fileTokenLimit, + tokenCountFn: (text) => countTokens(text), + }); + + if (wasTruncated) { + logger.debug( + `[encodeAndFormat] Text content truncated for file: ${file.filename} due to token limits`, + ); + } + + result.text += `${!result.text ? 
'Attached document(s):\n```md' : '\n\n---\n\n'}# "${file.filename}"\n${limitedText}\n`; } if (!file.height) { @@ -135,7 +155,7 @@ async function encodeAndFormat(req, files, endpoint, mode) { base64Data = null; continue; } catch (error) { - // Error handling code + logger.error('Error processing image from blob storage:', error); } } else if (source !== FileSources.local && base64Only.has(endpoint)) { const [_file, imageURL] = await preparePayload(req, file); diff --git a/api/server/services/Files/process.js b/api/server/services/Files/process.js index 73fffe6ac..d7864077e 100644 --- a/api/server/services/Files/process.js +++ b/api/server/services/Files/process.js @@ -17,7 +17,8 @@ const { isAssistantsEndpoint, } = require('librechat-data-provider'); const { EnvVar } = require('@librechat/agents'); -const { sanitizeFilename } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); +const { sanitizeFilename, parseText, processAudioFile } = require('@librechat/api'); const { convertImage, resizeAndConvert, @@ -33,7 +34,7 @@ const { checkCapability } = require('~/server/services/Config'); const { LB_QueueAsyncCall } = require('~/server/utils/queue'); const { getStrategyFunctions } = require('./strategies'); const { determineFileType } = require('~/server/utils'); -const { logger } = require('~/config'); +const { STTService } = require('./Audio/STTService'); /** * Creates a modular file upload wrapper that ensures filename sanitization @@ -552,51 +553,84 @@ const processAgentFileUpload = async ({ req, res, metadata }) => { } // Note: File search processing continues to dual storage logic below } else if (tool_resource === EToolResources.ocr) { - const isOCREnabled = await checkCapability(req, AgentCapabilities.ocr); - if (!isOCREnabled) { - throw new Error('OCR capability is not enabled for Agents'); - } - - const { handleFileUpload: uploadOCR } = getStrategyFunctions( - appConfig?.ocr?.strategy ?? FileSources.mistral_ocr, - ); const { file_id, temp_file_id = null } = metadata; - const { - text, - bytes, - // TODO: OCR images support? - images: _i, - filename, - filepath: ocrFileURL, - } = await uploadOCR({ req, appConfig, file, loadAuthValues }); - - const fileInfo = removeNullishValues({ - text, - bytes, - file_id, - temp_file_id, - user: req.user.id, - type: 'text/plain', - filepath: ocrFileURL, - source: FileSources.text, - filename: filename ?? file.originalname, - model: messageAttachment ? undefined : req.body.model, - context: messageAttachment ? FileContext.message_attachment : FileContext.agents, - }); - - if (!messageAttachment && tool_resource) { - await addAgentResourceFile({ - req, + /** + * @param {object} params + * @param {string} params.text + * @param {number} params.bytes + * @param {string} params.filepath + * @param {string} params.type + * @return {Promise} + */ + const createTextFile = async ({ text, bytes, filepath, type = 'text/plain' }) => { + const fileInfo = removeNullishValues({ + text, + bytes, file_id, - agent_id, - tool_resource, + temp_file_id, + user: req.user.id, + type, + filepath: filepath ?? file.path, + source: FileSources.text, + filename: file.originalname, + model: messageAttachment ? undefined : req.body.model, + context: messageAttachment ? 
FileContext.message_attachment : FileContext.agents, }); + + if (!messageAttachment && tool_resource) { + await addAgentResourceFile({ + req, + file_id, + agent_id, + tool_resource, + }); + } + const result = await createFile(fileInfo, true); + return res + .status(200) + .json({ message: 'Agent file uploaded and processed successfully', ...result }); + }; + + const fileConfig = mergeFileConfig(appConfig.fileConfig); + + const shouldUseOCR = fileConfig.checkType( + file.mimetype, + fileConfig.ocr?.supportedMimeTypes || [], + ); + + if (shouldUseOCR && !(await checkCapability(req, AgentCapabilities.ocr))) { + throw new Error('OCR capability is not enabled for Agents'); + } else if (shouldUseOCR) { + const { handleFileUpload: uploadOCR } = getStrategyFunctions( + appConfig?.ocr?.strategy ?? FileSources.mistral_ocr, + ); + const { text, bytes, filepath: ocrFileURL } = await uploadOCR({ req, file, loadAuthValues }); + return await createTextFile({ text, bytes, filepath: ocrFileURL }); } - const result = await createFile(fileInfo, true); - return res - .status(200) - .json({ message: 'Agent file uploaded and processed successfully', ...result }); + + const shouldUseSTT = fileConfig.checkType( + file.mimetype, + fileConfig.stt?.supportedMimeTypes || [], + ); + + if (shouldUseSTT) { + const sttService = await STTService.getInstance(); + const { text, bytes } = await processAudioFile({ file, sttService }); + return await createTextFile({ text, bytes }); + } + + const shouldUseText = fileConfig.checkType( + file.mimetype, + fileConfig.text?.supportedMimeTypes || [], + ); + + if (!shouldUseText) { + throw new Error(`File type ${file.mimetype} is not supported for OCR or text parsing`); + } + + const { text, bytes } = await parseText({ req, file, file_id }); + return await createTextFile({ text, bytes, type: file.mimetype }); } // Dual storage pattern for RAG files: Storage + Vector DB diff --git a/api/server/services/Files/processFiles.test.js b/api/server/services/Files/processFiles.test.js index d389ca17a..8417f639e 100644 --- a/api/server/services/Files/processFiles.test.js +++ b/api/server/services/Files/processFiles.test.js @@ -101,6 +101,11 @@ jest.mock('~/server/utils', () => ({ determineFileType: jest.fn(), })); +jest.mock('@librechat/api', () => ({ + parseText: jest.fn(), + parseTextNative: jest.fn(), +})); + // Import the actual processFiles function after all mocks are set up const { processFiles } = require('./process'); const { updateFileUsage } = require('~/models/File'); diff --git a/client/src/hooks/Files/useFileHandling.ts b/client/src/hooks/Files/useFileHandling.ts index 8c8f33e57..d90c310a9 100644 --- a/client/src/hooks/Files/useFileHandling.ts +++ b/client/src/hooks/Files/useFileHandling.ts @@ -135,10 +135,14 @@ const useFileHandling = (params?: UseFileHandling) => { const file_id = body.get('file_id'); clearUploadTimer(file_id as string); deleteFileById(file_id as string); - const errorMessage = - error?.code === 'ERR_CANCELED' - ? 'com_error_files_upload_canceled' - : (error?.response?.data?.message ?? 'com_error_files_upload'); + + let errorMessage = 'com_error_files_upload'; + + if (error?.code === 'ERR_CANCELED') { + errorMessage = 'com_error_files_upload_canceled'; + } else if (error?.response?.data?.message) { + errorMessage = error.response.data.message; + } setError(errorMessage); }, }, @@ -256,6 +260,8 @@ const useFileHandling = (params?: UseFileHandling) => { fileConfig?.endpoints?.default ?? defaultFileConfig.endpoints[endpoint] ?? 
defaultFileConfig.endpoints.default, + toolResource: _toolResource, + fileConfig: fileConfig, }); } catch (error) { console.error('file validation error', error); } diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index 6f8a13bfc..c725ac9e6 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -889,6 +889,8 @@ "com_ui_field_required": "This field is required", "com_ui_file_size": "File Size", "com_ui_files": "Files", + "com_ui_file_token_limit": "File Token Limit", + "com_ui_file_token_limit_desc": "Set maximum token limit for file processing to control costs and resource usage", "com_ui_filter_prompts": "Filter Prompts", "com_ui_filter_prompts_name": "Filter prompts by name", "com_ui_final_touch": "Final touch", diff --git a/client/src/utils/files.ts b/client/src/utils/files.ts index ab822bd50..496c6a1c0 100644 --- a/client/src/utils/files.ts +++ b/client/src/utils/files.ts @@ -3,10 +3,11 @@ import { megabyte, QueryKeys, excelMimeTypes, + EToolResources, codeTypeMapping, fileConfig as defaultFileConfig, } from 'librechat-data-provider'; -import type { TFile, EndpointFileConfig } from 'librechat-data-provider'; +import type { TFile, EndpointFileConfig, FileConfig } from 'librechat-data-provider'; import type { QueryClient } from '@tanstack/react-query'; import type { ExtendedFile } from '~/common'; @@ -203,11 +204,15 @@ export const validateFiles = ({ fileList, setError, endpointFileConfig, + toolResource, + fileConfig, }: { fileList: File[]; files: Map<string, ExtendedFile>; setError: (error: string) => void; endpointFileConfig: EndpointFileConfig; + toolResource?: string; + fileConfig: FileConfig | null; }) => { const { fileLimit, fileSizeLimit, totalSizeLimit, supportedMimeTypes } = endpointFileConfig; const existingFiles = Array.from(files.values()); @@ -247,7 +252,16 @@ export const validateFiles = ({ fileList[i] = newFile; } - if (!checkType(originalFile.type, supportedMimeTypes)) { + let mimeTypesToCheck = supportedMimeTypes; + if (toolResource === EToolResources.ocr) { + mimeTypesToCheck = [ + ...(fileConfig?.text?.supportedMimeTypes || []), + ...(fileConfig?.ocr?.supportedMimeTypes || []), + ...(fileConfig?.stt?.supportedMimeTypes || []), + ]; + } + + if (!checkType(originalFile.type, mimeTypesToCheck)) { console.log(originalFile); setError('Currently, unsupported file type: ' + originalFile.type); return false; diff --git a/package-lock.json b/package-lock.json index c3a250f85..a4b6510d2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -51813,6 +51813,7 @@ "@types/diff": "^6.0.0", "@types/express": "^5.0.0", "@types/jest": "^29.5.2", + "@types/jsonwebtoken": "^9.0.0", "@types/multer": "^1.4.13", "@types/node": "^20.3.0", "@types/react": "^18.2.18", @@ -51837,7 +51838,9 @@ "diff": "^7.0.0", "eventsource": "^3.0.2", "express": "^4.21.2", + "form-data": "^4.0.4", "js-yaml": "^4.1.0", + "jsonwebtoken": "^9.0.0", "keyv": "^5.3.2", "librechat-data-provider": "*", "node-fetch": "2.7.0", diff --git a/packages/api/package.json b/packages/api/package.json index defd1840c..af055ef61 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -53,6 +53,7 @@ "@types/diff": "^6.0.0", "@types/express": "^5.0.0", "@types/jest": "^29.5.2", + "@types/jsonwebtoken": "^9.0.0", "@types/multer": "^1.4.13", "@types/node": "^20.3.0", "@types/react": "^18.2.18", @@ -80,7 +81,9 @@ "diff": "^7.0.0", "eventsource": "^3.0.2", "express": "^4.21.2", + "form-data": "^4.0.4", "js-yaml": "^4.1.0", + "jsonwebtoken": "^9.0.0", 
"keyv": "^5.3.2", "librechat-data-provider": "*", "node-fetch": "2.7.0", diff --git a/packages/api/src/crypto/index.ts b/packages/api/src/crypto/index.ts index 73ebae8b1..0821d6d8b 100644 --- a/packages/api/src/crypto/index.ts +++ b/packages/api/src/crypto/index.ts @@ -1 +1,2 @@ export * from './encryption'; +export * from './jwt'; diff --git a/packages/api/src/crypto/jwt.ts b/packages/api/src/crypto/jwt.ts new file mode 100644 index 000000000..380fc613a --- /dev/null +++ b/packages/api/src/crypto/jwt.ts @@ -0,0 +1,14 @@ +import jwt from 'jsonwebtoken'; + +/** + * Generate a short-lived JWT token + * @param {String} userId - The ID of the user + * @param {String} [expireIn='5m'] - The expiration time for the token (default is 5 minutes) + * @returns {String} - The generated JWT token + */ +export const generateShortLivedToken = (userId: string, expireIn: string = '5m'): string => { + return jwt.sign({ id: userId }, process.env.JWT_SECRET!, { + expiresIn: expireIn, + algorithm: 'HS256', + }); +}; diff --git a/packages/api/src/files/audio.ts b/packages/api/src/files/audio.ts new file mode 100644 index 000000000..27e25260c --- /dev/null +++ b/packages/api/src/files/audio.ts @@ -0,0 +1,38 @@ +import fs from 'fs'; +import { logger } from '@librechat/data-schemas'; +import type { STTService, AudioFileInfo, FileObject, AudioProcessingResult } from '~/types'; + +/** + * Processes audio files using Speech-to-Text (STT) service. + * @param {Object} params - The parameters object. + * @param {FileObject} params.file - The audio file object. + * @param {STTService} params.sttService - The STT service instance. + * @returns {Promise} A promise that resolves to an object containing text and bytes. + */ +export async function processAudioFile({ + file, + sttService, +}: { + file: FileObject; + sttService: STTService; +}): Promise { + try { + const audioBuffer = await fs.promises.readFile(file.path); + const audioFile: AudioFileInfo = { + originalname: file.originalname, + mimetype: file.mimetype, + size: file.size, + }; + + const [provider, sttSchema] = await sttService.getProviderSchema(); + const text = await sttService.sttRequest(provider, sttSchema, { audioBuffer, audioFile }); + + return { + text, + bytes: Buffer.byteLength(text, 'utf8'), + }; + } catch (error) { + logger.error('Error processing audio file with STT:', error); + throw new Error(`Failed to process audio file: ${(error as Error).message}`); + } +} diff --git a/packages/api/src/files/index.ts b/packages/api/src/files/index.ts index 1c43619fe..fa156f15f 100644 --- a/packages/api/src/files/index.ts +++ b/packages/api/src/files/index.ts @@ -1,2 +1,4 @@ export * from './mistral/crud'; +export * from './audio'; +export * from './text'; export * from './parse'; diff --git a/packages/api/src/files/text.spec.ts b/packages/api/src/files/text.spec.ts new file mode 100644 index 000000000..d1a1dad89 --- /dev/null +++ b/packages/api/src/files/text.spec.ts @@ -0,0 +1,255 @@ +import { FileSources } from 'librechat-data-provider'; +import { Readable } from 'stream'; + +jest.mock('@librechat/data-schemas', () => ({ + logger: { + debug: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + }, +})); + +import { parseTextNative, parseText } from './text'; + +jest.mock('fs', () => ({ + readFileSync: jest.fn(), + createReadStream: jest.fn(), +})); + +jest.mock('../crypto/jwt', () => ({ + generateShortLivedToken: jest.fn(), +})); + +jest.mock('axios', () => ({ + get: jest.fn(), + post: jest.fn(), + interceptors: { + request: { use: jest.fn(), eject: jest.fn() 
}, + response: { use: jest.fn(), eject: jest.fn() }, + }, +})); + +jest.mock('form-data', () => { + return jest.fn().mockImplementation(() => ({ + append: jest.fn(), + getHeaders: jest.fn().mockReturnValue({ 'content-type': 'multipart/form-data' }), + })); +}); + +import fs, { ReadStream } from 'fs'; +import axios from 'axios'; +import FormData from 'form-data'; +import { generateShortLivedToken } from '../crypto/jwt'; + +const mockedFs = fs as jest.Mocked<typeof fs>; +const mockedAxios = axios as jest.Mocked<typeof axios>; +const mockedFormData = FormData as jest.MockedClass<typeof FormData>; +const mockedGenerateShortLivedToken = generateShortLivedToken as jest.MockedFunction< + typeof generateShortLivedToken +>; + +describe('text', () => { + const mockFile: Express.Multer.File = { + fieldname: 'file', + originalname: 'test.txt', + encoding: '7bit', + mimetype: 'text/plain', + size: 100, + destination: '/tmp', + filename: 'test.txt', + path: '/tmp/test.txt', + buffer: Buffer.from('test content'), + stream: new Readable(), + }; + + const mockReq = { + user: { id: 'user123' }, + }; + + const mockFileId = 'file123'; + + beforeEach(() => { + jest.clearAllMocks(); + delete process.env.RAG_API_URL; + }); + + describe('parseTextNative', () => { + it('should successfully parse a text file', () => { + const mockText = 'Hello, world!'; + mockedFs.readFileSync.mockReturnValue(mockText); + + const result = parseTextNative(mockFile); + + expect(mockedFs.readFileSync).toHaveBeenCalledWith('/tmp/test.txt', 'utf8'); + expect(result).toEqual({ + text: mockText, + bytes: Buffer.byteLength(mockText, 'utf8'), + source: FileSources.text, + }); + }); + + it('should throw an error when file cannot be read', () => { + const mockError = new Error('File not found'); + mockedFs.readFileSync.mockImplementation(() => { + throw mockError; + }); + + expect(() => parseTextNative(mockFile)).toThrow( + 'Failed to read file as text: Error: File not found', + ); + }); + }); + + describe('parseText', () => { + beforeEach(() => { + mockedGenerateShortLivedToken.mockReturnValue('mock-jwt-token'); + + const mockFormDataInstance = { + append: jest.fn(), + getHeaders: jest.fn().mockReturnValue({ 'content-type': 'multipart/form-data' }), + }; + mockedFormData.mockImplementation(() => mockFormDataInstance as unknown as FormData); + + mockedFs.createReadStream.mockReturnValue({} as unknown as ReadStream); + }); + + it('should fall back to native parsing when RAG_API_URL is not defined', async () => { + const mockText = 'Native parsing result'; + mockedFs.readFileSync.mockReturnValue(mockText); + + const result = await parseText({ + req: mockReq, + file: mockFile, + file_id: mockFileId, + }); + + expect(result).toEqual({ + text: mockText, + bytes: Buffer.byteLength(mockText, 'utf8'), + source: FileSources.text, + }); + expect(mockedAxios.get).not.toHaveBeenCalled(); + }); + + it('should fall back to native parsing when health check fails', async () => { + process.env.RAG_API_URL = 'http://rag-api.test'; + const mockText = 'Native parsing result'; + mockedFs.readFileSync.mockReturnValue(mockText); + + mockedAxios.get.mockRejectedValue(new Error('Health check failed')); + + const result = await parseText({ + req: mockReq, + file: mockFile, + file_id: mockFileId, + }); + + expect(mockedAxios.get).toHaveBeenCalledWith('http://rag-api.test/health', { + timeout: 5000, + }); + expect(result).toEqual({ + text: mockText, + bytes: Buffer.byteLength(mockText, 'utf8'), + source: FileSources.text, + }); + }); + + it('should fall back to native parsing when health check returns non-OK 
status', async () => { + process.env.RAG_API_URL = 'http://rag-api.test'; + const mockText = 'Native parsing result'; + mockedFs.readFileSync.mockReturnValue(mockText); + + mockedAxios.get.mockResolvedValue({ + status: 500, + statusText: 'Internal Server Error', + }); + + const result = await parseText({ + req: mockReq, + file: mockFile, + file_id: mockFileId, + }); + + expect(result).toEqual({ + text: mockText, + bytes: Buffer.byteLength(mockText, 'utf8'), + source: FileSources.text, + }); + }); + + it('should accept empty text as valid RAG API response', async () => { + process.env.RAG_API_URL = 'http://rag-api.test'; + + mockedAxios.get.mockResolvedValue({ + status: 200, + statusText: 'OK', + }); + + mockedAxios.post.mockResolvedValue({ + data: { + text: '', + }, + }); + + const result = await parseText({ + req: mockReq, + file: mockFile, + file_id: mockFileId, + }); + + expect(result).toEqual({ + text: '', + bytes: 0, + source: FileSources.text, + }); + }); + + it('should fall back to native parsing when RAG API response lacks text property', async () => { + process.env.RAG_API_URL = 'http://rag-api.test'; + const mockText = 'Native parsing result'; + mockedFs.readFileSync.mockReturnValue(mockText); + + mockedAxios.get.mockResolvedValue({ + status: 200, + statusText: 'OK', + }); + + mockedAxios.post.mockResolvedValue({ + data: {}, + }); + + const result = await parseText({ + req: mockReq, + file: mockFile, + file_id: mockFileId, + }); + + expect(result).toEqual({ + text: mockText, + bytes: Buffer.byteLength(mockText, 'utf8'), + source: FileSources.text, + }); + }); + + it('should fall back to native parsing when user is undefined', async () => { + process.env.RAG_API_URL = 'http://rag-api.test'; + const mockText = 'Native parsing result'; + mockedFs.readFileSync.mockReturnValue(mockText); + + const result = await parseText({ + req: { user: undefined }, + file: mockFile, + file_id: mockFileId, + }); + + expect(mockedGenerateShortLivedToken).not.toHaveBeenCalled(); + expect(mockedAxios.get).not.toHaveBeenCalled(); + expect(mockedAxios.post).not.toHaveBeenCalled(); + expect(result).toEqual({ + text: mockText, + bytes: Buffer.byteLength(mockText, 'utf8'), + source: FileSources.text, + }); + }); + }); +}); diff --git a/packages/api/src/files/text.ts b/packages/api/src/files/text.ts new file mode 100644 index 000000000..41b4ca0ab --- /dev/null +++ b/packages/api/src/files/text.ts @@ -0,0 +1,113 @@ +import fs from 'fs'; +import axios from 'axios'; +import FormData from 'form-data'; +import { logger } from '@librechat/data-schemas'; +import { FileSources } from 'librechat-data-provider'; +import type { Request as ServerRequest } from 'express'; +import { generateShortLivedToken } from '~/crypto/jwt'; + +/** + * Attempts to parse text using RAG API, falls back to native text parsing + * @param {Object} params - The parameters object + * @param {Express.Request} params.req - The Express request object + * @param {Express.Multer.File} params.file - The uploaded file + * @param {string} params.file_id - The file ID + * @returns {Promise<{text: string, bytes: number, source: string}>} + */ +export async function parseText({ + req, + file, + file_id, +}: { + req: Pick & { + user?: { id: string }; + }; + file: Express.Multer.File; + file_id: string; +}): Promise<{ text: string; bytes: number; source: string }> { + if (!process.env.RAG_API_URL) { + logger.debug('[parseText] RAG_API_URL not defined, falling back to native text parsing'); + return parseTextNative(file); + } + + if (!req.user?.id) { + 
logger.debug('[parseText] No user ID provided, falling back to native text parsing'); + return parseTextNative(file); + } + + try { + const healthResponse = await axios.get(`${process.env.RAG_API_URL}/health`, { + timeout: 5000, + }); + if (healthResponse?.statusText !== 'OK' && healthResponse?.status !== 200) { + logger.debug('[parseText] RAG API health check failed, falling back to native parsing'); + return parseTextNative(file); + } + } catch (healthError) { + logger.debug( + '[parseText] RAG API health check failed, falling back to native parsing', + healthError, + ); + return parseTextNative(file); + } + + try { + const jwtToken = generateShortLivedToken(req.user.id); + const formData = new FormData(); + formData.append('file_id', file_id); + formData.append('file', fs.createReadStream(file.path)); + + const formHeaders = formData.getHeaders(); + + const response = await axios.post(`${process.env.RAG_API_URL}/text`, formData, { + headers: { + Authorization: `Bearer ${jwtToken}`, + accept: 'application/json', + ...formHeaders, + }, + timeout: 30000, + }); + + const responseData = response.data; + logger.debug('[parseText] Response from RAG API', responseData); + + if (!('text' in responseData)) { + throw new Error('RAG API did not return parsed text'); + } + + return { + text: responseData.text, + bytes: Buffer.byteLength(responseData.text, 'utf8'), + source: FileSources.text, + }; + } catch (error) { + logger.warn('[parseText] RAG API text parsing failed, falling back to native parsing', error); + return parseTextNative(file); + } +} + +/** + * Native JavaScript text parsing fallback + * Simple text file reading - complex formats handled by RAG API + * @param {Express.Multer.File} file - The uploaded file + * @returns {{text: string, bytes: number, source: string}} + */ +export function parseTextNative(file: Express.Multer.File): { + text: string; + bytes: number; + source: string; +} { + try { + const text = fs.readFileSync(file.path, 'utf8'); + const bytes = Buffer.byteLength(text, 'utf8'); + + return { + text, + bytes, + source: FileSources.text, + }; + } catch (error) { + console.error('[parseTextNative] Failed to parse file:', error); + throw new Error(`Failed to read file as text: ${error}`); + } +} diff --git a/packages/api/src/types/files.ts b/packages/api/src/types/files.ts new file mode 100644 index 000000000..b0540a8d2 --- /dev/null +++ b/packages/api/src/types/files.ts @@ -0,0 +1,27 @@ +export interface STTService { + getInstance(): Promise<STTService>; + getProviderSchema(): Promise<[string, object]>; + sttRequest( + provider: string, + schema: object, + params: { audioBuffer: Buffer; audioFile: AudioFileInfo }, + ): Promise<string>; +} + +export interface AudioFileInfo { + originalname: string; + mimetype: string; + size: number; +} + +export interface FileObject { + path: string; + originalname: string; + mimetype: string; + size: number; +} + +export interface AudioProcessingResult { + text: string; + bytes: number; +} diff --git a/packages/api/src/types/index.ts b/packages/api/src/types/index.ts index ff2d6cf69..f499ec4a9 100644 --- a/packages/api/src/types/index.ts +++ b/packages/api/src/types/index.ts @@ -4,6 +4,7 @@ export * from './balance'; export * from './endpoints'; export * from './events'; export * from './error'; +export * from './files'; export * from './google'; export * from './http'; export * from './mistral'; diff --git a/packages/api/src/utils/index.ts b/packages/api/src/utils/index.ts index 2ce0381af..f20550eae 100644 --- a/packages/api/src/utils/index.ts +++ 
b/packages/api/src/utils/index.ts @@ -11,6 +11,7 @@ export * from './llm'; export * from './math'; export * from './openid'; export * from './tempChatRetention'; +export * from './text'; export { default as Tokenizer } from './tokenizer'; export * from './yaml'; export * from './http'; diff --git a/packages/api/src/utils/llm.test.ts b/packages/api/src/utils/llm.test.ts index a7d18e0cf..c4d270f40 100644 --- a/packages/api/src/utils/llm.test.ts +++ b/packages/api/src/utils/llm.test.ts @@ -7,6 +7,7 @@ describe('extractLibreChatParams', () => { expect(result.resendFiles).toBe(true); expect(result.promptPrefix).toBeUndefined(); expect(result.maxContextTokens).toBeUndefined(); + expect(result.fileTokenLimit).toBeUndefined(); expect(result.modelLabel).toBeUndefined(); expect(result.modelOptions).toEqual({}); }); @@ -17,6 +18,7 @@ describe('extractLibreChatParams', () => { expect(result.resendFiles).toBe(true); expect(result.promptPrefix).toBeUndefined(); expect(result.maxContextTokens).toBeUndefined(); + expect(result.fileTokenLimit).toBeUndefined(); expect(result.modelLabel).toBeUndefined(); expect(result.modelOptions).toEqual({}); }); @@ -26,6 +28,7 @@ describe('extractLibreChatParams', () => { resendFiles: false, promptPrefix: 'You are a helpful assistant', maxContextTokens: 4096, + fileTokenLimit: 50000, modelLabel: 'GPT-4', model: 'gpt-4', temperature: 0.7, @@ -37,6 +40,7 @@ describe('extractLibreChatParams', () => { expect(result.resendFiles).toBe(false); expect(result.promptPrefix).toBe('You are a helpful assistant'); expect(result.maxContextTokens).toBe(4096); + expect(result.fileTokenLimit).toBe(50000); expect(result.modelLabel).toBe('GPT-4'); expect(result.modelOptions).toEqual({ model: 'gpt-4', @@ -50,6 +54,7 @@ describe('extractLibreChatParams', () => { resendFiles: true, promptPrefix: null, maxContextTokens: 2048, + fileTokenLimit: undefined, modelLabel: null, model: 'claude-3', }; @@ -59,6 +64,7 @@ describe('extractLibreChatParams', () => { expect(result.resendFiles).toBe(true); expect(result.promptPrefix).toBeNull(); expect(result.maxContextTokens).toBe(2048); + expect(result.fileTokenLimit).toBeUndefined(); expect(result.modelLabel).toBeNull(); expect(result.modelOptions).toEqual({ model: 'claude-3', @@ -77,6 +83,7 @@ describe('extractLibreChatParams', () => { expect(result.resendFiles).toBe(true); // Should use default expect(result.promptPrefix).toBe('Test prefix'); expect(result.maxContextTokens).toBeUndefined(); + expect(result.fileTokenLimit).toBeUndefined(); expect(result.modelLabel).toBeUndefined(); expect(result.modelOptions).toEqual({ model: 'gpt-3.5-turbo', @@ -90,6 +97,7 @@ describe('extractLibreChatParams', () => { expect(result.resendFiles).toBe(true); // Should use default expect(result.promptPrefix).toBeUndefined(); expect(result.maxContextTokens).toBeUndefined(); + expect(result.fileTokenLimit).toBeUndefined(); expect(result.modelLabel).toBeUndefined(); expect(result.modelOptions).toEqual({}); }); @@ -99,6 +107,7 @@ describe('extractLibreChatParams', () => { resendFiles: false, promptPrefix: 'Custom prompt', maxContextTokens: 8192, + fileTokenLimit: 25000, modelLabel: 'Custom Model', // Model options model: 'gpt-4', @@ -117,6 +126,7 @@ describe('extractLibreChatParams', () => { expect(result.resendFiles).toBe(false); expect(result.promptPrefix).toBe('Custom prompt'); expect(result.maxContextTokens).toBe(8192); + expect(result.fileTokenLimit).toBe(25000); expect(result.modelLabel).toBe('Custom Model'); // Model options should include everything else diff --git 
a/packages/api/src/utils/llm.ts b/packages/api/src/utils/llm.ts index 65475c805..d1e1d9a43 100644 --- a/packages/api/src/utils/llm.ts +++ b/packages/api/src/utils/llm.ts @@ -8,6 +8,7 @@ type LibreChatParams = { resendFiles: boolean; promptPrefix?: string | null; maxContextTokens?: number; + fileTokenLimit?: number; modelLabel?: string | null; }; @@ -32,6 +33,7 @@ export function extractLibreChatParams( (librechat.resendFiles.default as boolean); const promptPrefix = (delete modelOptions.promptPrefix, options.promptPrefix); const maxContextTokens = (delete modelOptions.maxContextTokens, options.maxContextTokens); + const fileTokenLimit = (delete modelOptions.fileTokenLimit, options.fileTokenLimit); const modelLabel = (delete modelOptions.modelLabel, options.modelLabel); return { @@ -40,6 +42,7 @@ export function extractLibreChatParams( LibreChatKeys >, maxContextTokens, + fileTokenLimit, promptPrefix, resendFiles, modelLabel, diff --git a/packages/api/src/utils/text.ts b/packages/api/src/utils/text.ts new file mode 100644 index 000000000..3de343bd3 --- /dev/null +++ b/packages/api/src/utils/text.ts @@ -0,0 +1,65 @@ +import { logger } from '@librechat/data-schemas'; + +/** + * Processes text content by counting tokens and truncating if it exceeds the specified limit. + * @param text - The text content to process + * @param tokenLimit - The maximum number of tokens allowed + * @param tokenCountFn - Function to count tokens + * @returns Promise resolving to object with processed text, token count, and truncation status + */ +export async function processTextWithTokenLimit({ + text, + tokenLimit, + tokenCountFn, +}: { + text: string; + tokenLimit: number; + tokenCountFn: (text: string) => number; +}): Promise<{ text: string; tokenCount: number; wasTruncated: boolean }> { + const originalTokenCount = await tokenCountFn(text); + + if (originalTokenCount <= tokenLimit) { + return { + text, + tokenCount: originalTokenCount, + wasTruncated: false, + }; + } + + /** + * Doing binary search here to find the truncation point efficiently + * (May be a better way to go about this) + */ + let low = 0; + let high = text.length; + let bestText = ''; + + logger.debug( + `[textTokenLimiter] Text content exceeds token limit: ${originalTokenCount} > ${tokenLimit}, truncating...`, + ); + + while (low <= high) { + const mid = Math.floor((low + high) / 2); + const truncatedText = text.substring(0, mid); + const tokenCount = await tokenCountFn(truncatedText); + + if (tokenCount <= tokenLimit) { + bestText = truncatedText; + low = mid + 1; + } else { + high = mid - 1; + } + } + + const finalTokenCount = await tokenCountFn(bestText); + + logger.warn( + `[textTokenLimiter] Text truncated from ${originalTokenCount} to ${finalTokenCount} tokens (limit: ${tokenLimit})`, + ); + + return { + text: bestText, + tokenCount: finalTokenCount, + wasTruncated: true, + }; +} diff --git a/packages/data-provider/specs/filetypes.spec.ts b/packages/data-provider/specs/filetypes.spec.ts index e37baca59..39711dadd 100644 --- a/packages/data-provider/specs/filetypes.spec.ts +++ b/packages/data-provider/specs/filetypes.spec.ts @@ -14,7 +14,7 @@ import { } from '../src/file-config'; describe('MIME Type Regex Patterns', () => { - const unsupportedMimeTypes = ['text/x-unknown', 'application/unknown', 'image/bmp', 'audio/mp3']; + const unsupportedMimeTypes = ['text/x-unknown', 'application/unknown', 'image/bmp']; // Testing general supported MIME types fullMimeTypesList.forEach((mimeType) => { diff --git 
a/packages/data-provider/src/file-config.ts b/packages/data-provider/src/file-config.ts index 08cf99fbe..f6ede89ae 100644 --- a/packages/data-provider/src/file-config.ts +++ b/packages/data-provider/src/file-config.ts @@ -122,11 +122,27 @@ export const applicationMimeTypes = export const imageMimeTypes = /^image\/(jpeg|gif|png|webp|heic|heif)$/; +export const audioMimeTypes = + /^audio\/(mp3|mpeg|mpeg3|wav|wave|x-wav|ogg|vorbis|mp4|x-m4a|flac|x-flac|webm)$/; + +export const defaultOCRMimeTypes = [ + imageMimeTypes, + /^application\/pdf$/, + /^application\/vnd\.openxmlformats-officedocument\.(wordprocessingml\.document|presentationml\.presentation|spreadsheetml\.sheet)$/, + /^application\/vnd\.ms-(word|powerpoint|excel)$/, + /^application\/epub\+zip$/, +]; + +export const defaultTextMimeTypes = [textMimeTypes]; + +export const defaultSTTMimeTypes = [audioMimeTypes]; + export const supportedMimeTypes = [ textMimeTypes, excelMimeTypes, applicationMimeTypes, imageMimeTypes, + audioMimeTypes, /** Supported by LC Code Interpreter PAI */ /^image\/(svg|svg\+xml)$/, ]; @@ -169,6 +185,7 @@ export const megabyte = 1024 * 1024; export const mbToBytes = (mb: number): number => mb * megabyte; const defaultSizeLimit = mbToBytes(512); +const defaultTokenLimit = 100000; const assistantsFileConfig = { fileLimit: 10, fileSizeLimit: defaultSizeLimit, @@ -192,12 +209,22 @@ export const fileConfig = { }, serverFileSizeLimit: defaultSizeLimit, avatarSizeLimit: mbToBytes(2), + fileTokenLimit: defaultTokenLimit, clientImageResize: { enabled: false, maxWidth: 1900, maxHeight: 1900, quality: 0.92, }, + ocr: { + supportedMimeTypes: defaultOCRMimeTypes, + }, + text: { + supportedMimeTypes: defaultTextMimeTypes, + }, + stt: { + supportedMimeTypes: defaultSTTMimeTypes, + }, checkType: function (fileType: string, supportedTypes: RegExp[] = supportedMimeTypes) { return supportedTypes.some((regex) => regex.test(fileType)); }, @@ -232,6 +259,7 @@ export const fileConfigSchema = z.object({ endpoints: z.record(endpointFileConfigSchema).optional(), serverFileSizeLimit: z.number().min(0).optional(), avatarSizeLimit: z.number().min(0).optional(), + fileTokenLimit: z.number().min(0).optional(), imageGeneration: z .object({ percentage: z.number().min(0).max(100).optional(), @@ -246,6 +274,16 @@ quality: z.number().min(0).max(1).optional(), }) .optional(), + ocr: z + .object({ + supportedMimeTypes: supportedMimeTypesSchema.optional(), + }) + .optional(), + text: z + .object({ + supportedMimeTypes: supportedMimeTypesSchema.optional(), + }) + .optional(), }); /** Helper function to safely convert string patterns to RegExp objects */ @@ -261,7 +299,21 @@ export const convertStringsToRegex = (patterns: string[]): RegExp[] => }, []); export function mergeFileConfig(dynamic: z.infer<typeof fileConfigSchema> | undefined): FileConfig { - const mergedConfig = fileConfig as FileConfig; + const mergedConfig: FileConfig = { + ...fileConfig, + ocr: { + ...fileConfig.ocr, + supportedMimeTypes: fileConfig.ocr?.supportedMimeTypes || [], + }, + text: { + ...fileConfig.text, + supportedMimeTypes: fileConfig.text?.supportedMimeTypes || [], + }, + stt: { + ...fileConfig.stt, + supportedMimeTypes: fileConfig.stt?.supportedMimeTypes || [], + }, + }; if (!dynamic) { return mergedConfig; } @@ -274,6 +326,10 @@ export function mergeFileConfig(dynamic: z.infer<typeof fileConfigSchema> | unde mergedConfig.avatarSizeLimit = mbToBytes(dynamic.avatarSizeLimit); } + if (dynamic.fileTokenLimit !== undefined) { + mergedConfig.fileTokenLimit = dynamic.fileTokenLimit; + } 
+ // Merge clientImageResize configuration if (dynamic.clientImageResize !== undefined) { mergedConfig.clientImageResize = { @@ -282,6 +338,26 @@ export function mergeFileConfig(dynamic: z.infer | unde }; } + if (dynamic.ocr !== undefined) { + mergedConfig.ocr = { + ...mergedConfig.ocr, + ...dynamic.ocr, + }; + if (dynamic.ocr.supportedMimeTypes) { + mergedConfig.ocr.supportedMimeTypes = convertStringsToRegex(dynamic.ocr.supportedMimeTypes); + } + } + + if (dynamic.text !== undefined) { + mergedConfig.text = { + ...mergedConfig.text, + ...dynamic.text, + }; + if (dynamic.text.supportedMimeTypes) { + mergedConfig.text.supportedMimeTypes = convertStringsToRegex(dynamic.text.supportedMimeTypes); + } + } + if (!dynamic.endpoints) { return mergedConfig; } diff --git a/packages/data-provider/src/parameterSettings.ts b/packages/data-provider/src/parameterSettings.ts index 45604d885..93d85d237 100644 --- a/packages/data-provider/src/parameterSettings.ts +++ b/packages/data-provider/src/parameterSettings.ts @@ -139,6 +139,18 @@ export const librechat = { placeholderCode: true, optionType: 'model', } as const, + fileTokenLimit: { + key: 'fileTokenLimit', + label: 'com_ui_file_token_limit', + labelCode: true, + description: 'com_ui_file_token_limit_desc', + descriptionCode: true, + placeholder: 'com_nav_theme_system', + placeholderCode: true, + type: 'number', + component: 'input', + columnSpan: 2, + } as const, }; const openAIParams: Record = { @@ -625,6 +637,7 @@ const googleConfig: SettingsConfiguration = [ google.thinking, google.thinkingBudget, google.web_search, + librechat.fileTokenLimit, ]; const googleCol1: SettingsConfiguration = [ @@ -643,6 +656,7 @@ const googleCol2: SettingsConfiguration = [ google.thinking, google.thinkingBudget, google.web_search, + librechat.fileTokenLimit, ]; const openAI: SettingsConfiguration = [ @@ -663,6 +677,7 @@ const openAI: SettingsConfiguration = [ openAIParams.reasoning_summary, openAIParams.verbosity, openAIParams.disableStreaming, + librechat.fileTokenLimit, ]; const openAICol1: SettingsConfiguration = [ @@ -687,6 +702,7 @@ const openAICol2: SettingsConfiguration = [ openAIParams.useResponsesApi, openAIParams.web_search, openAIParams.disableStreaming, + librechat.fileTokenLimit, ]; const anthropicConfig: SettingsConfiguration = [ @@ -702,6 +718,7 @@ const anthropicConfig: SettingsConfiguration = [ anthropic.thinking, anthropic.thinkingBudget, anthropic.web_search, + librechat.fileTokenLimit, ]; const anthropicCol1: SettingsConfiguration = [ @@ -721,6 +738,7 @@ const anthropicCol2: SettingsConfiguration = [ anthropic.thinking, anthropic.thinkingBudget, anthropic.web_search, + librechat.fileTokenLimit, ]; const bedrockAnthropic: SettingsConfiguration = [ @@ -736,6 +754,7 @@ const bedrockAnthropic: SettingsConfiguration = [ bedrock.region, anthropic.thinking, anthropic.thinkingBudget, + librechat.fileTokenLimit, ]; const bedrockMistral: SettingsConfiguration = [ @@ -747,6 +766,7 @@ const bedrockMistral: SettingsConfiguration = [ mistral.topP, librechat.resendFiles, bedrock.region, + librechat.fileTokenLimit, ]; const bedrockCohere: SettingsConfiguration = [ @@ -758,6 +778,7 @@ const bedrockCohere: SettingsConfiguration = [ cohere.topP, librechat.resendFiles, bedrock.region, + librechat.fileTokenLimit, ]; const bedrockGeneral: SettingsConfiguration = [ @@ -768,6 +789,7 @@ const bedrockGeneral: SettingsConfiguration = [ meta.topP, librechat.resendFiles, bedrock.region, + librechat.fileTokenLimit, ]; const bedrockAnthropicCol1: SettingsConfiguration = [ @@ 
-787,6 +809,7 @@ const bedrockAnthropicCol2: SettingsConfiguration = [ bedrock.region, anthropic.thinking, anthropic.thinkingBudget, + librechat.fileTokenLimit, ]; const bedrockMistralCol1: SettingsConfiguration = [ @@ -802,6 +825,7 @@ const bedrockMistralCol2: SettingsConfiguration = [ mistral.topP, librechat.resendFiles, bedrock.region, + librechat.fileTokenLimit, ]; const bedrockCohereCol1: SettingsConfiguration = [ @@ -817,6 +841,7 @@ const bedrockCohereCol2: SettingsConfiguration = [ cohere.topP, librechat.resendFiles, bedrock.region, + librechat.fileTokenLimit, ]; const bedrockGeneralCol1: SettingsConfiguration = [ @@ -831,6 +856,7 @@ const bedrockGeneralCol2: SettingsConfiguration = [ meta.topP, librechat.resendFiles, bedrock.region, + librechat.fileTokenLimit, ]; export const paramSettings: Record = { diff --git a/packages/data-provider/src/schemas.ts b/packages/data-provider/src/schemas.ts index a3296abc8..0e9548b1d 100644 --- a/packages/data-provider/src/schemas.ts +++ b/packages/data-provider/src/schemas.ts @@ -680,6 +680,8 @@ export const tConversationSchema = z.object({ iconURL: z.string().nullable().optional(), /* temporary chat */ expiredAt: z.string().nullable().optional(), + /* file token limits */ + fileTokenLimit: coerceNumber.optional(), /** @deprecated */ resendImages: z.boolean().optional(), /** @deprecated */ @@ -794,6 +796,8 @@ export const tQueryParamsSchema = tConversationSchema * https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-instructions * */ instructions: true, + /** @endpoints openAI, google, anthropic */ + fileTokenLimit: true, }) .merge( z.object({ @@ -850,6 +854,7 @@ export const googleBaseSchema = tConversationSchema.pick({ thinking: true, thinkingBudget: true, web_search: true, + fileTokenLimit: true, iconURL: true, greeting: true, spec: true, @@ -1101,6 +1106,7 @@ export const openAIBaseSchema = tConversationSchema.pick({ useResponsesApi: true, web_search: true, disableStreaming: true, + fileTokenLimit: true, }); export const openAISchema = openAIBaseSchema @@ -1145,6 +1151,7 @@ export const anthropicBaseSchema = tConversationSchema.pick({ spec: true, maxContextTokens: true, web_search: true, + fileTokenLimit: true, }); export const anthropicSchema = anthropicBaseSchema diff --git a/packages/data-provider/src/types/files.ts b/packages/data-provider/src/types/files.ts index fd6027805..ec42520bc 100644 --- a/packages/data-provider/src/types/files.ts +++ b/packages/data-provider/src/types/files.ts @@ -47,6 +47,7 @@ export type FileConfig = { endpoints: { [key: string]: EndpointFileConfig; }; + fileTokenLimit?: number; serverFileSizeLimit?: number; avatarSizeLimit?: number; clientImageResize?: { @@ -55,6 +56,39 @@ export type FileConfig = { maxHeight?: number; quality?: number; }; + ocr?: { + supportedMimeTypes?: RegExp[]; + }; + text?: { + supportedMimeTypes?: RegExp[]; + }; + stt?: { + supportedMimeTypes?: RegExp[]; + }; + checkType?: (fileType: string, supportedTypes: RegExp[]) => boolean; +}; + +export type FileConfigInput = { + endpoints?: { + [key: string]: EndpointFileConfig; + }; + serverFileSizeLimit?: number; + avatarSizeLimit?: number; + clientImageResize?: { + enabled?: boolean; + maxWidth?: number; + maxHeight?: number; + quality?: number; + }; + ocr?: { + supportedMimeTypes?: string[]; + }; + text?: { + supportedMimeTypes?: string[]; + }; + stt?: { + supportedMimeTypes?: string[]; + }; checkType?: (fileType: string, supportedTypes: RegExp[]) => boolean; }; diff --git a/packages/data-schemas/src/schema/defaults.ts 
b/packages/data-schemas/src/schema/defaults.ts index b432f283f..43ac33b34 100644 --- a/packages/data-schemas/src/schema/defaults.ts +++ b/packages/data-schemas/src/schema/defaults.ts @@ -141,6 +141,9 @@ export const conversationPreset = { disableStreaming: { type: Boolean, }, + fileTokenLimit: { + type: Number, + }, /** Reasoning models only */ reasoning_effort: { type: String, diff --git a/packages/data-schemas/src/schema/preset.ts b/packages/data-schemas/src/schema/preset.ts index d95e7d132..36e2c9e8b 100644 --- a/packages/data-schemas/src/schema/preset.ts +++ b/packages/data-schemas/src/schema/preset.ts @@ -51,6 +51,7 @@ export interface IPreset extends Document { useResponsesApi?: boolean; web_search?: boolean; disableStreaming?: boolean; + fileTokenLimit?: number; // end of additional fields agentOptions?: unknown; } diff --git a/packages/data-schemas/src/types/convo.ts b/packages/data-schemas/src/types/convo.ts index fb8392c0f..9e77dc905 100644 --- a/packages/data-schemas/src/types/convo.ts +++ b/packages/data-schemas/src/types/convo.ts @@ -50,6 +50,7 @@ export interface IConversation extends Document { useResponsesApi?: boolean; web_search?: boolean; disableStreaming?: boolean; + fileTokenLimit?: number; // Additional fields files?: string[]; expiredAt?: Date;
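Taken together, the hunks above wire `fileTokenLimit` from the client schemas down to file processing. A minimal usage sketch of the new `processTextWithTokenLimit` helper (`packages/api/src/utils/text.ts`), assuming a whitespace tokenizer as a stand-in for the server's real `countTokens` (the `encode.js` hunk passes `~/server/utils/countTokens` and a limit taken from `req.body.fileTokenLimit` or the merged `fileConfig.fileTokenLimit`):

```ts
import { processTextWithTokenLimit } from '@librechat/api';

// Stand-in token counter for illustration only; the API server uses its own tokenizer.
const countTokensApprox = (text: string): number => text.split(/\s+/).filter(Boolean).length;

async function demo() {
  const { text, tokenCount, wasTruncated } = await processTextWithTokenLimit({
    text: 'one two three four five six seven eight',
    tokenLimit: 5, // e.g. a fileTokenLimit carried on the request or preset
    tokenCountFn: countTokensApprox,
  });

  // The binary search keeps the longest prefix that fits the budget,
  // so wasTruncated === true and tokenCount <= 5 here.
  console.log({ text, tokenCount, wasTruncated });
}

void demo();
```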
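A similar sketch for the extended `mergeFileConfig` in `packages/data-provider/src/file-config.ts`; the dynamic values below (a 50,000-token limit and narrowed OCR/text MIME patterns) are illustrative. String patterns are converted to `RegExp` via `convertStringsToRegex`, while `stt` keeps its built-in audio defaults since the dynamic schema only accepts `ocr` and `text`:

```ts
import { mergeFileConfig } from 'librechat-data-provider';

// Illustrative dynamic config; in practice this comes from the deployment's fileConfig.
const merged = mergeFileConfig({
  fileTokenLimit: 50000, // overrides the new defaultTokenLimit of 100000
  ocr: { supportedMimeTypes: ['^application/pdf$'] },
  text: { supportedMimeTypes: ['^text/plain$'] },
});

console.log(merged.fileTokenLimit); // 50000
// checkType tests a MIME type against a list of RegExp patterns.
console.log(merged.checkType?.('application/pdf', merged.ocr?.supportedMimeTypes ?? [])); // true
console.log(merged.checkType?.('audio/mpeg', merged.stt?.supportedMimeTypes ?? [])); // true (default STT audio types)
```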
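Likewise, a small sketch of the relocated `generateShortLivedToken` (now in `packages/api/src/crypto/jwt.ts` and consumed from `@librechat/api` by the services above). The `test-secret` fallback and user id are placeholders; the helper signs `{ id: userId }` with HS256 and a 5-minute default expiry:

```ts
import jwt from 'jsonwebtoken';
import { generateShortLivedToken } from '@librechat/api';

// JWT_SECRET must be set, mirroring the implementation in packages/api/src/crypto/jwt.ts.
process.env.JWT_SECRET = process.env.JWT_SECRET ?? 'test-secret';

const token = generateShortLivedToken('user-123'); // defaults to '5m' expiry
const payload = jwt.verify(token, process.env.JWT_SECRET) as { id: string; exp: number };
console.log(payload.id); // 'user-123'
```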