Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-09-22 06:00:56 +02:00)
📎 feat: Upload as Text Support for Plaintext, STT, RAG, and Token Limits (#8868)
* 🪶 feat: Add Support for Uploading Plaintext Files
  - feat: delineate between OCR and text handling in the fileConfig field of the config file; also adds support for passing in mimetypes as plain file extensions
  - feat: add showLabel bool to support future synthetic component DynamicDropdownInput
  - feat: add new combination dropdown-input component in params panel to support file type token limits
  - refactor: move hovercard to side to align with other hovercards
  - chore: clean up autogenerated comments
  - feat: add delineation to file upload path between text- and OCR-configured filetypes
  - feat: add token limit checks during file upload
  - refactor: move textParsing out of ocrEnabled logic
  - refactor: clean up types for filetype config
  - refactor: finish decoupling DynamicDropdownInput from fileTokenLimits
  - fix: move image token cost function into file to fix circular dependency causing unit test to fail, and remove unused var for linter
  - chore: remove out-of-scope code following review
  - refactor: make fileTokenLimit conform to existing styles
  - chore: remove unused localization string
  - chore: undo changes to DynamicInput and other strays
  - feat: add fileTokenLimit to all provider config panels
  - fix: move textParsing back into the ocr tool_resource block for now so that it doesn't interfere with other upload types

* 📤 feat: Add RAG API Endpoint Support for Text Parsing (#8849)
  - feat: implement RAG API integration for text parsing with fallback to native parsing
  - chore: remove TODO now that placeholder and fallback are implemented

* ✈️ refactor: Migrate Text Parsing to TS (#8892)
  - refactor: move generateShortLivedToken to packages/api
  - refactor: move textParsing logic into packages/api
  - refactor: reduce nesting and DRY code with createTextFile
  - fix: add proper source handling
  - fix: mock new parseText and parseTextNative functions in jest file
  - ci: add test coverage for textParser

* 💬 feat: Add Audio File Support to Upload as Text (#8893)
  - feat: add STT support for Upload as Text
  - refactor: move processAudioFile to packages/api
  - refactor: move textParsing from utils to files
  - fix: remove audio/mp3 from unsupported mimetypes test since it is now supported

* ✂️ feat: Configurable File Token Limits and Truncation (#8911)
  - feat: add configurable fileTokenLimit default value
  - fix: add stt to fileConfig merge logic
  - fix: add fileTokenLimit to mergeFileConfig logic so the configurable value is actually respected from the yaml
  - feat: add token limiting to parsed text files
  - fix: add extraction logic and update tests so fileTokenLimit isn't sent to LLM providers
  - fix: address comments
  - refactor: rename textTokenLimiter.ts to text.ts
  - chore: update form-data package to address CVE-2025-7783 and update package-lock
  - feat: use default supported mime types for ocr on frontend file validation
  - fix: should be using logger.debug not console.debug
  - fix: mock existsSync in text.spec.ts
  - fix: mock logger rather than every one of its function calls
  - fix: reorganize imports and streamline file upload processing logic
  - refactor: update createTextFile function to use destructured parameters and improve readability
  - chore: update file validation to use EToolResources for improved type safety
  - chore: update import path for types in audio processing module
  - fix: update file configuration access and replace console.debug with logger.debug for improved logging

---------

Co-authored-by: Dustin Healy <dustinhealy1@gmail.com>
Co-authored-by: Dustin Healy <54083382+dustinhealy@users.noreply.github.com>
parent 74bc0440f0
commit 48f6f8f2f8
41 changed files with 847 additions and 75 deletions
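For orientation, the token-limiting behavior from #8911 amounts to truncating parsed text once it exceeds the configured fileTokenLimit. A minimal sketch, assuming a crude whitespace token count; the real code in packages/api uses its own tokenization and helper names:

// Hypothetical sketch: truncate parsed text to a configured token budget.
// A real tokenizer would replace the whitespace split used here.
type TruncationResult = { text: string; wasTruncated: boolean };

function truncateTextByTokens(text: string, fileTokenLimit: number): TruncationResult {
  const tokens = text.split(/\s+/).filter(Boolean); // crude token estimate
  if (tokens.length <= fileTokenLimit) {
    return { text, wasTruncated: false };
  }
  return { text: tokens.slice(0, fileTokenLimit).join(' '), wasTruncated: true };
}

// Example with the 100000-token default that defaultTokenLimit sets in the diff below.
const parsedFileText = 'contents of an uploaded plaintext file ...';
const result = truncateTextByTokens(parsedFileText, 100000);
console.log(result.wasTruncated); // false for this short example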
@@ -14,7 +14,7 @@ import {
 } from '../src/file-config';
 
 describe('MIME Type Regex Patterns', () => {
-  const unsupportedMimeTypes = ['text/x-unknown', 'application/unknown', 'image/bmp', 'audio/mp3'];
+  const unsupportedMimeTypes = ['text/x-unknown', 'application/unknown', 'image/bmp'];
 
   // Testing general supported MIME types
   fullMimeTypesList.forEach((mimeType) => {
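The removed audio/mp3 entry is covered by the new audioMimeTypes pattern introduced below, so it no longer belongs in the unsupported list. A standalone sketch of the check this spec exercises, using the regexes from the diff:

// Standalone sketch of the check, using patterns from this diff.
const audioMimeTypes =
  /^audio\/(mp3|mpeg|mpeg3|wav|wave|x-wav|ogg|vorbis|mp4|x-m4a|flac|x-flac|webm)$/;
const supportedMimeTypes: RegExp[] = [audioMimeTypes /* textMimeTypes, excelMimeTypes, ... */];

const checkType = (fileType: string, supportedTypes: RegExp[] = supportedMimeTypes) =>
  supportedTypes.some((regex) => regex.test(fileType));

console.log(checkType('audio/mp3')); // true  -> removed from unsupportedMimeTypes
console.log(checkType('image/bmp')); // false -> still unsupported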
@@ -122,11 +122,27 @@ export const applicationMimeTypes =
 export const imageMimeTypes = /^image\/(jpeg|gif|png|webp|heic|heif)$/;
 
+export const audioMimeTypes =
+  /^audio\/(mp3|mpeg|mpeg3|wav|wave|x-wav|ogg|vorbis|mp4|x-m4a|flac|x-flac|webm)$/;
+
+export const defaultOCRMimeTypes = [
+  imageMimeTypes,
+  /^application\/pdf$/,
+  /^application\/vnd\.openxmlformats-officedocument\.(wordprocessingml\.document|presentationml\.presentation|spreadsheetml\.sheet)$/,
+  /^application\/vnd\.ms-(word|powerpoint|excel)$/,
+  /^application\/epub\+zip$/,
+];
+
+export const defaultTextMimeTypes = [textMimeTypes];
+
+export const defaultSTTMimeTypes = [audioMimeTypes];
+
 export const supportedMimeTypes = [
   textMimeTypes,
   excelMimeTypes,
   applicationMimeTypes,
   imageMimeTypes,
+  audioMimeTypes,
   /** Supported by LC Code Interpreter PAI */
   /^image\/(svg|svg\+xml)$/,
 ];

@@ -169,6 +185,7 @@ export const megabyte = 1024 * 1024;
 export const mbToBytes = (mb: number): number => mb * megabyte;
 
 const defaultSizeLimit = mbToBytes(512);
+const defaultTokenLimit = 100000;
 const assistantsFileConfig = {
   fileLimit: 10,
   fileSizeLimit: defaultSizeLimit,

@@ -192,12 +209,22 @@ export const fileConfig = {
   },
   serverFileSizeLimit: defaultSizeLimit,
   avatarSizeLimit: mbToBytes(2),
+  fileTokenLimit: defaultTokenLimit,
   clientImageResize: {
     enabled: false,
     maxWidth: 1900,
     maxHeight: 1900,
     quality: 0.92,
   },
+  ocr: {
+    supportedMimeTypes: defaultOCRMimeTypes,
+  },
+  text: {
+    supportedMimeTypes: defaultTextMimeTypes,
+  },
+  stt: {
+    supportedMimeTypes: defaultSTTMimeTypes,
+  },
   checkType: function (fileType: string, supportedTypes: RegExp[] = supportedMimeTypes) {
     return supportedTypes.some((regex) => regex.test(fileType));
   },
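With ocr, text, and stt buckets on fileConfig, the upload path can route a file by MIME type. A hypothetical routing helper; routeUpload and the bucket-priority order are illustrative, not part of this diff:

// Hypothetical helper showing how the new ocr/text/stt buckets can drive upload handling.
type BucketedConfig = {
  ocr?: { supportedMimeTypes?: RegExp[] };
  text?: { supportedMimeTypes?: RegExp[] };
  stt?: { supportedMimeTypes?: RegExp[] };
  checkType: (fileType: string, supportedTypes: RegExp[]) => boolean;
};

function routeUpload(config: BucketedConfig, mimeType: string): 'stt' | 'text' | 'ocr' | 'default' {
  if (config.checkType(mimeType, config.stt?.supportedMimeTypes ?? [])) return 'stt';
  if (config.checkType(mimeType, config.text?.supportedMimeTypes ?? [])) return 'text';
  if (config.checkType(mimeType, config.ocr?.supportedMimeTypes ?? [])) return 'ocr';
  return 'default';
}

// e.g. routeUpload(fileConfig as BucketedConfig, 'audio/wav') -> 'stt' with the defaults above.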
@@ -232,6 +259,7 @@ export const fileConfigSchema = z.object({
   endpoints: z.record(endpointFileConfigSchema).optional(),
   serverFileSizeLimit: z.number().min(0).optional(),
   avatarSizeLimit: z.number().min(0).optional(),
+  fileTokenLimit: z.number().min(0).optional(),
   imageGeneration: z
     .object({
       percentage: z.number().min(0).max(100).optional(),

@@ -246,6 +274,16 @@ export const fileConfigSchema = z.object({
       quality: z.number().min(0).max(1).optional(),
     })
     .optional(),
+  ocr: z
+    .object({
+      supportedMimeTypes: supportedMimeTypesSchema.optional(),
+    })
+    .optional(),
+  text: z
+    .object({
+      supportedMimeTypes: supportedMimeTypesSchema.optional(),
+    })
+    .optional(),
 });
 
 /** Helper function to safely convert string patterns to RegExp objects */

@@ -261,7 +299,21 @@ export const convertStringsToRegex = (patterns: string[]): RegExp[] =>
 }, []);
 
 export function mergeFileConfig(dynamic: z.infer<typeof fileConfigSchema> | undefined): FileConfig {
-  const mergedConfig = fileConfig as FileConfig;
+  const mergedConfig: FileConfig = {
+    ...fileConfig,
+    ocr: {
+      ...fileConfig.ocr,
+      supportedMimeTypes: fileConfig.ocr?.supportedMimeTypes || [],
+    },
+    text: {
+      ...fileConfig.text,
+      supportedMimeTypes: fileConfig.text?.supportedMimeTypes || [],
+    },
+    stt: {
+      ...fileConfig.stt,
+      supportedMimeTypes: fileConfig.stt?.supportedMimeTypes || [],
+    },
+  };
   if (!dynamic) {
     return mergedConfig;
   }

@@ -274,6 +326,10 @@ export function mergeFileConfig(dynamic: z.infer<typeof fileConfigSchema> | undefined): FileConfig {
     mergedConfig.avatarSizeLimit = mbToBytes(dynamic.avatarSizeLimit);
   }
 
+  if (dynamic.fileTokenLimit !== undefined) {
+    mergedConfig.fileTokenLimit = dynamic.fileTokenLimit;
+  }
+
   // Merge clientImageResize configuration
   if (dynamic.clientImageResize !== undefined) {
     mergedConfig.clientImageResize = {

@@ -282,6 +338,26 @@ export function mergeFileConfig(dynamic: z.infer<typeof fileConfigSchema> | undefined): FileConfig {
     };
   }
 
+  if (dynamic.ocr !== undefined) {
+    mergedConfig.ocr = {
+      ...mergedConfig.ocr,
+      ...dynamic.ocr,
+    };
+    if (dynamic.ocr.supportedMimeTypes) {
+      mergedConfig.ocr.supportedMimeTypes = convertStringsToRegex(dynamic.ocr.supportedMimeTypes);
+    }
+  }
+
+  if (dynamic.text !== undefined) {
+    mergedConfig.text = {
+      ...mergedConfig.text,
+      ...dynamic.text,
+    };
+    if (dynamic.text.supportedMimeTypes) {
+      mergedConfig.text.supportedMimeTypes = convertStringsToRegex(dynamic.text.supportedMimeTypes);
+    }
+  }
+
   if (!dynamic.endpoints) {
     return mergedConfig;
   }
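Taken together, mergeFileConfig now lets a librechat.yaml fileConfig section override the defaults. The values below are illustrative only, showing string patterns converted to RegExp and the 100000-token default overridden:

// Illustrative values for a parsed librechat.yaml fileConfig section merged over the
// defaults; mergeFileConfig is the function shown above, in the same module.
const merged = mergeFileConfig({
  fileTokenLimit: 50000,
  text: {
    supportedMimeTypes: ['^text\\/.*$', '^application\\/json$'],
  },
  ocr: {
    supportedMimeTypes: ['^application\\/pdf$'],
  },
});

console.log(merged.fileTokenLimit);                // 50000 instead of the 100000 default
console.log(merged.text?.supportedMimeTypes?.[0]); // /^text\/.*$/ as a RegExp, not a string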
@@ -139,6 +139,18 @@ export const librechat = {
     placeholderCode: true,
     optionType: 'model',
   } as const,
+  fileTokenLimit: {
+    key: 'fileTokenLimit',
+    label: 'com_ui_file_token_limit',
+    labelCode: true,
+    description: 'com_ui_file_token_limit_desc',
+    descriptionCode: true,
+    placeholder: 'com_nav_theme_system',
+    placeholderCode: true,
+    type: 'number',
+    component: 'input',
+    columnSpan: 2,
+  } as const,
 };
 
 const openAIParams: Record<string, SettingDefinition> = {

@@ -625,6 +637,7 @@ const googleConfig: SettingsConfiguration = [
   google.thinking,
   google.thinkingBudget,
   google.web_search,
+  librechat.fileTokenLimit,
 ];
 
 const googleCol1: SettingsConfiguration = [

@@ -643,6 +656,7 @@ const googleCol2: SettingsConfiguration = [
   google.thinking,
   google.thinkingBudget,
   google.web_search,
+  librechat.fileTokenLimit,
 ];
 
 const openAI: SettingsConfiguration = [

@@ -663,6 +677,7 @@ const openAI: SettingsConfiguration = [
   openAIParams.reasoning_summary,
   openAIParams.verbosity,
   openAIParams.disableStreaming,
+  librechat.fileTokenLimit,
 ];
 
 const openAICol1: SettingsConfiguration = [

@@ -687,6 +702,7 @@ const openAICol2: SettingsConfiguration = [
   openAIParams.useResponsesApi,
   openAIParams.web_search,
   openAIParams.disableStreaming,
+  librechat.fileTokenLimit,
 ];
 
 const anthropicConfig: SettingsConfiguration = [

@@ -702,6 +718,7 @@ const anthropicConfig: SettingsConfiguration = [
   anthropic.thinking,
   anthropic.thinkingBudget,
   anthropic.web_search,
+  librechat.fileTokenLimit,
 ];
 
 const anthropicCol1: SettingsConfiguration = [

@@ -721,6 +738,7 @@ const anthropicCol2: SettingsConfiguration = [
   anthropic.thinking,
   anthropic.thinkingBudget,
   anthropic.web_search,
+  librechat.fileTokenLimit,
 ];
 
 const bedrockAnthropic: SettingsConfiguration = [

@@ -736,6 +754,7 @@ const bedrockAnthropic: SettingsConfiguration = [
   bedrock.region,
   anthropic.thinking,
   anthropic.thinkingBudget,
+  librechat.fileTokenLimit,
 ];
 
 const bedrockMistral: SettingsConfiguration = [

@@ -747,6 +766,7 @@ const bedrockMistral: SettingsConfiguration = [
   mistral.topP,
   librechat.resendFiles,
   bedrock.region,
+  librechat.fileTokenLimit,
 ];
 
 const bedrockCohere: SettingsConfiguration = [

@@ -758,6 +778,7 @@ const bedrockCohere: SettingsConfiguration = [
   cohere.topP,
   librechat.resendFiles,
   bedrock.region,
+  librechat.fileTokenLimit,
 ];
 
 const bedrockGeneral: SettingsConfiguration = [

@@ -768,6 +789,7 @@ const bedrockGeneral: SettingsConfiguration = [
   meta.topP,
   librechat.resendFiles,
   bedrock.region,
+  librechat.fileTokenLimit,
 ];
 
 const bedrockAnthropicCol1: SettingsConfiguration = [

@@ -787,6 +809,7 @@ const bedrockAnthropicCol2: SettingsConfiguration = [
   bedrock.region,
   anthropic.thinking,
   anthropic.thinkingBudget,
+  librechat.fileTokenLimit,
 ];
 
 const bedrockMistralCol1: SettingsConfiguration = [

@@ -802,6 +825,7 @@ const bedrockMistralCol2: SettingsConfiguration = [
   mistral.topP,
   librechat.resendFiles,
   bedrock.region,
+  librechat.fileTokenLimit,
 ];
 
 const bedrockCohereCol1: SettingsConfiguration = [

@@ -817,6 +841,7 @@ const bedrockCohereCol2: SettingsConfiguration = [
   cohere.topP,
   librechat.resendFiles,
   bedrock.region,
+  librechat.fileTokenLimit,
 ];
 
 const bedrockGeneralCol1: SettingsConfiguration = [

@@ -831,6 +856,7 @@ const bedrockGeneralCol2: SettingsConfiguration = [
   meta.topP,
   librechat.resendFiles,
   bedrock.region,
+  librechat.fileTokenLimit,
 ];
 
 export const paramSettings: Record<string, SettingsConfiguration | undefined> = {
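The panel setting is only a UI-side number; per the commit message it must not be forwarded to LLM providers. A hypothetical extraction sketch; the names and shapes are illustrative, not the actual payload-building code:

// Hypothetical sketch of the "extraction" noted in the commit message: the panel value is
// kept for upload handling but stripped before a provider payload is built.
type ConversationOptions = { model?: string; temperature?: number; fileTokenLimit?: number };

function extractFileTokenLimit(options: ConversationOptions) {
  const { fileTokenLimit, ...providerOptions } = options;
  return { fileTokenLimit, providerOptions };
}

const { fileTokenLimit, providerOptions } = extractFileTokenLimit({
  model: 'gpt-4o',
  temperature: 0.7,
  fileTokenLimit: 50000,
});
// providerOptions has no fileTokenLimit key; only the upload pipeline sees the limit.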
@@ -680,6 +680,8 @@ export const tConversationSchema = z.object({
   iconURL: z.string().nullable().optional(),
   /* temporary chat */
   expiredAt: z.string().nullable().optional(),
+  /* file token limits */
+  fileTokenLimit: coerceNumber.optional(),
   /** @deprecated */
   resendImages: z.boolean().optional(),
   /** @deprecated */

@@ -794,6 +796,8 @@ export const tQueryParamsSchema = tConversationSchema
    * https://platform.openai.com/docs/api-reference/runs/createRun#runs-createrun-instructions
    * */
   instructions: true,
+  /** @endpoints openAI, google, anthropic */
+  fileTokenLimit: true,
 })
   .merge(
     z.object({

@@ -850,6 +854,7 @@ export const googleBaseSchema = tConversationSchema.pick({
   thinking: true,
   thinkingBudget: true,
   web_search: true,
+  fileTokenLimit: true,
   iconURL: true,
   greeting: true,
   spec: true,

@@ -1101,6 +1106,7 @@ export const openAIBaseSchema = tConversationSchema.pick({
   useResponsesApi: true,
   web_search: true,
   disableStreaming: true,
+  fileTokenLimit: true,
 });
 
 export const openAISchema = openAIBaseSchema

@@ -1145,6 +1151,7 @@ export const anthropicBaseSchema = tConversationSchema.pick({
   spec: true,
   maxContextTokens: true,
   web_search: true,
+  fileTokenLimit: true,
 });
 
 export const anthropicSchema = anthropicBaseSchema
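fileTokenLimit is coerced on the conversation schema, so a value arriving as a query-param string still parses to a number. A minimal sketch, assuming coerceNumber behaves like z.coerce.number():

// Minimal sketch; coerceNumber here is a stand-in for the schema's coerceNumber helper.
import { z } from 'zod';

const coerceNumber = z.coerce.number();
const fileTokenLimit = coerceNumber.optional();

console.log(fileTokenLimit.parse('50000'));   // 50000 as a number
console.log(fileTokenLimit.parse(undefined)); // undefined; the field is optional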
@@ -47,6 +47,7 @@ export type FileConfig = {
   endpoints: {
     [key: string]: EndpointFileConfig;
   };
+  fileTokenLimit?: number;
   serverFileSizeLimit?: number;
   avatarSizeLimit?: number;
   clientImageResize?: {

@@ -55,6 +56,39 @@ export type FileConfig = {
     maxHeight?: number;
     quality?: number;
   };
+  ocr?: {
+    supportedMimeTypes?: RegExp[];
+  };
+  text?: {
+    supportedMimeTypes?: RegExp[];
+  };
+  stt?: {
+    supportedMimeTypes?: RegExp[];
+  };
   checkType?: (fileType: string, supportedTypes: RegExp[]) => boolean;
 };
 
+export type FileConfigInput = {
+  endpoints?: {
+    [key: string]: EndpointFileConfig;
+  };
+  serverFileSizeLimit?: number;
+  avatarSizeLimit?: number;
+  clientImageResize?: {
+    enabled?: boolean;
+    maxWidth?: number;
+    maxHeight?: number;
+    quality?: number;
+  };
+  ocr?: {
+    supportedMimeTypes?: string[];
+  };
+  text?: {
+    supportedMimeTypes?: string[];
+  };
+  stt?: {
+    supportedMimeTypes?: string[];
+  };
+  checkType?: (fileType: string, supportedTypes: RegExp[]) => boolean;
+};