Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-23 20:00:15 +01:00)
Merge branch 'main' into feat/model-spec-group-icons
This commit is contained in: commit 7844e27161

14 changed files with 393 additions and 49 deletions
@@ -1,6 +1,10 @@
-const { sendEvent } = require('@librechat/api');
 const { logger } = require('@librechat/data-schemas');
 const { Constants } = require('librechat-data-provider');
+const {
+  sendEvent,
+  sanitizeFileForTransmit,
+  sanitizeMessageForTransmit,
+} = require('@librechat/api');
 const {
   handleAbortError,
   createAbortController,

@@ -224,13 +228,13 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
     conversation.title =
       conversation && !conversation.title ? null : conversation?.title || 'New Chat';

-    // Process files if needed
+    // Process files if needed (sanitize to remove large text fields before transmission)
     if (req.body.files && client.options?.attachments) {
       userMessage.files = [];
       const messageFiles = new Set(req.body.files.map((file) => file.file_id));
-      for (let attachment of client.options.attachments) {
+      for (const attachment of client.options.attachments) {
         if (messageFiles.has(attachment.file_id)) {
-          userMessage.files.push({ ...attachment });
+          userMessage.files.push(sanitizeFileForTransmit(attachment));
         }
       }
       delete userMessage.image_urls;

@@ -245,7 +249,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
         final: true,
         conversation,
         title: conversation.title,
-        requestMessage: userMessage,
+        requestMessage: sanitizeMessageForTransmit(userMessage),
         responseMessage: finalResponse,
       });
       res.end();

@@ -273,7 +277,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
         final: true,
         conversation,
         title: conversation.title,
-        requestMessage: userMessage,
+        requestMessage: sanitizeMessageForTransmit(userMessage),
         responseMessage: finalResponse,
         error: { message: 'Request was aborted during completion' },
       });

@@ -1,5 +1,5 @@
 const { logger } = require('@librechat/data-schemas');
-const { countTokens, isEnabled, sendEvent } = require('@librechat/api');
+const { countTokens, isEnabled, sendEvent, sanitizeMessageForTransmit } = require('@librechat/api');
 const { isAssistantsEndpoint, ErrorTypes, Constants } = require('librechat-data-provider');
 const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
 const clearPendingReq = require('~/cache/clearPendingReq');

@@ -290,7 +290,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
       title: conversation && !conversation.title ? null : conversation?.title || 'New Chat',
       final: true,
       conversation,
-      requestMessage: userMessage,
+      requestMessage: sanitizeMessageForTransmit(userMessage),
       responseMessage: responseMessage,
     };
   };

@@ -1,7 +1,7 @@
 const crypto = require('crypto');
 const { logger } = require('@librechat/data-schemas');
 const { parseConvo } = require('librechat-data-provider');
-const { sendEvent, handleError } = require('@librechat/api');
+const { sendEvent, handleError, sanitizeMessageForTransmit } = require('@librechat/api');
 const { saveMessage, getMessages } = require('~/models/Message');
 const { getConvo } = require('~/models/Conversation');

@@ -71,7 +71,7 @@ const sendError = async (req, res, options, callback) => {

   return sendEvent(res, {
     final: true,
-    requestMessage: query?.[0] ? query[0] : requestMessage,
+    requestMessage: sanitizeMessageForTransmit(query?.[0] ?? requestMessage),
     responseMessage: errorMessage,
     conversation: convo,
   });

@@ -2,6 +2,7 @@ import React, { useMemo } from 'react';
 import { useRecoilValue } from 'recoil';
 import { OGDialog, OGDialogTemplate } from '@librechat/client';
 import {
+  inferMimeType,
   EToolResources,
   EModelEndpoint,
   defaultAgentCapabilities,

@@ -56,18 +57,26 @@ const DragDropModal = ({ onOptionSelect, setShowModal, files, isVisible }: DragD
     const _options: FileOption[] = [];
     const currentProvider = provider || endpoint;

+    /** Helper to get inferred MIME type for a file */
+    const getFileType = (file: File) => inferMimeType(file.name, file.type);
+
     // Check if provider supports document upload
     if (isDocumentSupportedProvider(endpointType) || isDocumentSupportedProvider(currentProvider)) {
       const isGoogleProvider = currentProvider === EModelEndpoint.google;
       const validFileTypes = isGoogleProvider
-        ? files.every(
-            (file) =>
-              file.type?.startsWith('image/') ||
-              file.type?.startsWith('video/') ||
-              file.type?.startsWith('audio/') ||
-              file.type === 'application/pdf',
-          )
-        : files.every((file) => file.type?.startsWith('image/') || file.type === 'application/pdf');
+        ? files.every((file) => {
+            const type = getFileType(file);
+            return (
+              type?.startsWith('image/') ||
+              type?.startsWith('video/') ||
+              type?.startsWith('audio/') ||
+              type === 'application/pdf'
+            );
+          })
+        : files.every((file) => {
+            const type = getFileType(file);
+            return type?.startsWith('image/') || type === 'application/pdf';
+          });

       _options.push({
         label: localize('com_ui_upload_provider'),

@@ -81,7 +90,7 @@ const DragDropModal = ({ onOptionSelect, setShowModal, files, isVisible }: DragD
         label: localize('com_ui_upload_image_input'),
         value: undefined,
         icon: <ImageUpIcon className="icon-md" />,
-        condition: files.every((file) => file.type?.startsWith('image/')),
+        condition: files.every((file) => getFileType(file)?.startsWith('image/')),
       });
     }
     if (capabilities.fileSearchEnabled && fileSearchAllowedByAgent) {

@@ -1,4 +1,8 @@
-import { EModelEndpoint, isDocumentSupportedProvider } from 'librechat-data-provider';
+import {
+  EModelEndpoint,
+  isDocumentSupportedProvider,
+  inferMimeType,
+} from 'librechat-data-provider';

 describe('DragDropModal - Provider Detection', () => {
   describe('endpointType priority over currentProvider', () => {

@@ -118,4 +122,59 @@ describe('DragDropModal - Provider Detection', () => {
       ).toBe(true);
     });
   });
+
+  describe('HEIC/HEIF file type inference', () => {
+    it('should infer image/heic for .heic files when browser returns empty type', () => {
+      const fileName = 'photo.heic';
+      const browserType = '';
+
+      const inferredType = inferMimeType(fileName, browserType);
+      expect(inferredType).toBe('image/heic');
+    });
+
+    it('should infer image/heif for .heif files when browser returns empty type', () => {
+      const fileName = 'photo.heif';
+      const browserType = '';
+
+      const inferredType = inferMimeType(fileName, browserType);
+      expect(inferredType).toBe('image/heif');
+    });
+
+    it('should handle uppercase .HEIC extension', () => {
+      const fileName = 'IMG_1234.HEIC';
+      const browserType = '';
+
+      const inferredType = inferMimeType(fileName, browserType);
+      expect(inferredType).toBe('image/heic');
+    });
+
+    it('should preserve browser-provided type when available', () => {
+      const fileName = 'photo.jpg';
+      const browserType = 'image/jpeg';
+
+      const inferredType = inferMimeType(fileName, browserType);
+      expect(inferredType).toBe('image/jpeg');
+    });
+
+    it('should not override browser type even if extension differs', () => {
+      const fileName = 'renamed.heic';
+      const browserType = 'image/png';
+
+      const inferredType = inferMimeType(fileName, browserType);
+      expect(inferredType).toBe('image/png');
+    });
+
+    it('should correctly identify HEIC as image type for upload options', () => {
+      const heicType = inferMimeType('photo.heic', '');
+      expect(heicType.startsWith('image/')).toBe(true);
+    });
+
+    it('should return empty string for unknown extension with no browser type', () => {
+      const fileName = 'file.xyz';
+      const browserType = '';
+
+      const inferredType = inferMimeType(fileName, browserType);
+      expect(inferredType).toBe('');
+    });
+  });
 });

@@ -9,9 +9,9 @@ import {
 import {
   megabyte,
   QueryKeys,
+  inferMimeType,
   excelMimeTypes,
   EToolResources,
-  codeTypeMapping,
   fileConfig as defaultFileConfig,
 } from 'librechat-data-provider';
 import type { TFile, EndpointFileConfig, FileConfig } from 'librechat-data-provider';

@@ -257,14 +257,7 @@ export const validateFiles = ({

   for (let i = 0; i < fileList.length; i++) {
     let originalFile = fileList[i];
-    let fileType = originalFile.type;
-    const extension = originalFile.name.split('.').pop() ?? '';
-    const knownCodeType = codeTypeMapping[extension];
-
-    // Infer MIME type for Known Code files when the type is empty or a mismatch
-    if (knownCodeType && (!fileType || fileType !== knownCodeType)) {
-      fileType = knownCodeType;
-    }
+    const fileType = inferMimeType(originalFile.name, originalFile.type);

     // Check if the file type is still empty after the extension check
     if (!fileType) {

@@ -121,9 +121,12 @@ export function getSafetySettings(
 export function getGoogleConfig(
   credentials: string | t.GoogleCredentials | undefined,
   options: t.GoogleConfigOptions = {},
+  acceptRawApiKey = false,
 ) {
   let creds: t.GoogleCredentials = {};
-  if (typeof credentials === 'string') {
+  if (acceptRawApiKey && typeof credentials === 'string') {
+    creds[AuthKeys.GOOGLE_API_KEY] = credentials;
+  } else if (typeof credentials === 'string') {
     try {
       creds = JSON.parse(credentials);
     } catch (err: unknown) {

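A brief note on the new third parameter: with `acceptRawApiKey` enabled, a bare API-key string is consumed directly as `GOOGLE_API_KEY`; otherwise a string credential is still parsed as JSON, as before. A minimal sketch of both paths, assuming `getGoogleConfig` is in scope (the import path and key values are illustrative, not from this diff):

```ts
// Illustrative import path for the Google config module shown above.
import { getGoogleConfig } from './google';

// acceptRawApiKey = true: the raw string becomes creds[AuthKeys.GOOGLE_API_KEY].
const fromRawKey = getGoogleConfig(
  'raw-api-key',
  { modelOptions: { model: 'gemini-2.0-flash-exp' } },
  true,
);

// Default (acceptRawApiKey = false): a string credential is parsed as JSON, as before.
const fromJson = getGoogleConfig(
  JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' }),
  { modelOptions: { model: 'gemini-2.0-flash-exp' } },
);
```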
@@ -69,6 +69,26 @@ describe('getOpenAIConfig - Google Compatibility', () => {
     expect(result.tools).toEqual([]);
   });

+  it('should filter out googleSearch when web_search is only in modelOptions (not explicitly in addParams/defaultParams)', () => {
+    const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+    const endpoint = 'Gemini (Custom)';
+    const options = {
+      modelOptions: {
+        model: 'gemini-2.0-flash-exp',
+        web_search: true,
+      },
+      customParams: {
+        defaultParamsEndpoint: 'google',
+      },
+      reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+    };
+
+    const result = getOpenAIConfig(apiKey, options, endpoint);
+
+    /** googleSearch should be filtered out since web_search was not explicitly added via addParams or defaultParams */
+    expect(result.tools).toEqual([]);
+  });
+
   it('should handle web_search with mixed Google and OpenAI params in addParams', () => {
     const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
     const endpoint = 'Gemini (Custom)';

@@ -77,23 +77,29 @@ export function getOpenAIConfig(
       headers = Object.assign(headers ?? {}, transformed.configOptions?.defaultHeaders);
     }
   } else if (isGoogle) {
-    const googleResult = getGoogleConfig(apiKey, {
-      modelOptions,
-      reverseProxyUrl: baseURL ?? undefined,
-      authHeader: true,
-      addParams,
-      dropParams,
-      defaultParams,
-    });
+    const googleResult = getGoogleConfig(
+      apiKey,
+      {
+        modelOptions,
+        reverseProxyUrl: baseURL ?? undefined,
+        authHeader: true,
+        addParams,
+        dropParams,
+        defaultParams,
+      },
+      true,
+    );
+    /** Transform handles addParams/dropParams - it knows about OpenAI params */
     const transformed = transformToOpenAIConfig({
       addParams,
       dropParams,
       defaultParams,
+      tools: googleResult.tools,
       llmConfig: googleResult.llmConfig,
       fromEndpoint: EModelEndpoint.google,
     });
     llmConfig = transformed.llmConfig;
-    tools = googleResult.tools;
+    tools = transformed.tools;
   } else {
     const openaiResult = getOpenAILLMConfig({
       azure,

@@ -1,28 +1,48 @@
 import { EModelEndpoint } from 'librechat-data-provider';
+import type { GoogleAIToolType } from '@langchain/google-common';
 import type { ClientOptions } from '@librechat/agents';
 import type * as t from '~/types';
 import { knownOpenAIParams } from './llm';

 const anthropicExcludeParams = new Set(['anthropicApiUrl']);
-const googleExcludeParams = new Set(['safetySettings', 'location', 'baseUrl', 'customHeaders']);
+const googleExcludeParams = new Set([
+  'safetySettings',
+  'location',
+  'baseUrl',
+  'customHeaders',
+  'thinkingConfig',
+  'thinkingBudget',
+  'includeThoughts',
+]);
+
+/** Google-specific tool types that have no OpenAI-compatible equivalent */
+const googleToolsToFilter = new Set(['googleSearch']);
+
+export type ConfigTools = Array<Record<string, unknown>> | Array<GoogleAIToolType>;
+
 /**
  * Transforms a Non-OpenAI LLM config to an OpenAI-conformant config.
  * Non-OpenAI parameters are moved to modelKwargs.
  * Also extracts configuration options that belong in configOptions.
+ * Handles addParams and dropParams for parameter customization.
+ * Filters out provider-specific tools that have no OpenAI equivalent.
  */
 export function transformToOpenAIConfig({
+  tools,
   addParams,
   dropParams,
   defaultParams,
   llmConfig,
   fromEndpoint,
 }: {
+  tools?: ConfigTools;
   addParams?: Record<string, unknown>;
   dropParams?: string[];
   defaultParams?: Record<string, unknown>;
   llmConfig: ClientOptions;
   fromEndpoint: string;
 }): {
+  tools: ConfigTools;
   llmConfig: t.OAIClientOptions;
   configOptions: Partial<t.OpenAIConfiguration>;
 } {

@@ -58,18 +78,9 @@ export function transformToOpenAIConfig({
       hasModelKwargs = true;
       continue;
     } else if (isGoogle && key === 'authOptions') {
       // Handle Google authOptions
       modelKwargs = Object.assign({}, modelKwargs, value as Record<string, unknown>);
       hasModelKwargs = true;
       continue;
-    } else if (
-      isGoogle &&
-      (key === 'thinkingConfig' || key === 'thinkingBudget' || key === 'includeThoughts')
-    ) {
-      // Handle Google thinking configuration
-      modelKwargs = Object.assign({}, modelKwargs, { [key]: value });
-      hasModelKwargs = true;
-      continue;
     }

     if (knownOpenAIParams.has(key)) {

@@ -121,7 +132,34 @@ export function transformToOpenAIConfig({
     }
   }

+  /**
+   * Filter out provider-specific tools that have no OpenAI equivalent.
+   * Exception: If web_search was explicitly enabled via addParams or defaultParams,
+   * preserve googleSearch tools (pass through in Google-native format).
+   */
+  const webSearchExplicitlyEnabled =
+    addParams?.web_search === true || defaultParams?.web_search === true;
+
+  const filterGoogleTool = (tool: unknown): boolean => {
+    if (!isGoogle) {
+      return true;
+    }
+    if (typeof tool !== 'object' || tool === null) {
+      return false;
+    }
+    const toolKeys = Object.keys(tool as Record<string, unknown>);
+    const isGoogleSpecificTool = toolKeys.some((key) => googleToolsToFilter.has(key));
+    /** Preserve googleSearch if web_search was explicitly enabled */
+    if (isGoogleSpecificTool && webSearchExplicitlyEnabled) {
+      return true;
+    }
+    return !isGoogleSpecificTool;
+  };
+
+  const filteredTools = Array.isArray(tools) ? tools.filter(filterGoogleTool) : [];
+
   return {
+    tools: filteredTools,
     llmConfig: openAIConfig as t.OAIClientOptions,
     configOptions,
   };

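Putting the new filtering together, a minimal sketch of the intended behavior under the logic above and the spec earlier in this diff (the relative import path is illustrative; only the `tools` result is asserted):

```ts
// Illustrative import path for the transform module shown above.
import { transformToOpenAIConfig } from './transform';
import { EModelEndpoint } from 'librechat-data-provider';

const googleTools = [{ googleSearch: {} }];

// web_search only in modelOptions (or absent): googleSearch has no OpenAI
// equivalent and is filtered out of the returned tools.
const dropped = transformToOpenAIConfig({
  tools: googleTools,
  llmConfig: { model: 'gemini-2.0-flash-exp' },
  fromEndpoint: EModelEndpoint.google,
});
// dropped.tools -> []

// web_search explicitly enabled via addParams (or defaultParams): the
// Google-native googleSearch tool is passed through unchanged.
const kept = transformToOpenAIConfig({
  tools: googleTools,
  llmConfig: { model: 'gemini-2.0-flash-exp' },
  fromEndpoint: EModelEndpoint.google,
  addParams: { web_search: true },
});
// kept.tools -> [{ googleSearch: {} }]
```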
@@ -21,3 +21,4 @@ export { default as Tokenizer, countTokens } from './tokenizer';
 export * from './yaml';
 export * from './http';
 export * from './tokens';
+export * from './message';

packages/api/src/utils/message.spec.ts (new file, 122 lines)
@@ -0,0 +1,122 @@
+import { sanitizeFileForTransmit, sanitizeMessageForTransmit } from './message';
+
+describe('sanitizeFileForTransmit', () => {
+  it('should remove text field from file', () => {
+    const file = {
+      file_id: 'test-123',
+      filename: 'test.txt',
+      text: 'This is a very long text content that should be stripped',
+      bytes: 1000,
+    };
+
+    const result = sanitizeFileForTransmit(file);
+
+    expect(result.file_id).toBe('test-123');
+    expect(result.filename).toBe('test.txt');
+    expect(result.bytes).toBe(1000);
+    expect(result).not.toHaveProperty('text');
+  });
+
+  it('should remove _id and __v fields', () => {
+    const file = {
+      file_id: 'test-123',
+      _id: 'mongo-id',
+      __v: 0,
+      filename: 'test.txt',
+    };
+
+    const result = sanitizeFileForTransmit(file);
+
+    expect(result.file_id).toBe('test-123');
+    expect(result).not.toHaveProperty('_id');
+    expect(result).not.toHaveProperty('__v');
+  });
+
+  it('should not modify original file object', () => {
+    const file = {
+      file_id: 'test-123',
+      text: 'original text',
+    };
+
+    sanitizeFileForTransmit(file);
+
+    expect(file.text).toBe('original text');
+  });
+});
+
+describe('sanitizeMessageForTransmit', () => {
+  it('should remove fileContext from message', () => {
+    const message = {
+      messageId: 'msg-123',
+      text: 'Hello world',
+      fileContext: 'This is a very long context that should be stripped',
+    };
+
+    const result = sanitizeMessageForTransmit(message);
+
+    expect(result.messageId).toBe('msg-123');
+    expect(result.text).toBe('Hello world');
+    expect(result).not.toHaveProperty('fileContext');
+  });
+
+  it('should sanitize files array', () => {
+    const message = {
+      messageId: 'msg-123',
+      files: [
+        { file_id: 'file-1', text: 'long text 1', filename: 'a.txt' },
+        { file_id: 'file-2', text: 'long text 2', filename: 'b.txt' },
+      ],
+    };
+
+    const result = sanitizeMessageForTransmit(message);
+
+    expect(result.files).toHaveLength(2);
+    expect(result.files?.[0].file_id).toBe('file-1');
+    expect(result.files?.[0].filename).toBe('a.txt');
+    expect(result.files?.[0]).not.toHaveProperty('text');
+    expect(result.files?.[1]).not.toHaveProperty('text');
+  });
+
+  it('should handle null/undefined message', () => {
+    expect(sanitizeMessageForTransmit(null as unknown as object)).toBeNull();
+    expect(sanitizeMessageForTransmit(undefined as unknown as object)).toBeUndefined();
+  });
+
+  it('should handle message without files', () => {
+    const message = {
+      messageId: 'msg-123',
+      text: 'Hello',
+    };
+
+    const result = sanitizeMessageForTransmit(message);
+
+    expect(result.messageId).toBe('msg-123');
+    expect(result.text).toBe('Hello');
+  });
+
+  it('should create new array reference for empty files array (immutability)', () => {
+    const message = {
+      messageId: 'msg-123',
+      files: [] as { file_id: string }[],
+    };
+
+    const result = sanitizeMessageForTransmit(message);
+
+    expect(result.files).toEqual([]);
+    // New array reference ensures full immutability even for empty arrays
+    expect(result.files).not.toBe(message.files);
+  });
+
+  it('should not modify original message object', () => {
+    const message = {
+      messageId: 'msg-123',
+      fileContext: 'original context',
+      files: [{ file_id: 'file-1', text: 'original text' }],
+    };
+
+    sanitizeMessageForTransmit(message);
+
+    expect(message.fileContext).toBe('original context');
+    expect(message.files[0].text).toBe('original text');
+  });
+});

packages/api/src/utils/message.ts (new file, 68 lines)
@@ -0,0 +1,68 @@
+import type { TFile, TMessage } from 'librechat-data-provider';
+
+/** Fields to strip from files before client transmission */
+const FILE_STRIP_FIELDS = ['text', '_id', '__v'] as const;
+
+/** Fields to strip from messages before client transmission */
+const MESSAGE_STRIP_FIELDS = ['fileContext'] as const;
+
+/**
+ * Strips large/unnecessary fields from a file object before transmitting to client.
+ * Use this within existing loops when building file arrays to avoid extra iterations.
+ *
+ * @param file - The file object to sanitize
+ * @returns A new file object without the stripped fields
+ *
+ * @example
+ * // Use in existing file processing loop:
+ * for (const attachment of client.options.attachments) {
+ *   if (messageFiles.has(attachment.file_id)) {
+ *     userMessage.files.push(sanitizeFileForTransmit(attachment));
+ *   }
+ * }
+ */
+export function sanitizeFileForTransmit<T extends Partial<TFile>>(
+  file: T,
+): Omit<T, (typeof FILE_STRIP_FIELDS)[number]> {
+  const sanitized = { ...file };
+  for (const field of FILE_STRIP_FIELDS) {
+    delete sanitized[field as keyof typeof sanitized];
+  }
+  return sanitized;
+}
+
+/**
+ * Sanitizes a message object before transmitting to client.
+ * Removes large fields like `fileContext` and strips `text` from embedded files.
+ *
+ * @param message - The message object to sanitize
+ * @returns A new message object safe for client transmission
+ *
+ * @example
+ * sendEvent(res, {
+ *   final: true,
+ *   requestMessage: sanitizeMessageForTransmit(userMessage),
+ *   responseMessage: response,
+ * });
+ */
+export function sanitizeMessageForTransmit<T extends Partial<TMessage>>(
+  message: T,
+): Omit<T, (typeof MESSAGE_STRIP_FIELDS)[number]> {
+  if (!message) {
+    return message as Omit<T, (typeof MESSAGE_STRIP_FIELDS)[number]>;
+  }
+
+  const sanitized = { ...message };
+
+  // Remove message-level fields
+  for (const field of MESSAGE_STRIP_FIELDS) {
+    delete sanitized[field as keyof typeof sanitized];
+  }
+
+  // Always create a new array when files exist to maintain full immutability
+  if (Array.isArray(sanitized.files)) {
+    sanitized.files = sanitized.files.map((file) => sanitizeFileForTransmit(file));
+  }
+
+  return sanitized;
+}

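The server controllers earlier in this diff consume these helpers through `@librechat/api` (via the barrel export added to `index.ts`). A minimal usage sketch with illustrative values; the field names mirror the tests above:

```ts
import { sanitizeFileForTransmit, sanitizeMessageForTransmit } from '@librechat/api';

// Per-file: the large `text` field plus `_id`/`__v` are dropped.
const file = sanitizeFileForTransmit({
  file_id: 'file-1',
  filename: 'report.pdf',
  text: 'very long extracted text',
});
// -> { file_id: 'file-1', filename: 'report.pdf' }

// Per-message: `fileContext` is removed and each embedded file is sanitized;
// the original message object is left untouched.
const payload = sanitizeMessageForTransmit({
  messageId: 'msg-123',
  fileContext: 'very long context',
  files: [{ file_id: 'file-2', filename: 'notes.txt', text: 'more long text' }],
});
```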
@@ -200,6 +200,27 @@ export const codeTypeMapping: { [key: string]: string } = {
   tsv: 'text/tab-separated-values',
 };

+/** Maps image extensions to MIME types for formats browsers may not recognize */
+export const imageTypeMapping: { [key: string]: string } = {
+  heic: 'image/heic',
+  heif: 'image/heif',
+};
+
+/**
+ * Infers the MIME type from a file's extension when the browser doesn't recognize it
+ * @param fileName - The name of the file including extension
+ * @param currentType - The current MIME type reported by the browser (may be empty)
+ * @returns The inferred MIME type if browser didn't provide one, otherwise the original type
+ */
+export function inferMimeType(fileName: string, currentType: string): string {
+  if (currentType) {
+    return currentType;
+  }
+
+  const extension = fileName.split('.').pop()?.toLowerCase() ?? '';
+  return codeTypeMapping[extension] || imageTypeMapping[extension] || currentType;
+}
+
 export const retrievalMimeTypes = [
   /^(text\/(x-c|x-c\+\+|x-h|html|x-java|markdown|x-php|x-python|x-script\.python|x-ruby|x-tex|plain|vtt|xml))$/,
   /^(application\/(json|pdf|vnd\.openxmlformats-officedocument\.(wordprocessingml\.document|presentationml\.presentation)))$/,

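This helper is exported from `librechat-data-provider` and is what `DragDropModal` and `validateFiles` now call. A small sketch of the behavior covered by the new tests in this diff:

```ts
import { inferMimeType } from 'librechat-data-provider';

// Browsers often report an empty type for HEIC/HEIF photos;
// the extension fallback (case-insensitive) recovers the MIME type.
inferMimeType('IMG_1234.HEIC', ''); // 'image/heic'

// A browser-provided type always wins, even if the extension differs.
inferMimeType('renamed.heic', 'image/png'); // 'image/png'

// Unknown extension with no browser type stays empty.
inferMimeType('file.xyz', ''); // ''
```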