Mirror of https://github.com/danny-avila/LibreChat.git
🛂 feat: Payload limits and Validation for User-created Memories (#8974)
parent 21e00168b1
commit edf33bedcb
9 changed files with 71 additions and 76 deletions
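
In short: the commit caps the JSON body for the memory create and update routes at 100 KB, rejects keys longer than 1,000 characters and values longer than the configurable memory.charLimit (default 10,000), and moves loadMemoryConfig / isMemoryEnabled from librechat-data-provider into @librechat/api. A rough illustration of the new behaviour from a client's point of view, assuming the router is mounted at /api/memories on the default port (neither the mount path nor auth handling is shown in this diff):

```js
// Sketch only: exercises the new validation; auth headers are omitted.
async function demo() {
  const oversized = 'x'.repeat(20000); // longer than the default charLimit of 10000

  const res = await fetch('http://localhost:3080/api/memories', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ key: 'preferences', value: oversized }),
  });

  // Expected: 400 with
  // "Value exceeds maximum length of 10000 characters. Current length: 20000 characters."
  // Bodies over 100 KB never reach the handler; express.json({ limit: '100kb' }) answers 413.
  console.log(res.status, await res.json());
}
```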
@@ -13,6 +13,8 @@ const { getRoleByName } = require('~/models/Role');
 const router = express.Router();
+const memoryPayloadLimit = express.json({ limit: '100kb' });
 const checkMemoryRead = generateCheckAccess({
   permissionType: PermissionTypes.MEMORIES,
   permissions: [Permissions.USE, Permissions.READ],
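
The new memoryPayloadLimit middleware is a per-route express.json() parser with a 100 KB cap, so oversized bodies are rejected before the handler runs. A minimal standalone sketch of the same pattern (the route path and port below are illustrative):

```js
const express = require('express');

const app = express();

// Per-route JSON parser with a 100 KB cap, mirroring memoryPayloadLimit above.
const memoryPayloadLimit = express.json({ limit: '100kb' });

app.post('/memories', memoryPayloadLimit, (req, res) => {
  // Reached only when the body parsed successfully and stayed under the limit.
  res.status(201).json({ keys: Object.keys(req.body) });
});

// For larger bodies, express.json() emits a "request entity too large" error,
// which Express's default error handler turns into a 413 response.
app.listen(3000);
```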
@@ -60,6 +62,7 @@ router.get('/', checkMemoryRead, async (req, res) => {
     const memoryConfig = req.app.locals?.memory;
     const tokenLimit = memoryConfig?.tokenLimit;
+    const charLimit = memoryConfig?.charLimit || 10000;
     let usagePercentage = null;
     if (tokenLimit && tokenLimit > 0) {
@@ -70,6 +73,7 @@ router.get('/', checkMemoryRead, async (req, res) => {
       memories: sortedMemories,
       totalTokens,
       tokenLimit: tokenLimit || null,
+      charLimit,
       usagePercentage,
     });
   } catch (error) {
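
With the added field, GET responses now expose the character limit alongside the token budget. An example of the response shape (values are illustrative, and treating usagePercentage as totalTokens relative to tokenLimit is an assumption, not something this hunk shows):

```js
// Illustrative GET /memories response body after this change.
const exampleResponse = {
  memories: [{ key: 'preferences', value: 'Prefers dark mode' }],
  totalTokens: 5,
  tokenLimit: 2000, // null when no token limit is configured
  charLimit: 10000, // new: memory.charLimit, defaulting to 10000
  usagePercentage: 0.25, // assumed: (totalTokens / tokenLimit) * 100; null without a tokenLimit
};
```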
@@ -83,7 +87,7 @@ router.get('/', checkMemoryRead, async (req, res) => {
  * Body: { key: string, value: string }
  * Returns 201 and { created: true, memory: <createdDoc> } when successful.
  */
-router.post('/', checkMemoryCreate, async (req, res) => {
+router.post('/', memoryPayloadLimit, checkMemoryCreate, async (req, res) => {
   const { key, value } = req.body;
   if (typeof key !== 'string' || key.trim() === '') {
@@ -94,13 +98,25 @@ router.post('/', checkMemoryCreate, async (req, res) => {
     return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
   }
+  const memoryConfig = req.app.locals?.memory;
+  const charLimit = memoryConfig?.charLimit || 10000;
+  if (key.length > 1000) {
+    return res.status(400).json({
+      error: `Key exceeds maximum length of 1000 characters. Current length: ${key.length} characters.`,
+    });
+  }
+  if (value.length > charLimit) {
+    return res.status(400).json({
+      error: `Value exceeds maximum length of ${charLimit} characters. Current length: ${value.length} characters.`,
+    });
+  }
   try {
     const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');
     const memories = await getAllUserMemories(req.user.id);
     // Check token limit
     const memoryConfig = req.app.locals?.memory;
     const tokenLimit = memoryConfig?.tokenLimit;
     if (tokenLimit) {
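
The same key/value checks are repeated in the PATCH handler below. A sketch of the rule set as a single helper, purely for readability (the commit inlines the checks; the helper name, the key error message, and the return convention are illustrative):

```js
// Illustrative helper: returns an error message when the payload breaks the
// limits enforced by this commit, or null when the payload is acceptable.
function validateMemoryPayload(key, value, charLimit = 10000) {
  if (typeof key !== 'string' || key.trim() === '') {
    return 'Key is required and must be a non-empty string.';
  }
  if (typeof value !== 'string' || value.trim() === '') {
    return 'Value is required and must be a non-empty string.';
  }
  if (key.length > 1000) {
    return `Key exceeds maximum length of 1000 characters. Current length: ${key.length} characters.`;
  }
  if (value.length > charLimit) {
    return `Value exceeds maximum length of ${charLimit} characters. Current length: ${value.length} characters.`;
  }
  return null;
}
```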
@@ -175,7 +191,7 @@ router.patch('/preferences', checkMemoryOptOut, async (req, res) => {
  * Body: { key?: string, value: string }
  * Returns 200 and { updated: true, memory: <updatedDoc> } when successful.
  */
-router.patch('/:key', checkMemoryUpdate, async (req, res) => {
+router.patch('/:key', memoryPayloadLimit, checkMemoryUpdate, async (req, res) => {
   const { key: urlKey } = req.params;
   const { key: bodyKey, value } = req.body || {};
@@ -183,9 +199,23 @@ router.patch('/:key', checkMemoryUpdate, async (req, res) => {
     return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
   }
   // Use the key from the body if provided, otherwise use the key from the URL
   const newKey = bodyKey || urlKey;
+  const memoryConfig = req.app.locals?.memory;
+  const charLimit = memoryConfig?.charLimit || 10000;
+  if (newKey.length > 1000) {
+    return res.status(400).json({
+      error: `Key exceeds maximum length of 1000 characters. Current length: ${newKey.length} characters.`,
+    });
+  }
+  if (value.length > charLimit) {
+    return res.status(400).json({
+      error: `Value exceeds maximum length of ${charLimit} characters. Current length: ${value.length} characters.`,
+    });
+  }
   try {
     const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');
@@ -196,7 +226,6 @@ router.patch('/:key', checkMemoryUpdate, async (req, res) => {
       return res.status(404).json({ error: 'Memory not found.' });
     }
     // If the key is changing, we need to handle it specially
     if (newKey !== urlKey) {
       const keyExists = memories.find((m) => m.key === newKey);
       if (keyExists) {
@@ -219,7 +248,6 @@ router.patch('/:key', checkMemoryUpdate, async (req, res) => {
         return res.status(500).json({ error: 'Failed to delete old memory.' });
       }
     } else {
       // Key is not changing, just update the value
       const result = await setMemory({
         userId: req.user.id,
         key: newKey,
@@ -1,9 +1,8 @@
-const { agentsConfigSetup, loadWebSearchConfig } = require('@librechat/api');
+const { loadMemoryConfig, agentsConfigSetup, loadWebSearchConfig } = require('@librechat/api');
 const {
   FileSources,
   loadOCRConfig,
   EModelEndpoint,
-  loadMemoryConfig,
   getConfigDefaults,
 } = require('librechat-data-provider');
 const {
@@ -2,11 +2,11 @@ const {
   SystemRoles,
   Permissions,
   PermissionTypes,
-  isMemoryEnabled,
   removeNullishValues,
 } = require('librechat-data-provider');
+const { logger } = require('@librechat/data-schemas');
+const { isMemoryEnabled } = require('@librechat/api');
 const { updateAccessPermissions } = require('~/models/Role');
-const { logger } = require('~/config');
 /**
  * Loads the default interface object.
@@ -15,6 +15,8 @@ export * from './crypto';
 export * from './flow/manager';
 /* Middleware */
 export * from './middleware';
+/* Memory */
+export * from './memory';
 /* Agents */
 export * from './agents';
 /* Endpoints */
packages/api/src/memory/config.ts (new file, 28 lines)
@@ -0,0 +1,28 @@
+import { memorySchema } from 'librechat-data-provider';
+import type { TCustomConfig, TMemoryConfig } from 'librechat-data-provider';
+
+const hasValidAgent = (agent: TMemoryConfig['agent']) =>
+  !!agent &&
+  (('id' in agent && !!agent.id) ||
+    ('provider' in agent && 'model' in agent && !!agent.provider && !!agent.model));
+
+const isDisabled = (config?: TMemoryConfig | TCustomConfig['memory']) =>
+  !config || config.disabled === true;
+
+export function loadMemoryConfig(config: TCustomConfig['memory']): TMemoryConfig | undefined {
+  if (!config) return undefined;
+  if (isDisabled(config)) return config as TMemoryConfig;
+
+  if (!hasValidAgent(config.agent)) {
+    return { ...config, disabled: true } as TMemoryConfig;
+  }
+
+  const charLimit = memorySchema.shape.charLimit.safeParse(config.charLimit).data ?? 10000;
+
+  return { ...config, charLimit };
+}
+
+export function isMemoryEnabled(config: TMemoryConfig | undefined): boolean {
+  if (isDisabled(config)) return false;
+  return hasValidAgent(config!.agent);
+}
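
A quick usage sketch of the relocated helpers, roughly as the server config loader would call them (the sample memory block and provider/model names are illustrative):

```js
const { loadMemoryConfig, isMemoryEnabled } = require('@librechat/api');

const memory = loadMemoryConfig({
  tokenLimit: 2000,
  charLimit: 15000,
  agent: { provider: 'openAI', model: 'gpt-4o-mini' },
});
// memory.charLimit === 15000; missing or invalid values fall back to 10000 via memorySchema.
console.log(isMemoryEnabled(memory)); // true: the agent has both provider and model

// An agent without a model is treated as invalid, so the config comes back disabled.
console.log(isMemoryEnabled(loadMemoryConfig({ agent: { provider: 'openAI' } }))); // false
```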
packages/api/src/memory/index.ts (new file, 1 line)
@@ -0,0 +1 @@
+export * from './config';
@@ -727,6 +727,7 @@ export const memorySchema = z.object({
   disabled: z.boolean().optional(),
   validKeys: z.array(z.string()).optional(),
   tokenLimit: z.number().optional(),
+  charLimit: z.number().optional().default(10000),
   personalize: z.boolean().default(true),
   messageWindowSize: z.number().optional().default(5),
   agent: z
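
For reference, the new field's schema default is what loadMemoryConfig relies on when charLimit is absent or invalid; a small zod sketch (sample inputs are illustrative):

```js
const { memorySchema } = require('librechat-data-provider');

// Per-field parse, exactly the call loadMemoryConfig makes:
console.log(memorySchema.shape.charLimit.safeParse(undefined).data); // 10000 (schema default)
console.log(memorySchema.shape.charLimit.safeParse(15000).data);     // 15000
console.log(memorySchema.shape.charLimit.safeParse('oops').success); // false (not a number)
```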
@@ -13,8 +13,6 @@ export * from './generate';
 export * from './models';
 /* mcp */
 export * from './mcp';
-/* memory */
-export * from './memory';
 /* RBAC */
 export * from './permissions';
 export * from './roles';
@@ -1,62 +0,0 @@
-import type { TCustomConfig, TMemoryConfig } from './config';
-
-/**
- * Loads the memory configuration and validates it
- * @param config - The memory configuration from librechat.yaml
- * @returns The validated memory configuration
- */
-export function loadMemoryConfig(config: TCustomConfig['memory']): TMemoryConfig | undefined {
-  if (!config) {
-    return undefined;
-  }
-
-  // If disabled is explicitly true, return the config as-is
-  if (config.disabled === true) {
-    return config;
-  }
-
-  // Check if the agent configuration is valid
-  const hasValidAgent =
-    config.agent &&
-    (('id' in config.agent && !!config.agent.id) ||
-      ('provider' in config.agent &&
-        'model' in config.agent &&
-        !!config.agent.provider &&
-        !!config.agent.model));
-
-  // If agent config is invalid, treat as disabled
-  if (!hasValidAgent) {
-    return {
-      ...config,
-      disabled: true,
-    };
-  }
-
-  return config;
-}
-
-/**
- * Checks if memory feature is enabled based on the configuration
- * @param config - The memory configuration
- * @returns True if memory is enabled, false otherwise
- */
-export function isMemoryEnabled(config: TMemoryConfig | undefined): boolean {
-  if (!config) {
-    return false;
-  }
-
-  if (config.disabled === true) {
-    return false;
-  }
-
-  // Check if agent configuration is valid
-  const hasValidAgent =
-    config.agent &&
-    (('id' in config.agent && !!config.agent.id) ||
-      ('provider' in config.agent &&
-        'model' in config.agent &&
-        !!config.agent.provider &&
-        !!config.agent.model));
-
-  return !!hasValidAgent;
-}