💾 feat: Custom Endpoint Support for Memory LLM Config (#11214)

* feat: add support for designating custom endpoints to use with memory tool

* test: add tests for header resolution in processMemory

* chore: address comments
Dustin Healy 2026-01-06 08:25:07 -08:00 committed by GitHub
parent 04fd231b61
commit b5aa38ff33
3 changed files with 314 additions and 1 deletion
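
The commit threads a sanitized user object into the memory agent and resolves placeholders in custom endpoint headers before the memory run is created. As a rough illustration of the shape the tests below exercise (header names and values are illustrative, not taken verbatim from this diff), a custom-endpoint llmConfig can carry defaultHeaders whose values reference environment variables via ${VAR} and user placeholders via {{LIBRECHAT_USER_*}}:

// Sketch only: values mirror the tests below and are not part of this diff.
// '${CUSTOM_API_KEY}' resolves from process.env; '{{LIBRECHAT_USER_EMAIL}}'
// resolves from the sanitized user produced by createSafeUser.
const llmConfig = {
  provider: 'custom',
  model: 'gpt-4o-mini',
  configuration: {
    defaultHeaders: {
      'x-custom-api-key': '${CUSTOM_API_KEY}',
      'X-User-Identifier': '{{LIBRECHAT_USER_EMAIL}}',
    },
  },
};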


@@ -676,6 +676,7 @@ class AgentClient extends BaseClient {
getFormattedMemories: db.getFormattedMemories,
},
res: this.options.res,
user: createSafeUser(this.options.req.user),
});
this.processMemory = processMemory;
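
createSafeUser(this.options.req.user) hands the memory processor a sanitized copy of the request user. Its implementation is not part of this diff; a minimal sketch of the contract the tests rely on (credentials such as password and refreshToken are dropped, identifiers such as id and email are kept) could look like the hypothetical helper below:

// Assumption: createSafeUserSketch is a hypothetical stand-in illustrating the
// behavior the tests expect from createSafeUser; the real field list in
// ~/utils/env may differ.
import type { IUser } from '@librechat/data-schemas';

function createSafeUserSketch(user: IUser): Omit<IUser, 'password' | 'refreshToken'> {
  // Drop credential-bearing fields; keep identifiers such as id and email.
  const { password, refreshToken, ...safe } = user as IUser & {
    password?: string;
    refreshToken?: string;
  };
  return safe;
}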


@@ -0,0 +1,298 @@
import { Types } from 'mongoose';
import type { Response } from 'express';
import { Run } from '@librechat/agents';
import type { IUser } from '@librechat/data-schemas';
import { createSafeUser } from '~/utils/env';
import { processMemory } from './memory';
jest.mock('~/stream/GenerationJobManager');
jest.mock('~/utils', () => ({
Tokenizer: {
getTokenCount: jest.fn(() => 10),
},
}));
jest.mock('@librechat/agents', () => ({
Run: {
create: jest.fn(() => ({
processStream: jest.fn(() => Promise.resolve('success')),
})),
},
Providers: {
OPENAI: 'openai',
},
GraphEvents: {
TOOL_END: 'tool_end',
},
}));
function createTestUser(overrides: Partial<IUser> = {}): IUser {
return {
_id: new Types.ObjectId(),
id: new Types.ObjectId().toString(),
username: 'testuser',
email: 'test@example.com',
name: 'Test User',
avatar: 'https://example.com/avatar.png',
provider: 'email',
role: 'user',
createdAt: new Date('2021-01-01'),
updatedAt: new Date('2021-01-01'),
emailVerified: true,
...overrides,
} as IUser;
}
describe('Memory Agent Header Resolution', () => {
let testUser: IUser;
let mockRes: Response;
let mockMemoryMethods: {
setMemory: jest.Mock;
deleteMemory: jest.Mock;
getFormattedMemories: jest.Mock;
};
beforeEach(() => {
process.env.CUSTOM_API_KEY = 'sk-custom-test-key';
process.env.TEST_CUSTOM_API_KEY = 'sk-custom-test-key';
testUser = createTestUser({
id: 'user-123',
email: 'test@example.com',
});
mockRes = {
write: jest.fn(),
end: jest.fn(),
headersSent: false,
} as unknown as Response;
mockMemoryMethods = {
setMemory: jest.fn(),
deleteMemory: jest.fn(),
getFormattedMemories: jest.fn(() =>
Promise.resolve({
withKeys: 'formatted memories',
withoutKeys: 'memories without keys',
totalTokens: 100,
}),
),
};
jest.clearAllMocks();
});
afterEach(() => {
delete process.env.CUSTOM_API_KEY;
delete process.env.TEST_CUSTOM_API_KEY;
});
it('should resolve environment variables in custom endpoint headers', async () => {
const llmConfig = {
provider: 'custom',
model: 'gpt-4o-mini',
configuration: {
defaultHeaders: {
'x-custom-api-key': '${CUSTOM_API_KEY}',
'api-key': '${TEST_CUSTOM_API_KEY}',
},
},
};
await processMemory({
res: mockRes,
userId: 'user-123',
setMemory: mockMemoryMethods.setMemory,
deleteMemory: mockMemoryMethods.deleteMemory,
messages: [],
memory: 'test memory',
messageId: 'msg-123',
conversationId: 'conv-123',
validKeys: ['preferences'],
instructions: 'test instructions',
llmConfig,
user: testUser,
});
expect(Run.create as jest.Mock).toHaveBeenCalled();
const runConfig = (Run.create as jest.Mock).mock.calls[0][0];
expect(runConfig.graphConfig.llmConfig.configuration.defaultHeaders).toEqual({
'x-custom-api-key': 'sk-custom-test-key',
'api-key': 'sk-custom-test-key',
});
});
it('should resolve user placeholders in custom endpoint headers', async () => {
const llmConfig = {
provider: 'custom',
model: 'gpt-4o-mini',
configuration: {
defaultHeaders: {
'X-User-Identifier': '{{LIBRECHAT_USER_EMAIL}}',
'X-User-ID': '{{LIBRECHAT_USER_ID}}',
},
},
};
await processMemory({
res: mockRes,
userId: 'user-123',
setMemory: mockMemoryMethods.setMemory,
deleteMemory: mockMemoryMethods.deleteMemory,
messages: [],
memory: 'test memory',
messageId: 'msg-123',
conversationId: 'conv-123',
validKeys: ['preferences'],
instructions: 'test instructions',
llmConfig,
user: testUser,
});
expect(Run.create as jest.Mock).toHaveBeenCalled();
const runConfig = (Run.create as jest.Mock).mock.calls[0][0];
expect(runConfig.graphConfig.llmConfig.configuration.defaultHeaders).toEqual({
'X-User-Identifier': 'test@example.com',
'X-User-ID': 'user-123',
});
});
it('should handle mixed environment variables and user placeholders', async () => {
const llmConfig = {
provider: 'custom',
model: 'gpt-4o-mini',
configuration: {
defaultHeaders: {
'x-custom-api-key': '${CUSTOM_API_KEY}',
'X-User-Identifier': '{{LIBRECHAT_USER_EMAIL}}',
'X-Application-Identifier': 'LibreChat - Test',
},
},
};
await processMemory({
res: mockRes,
userId: 'user-123',
setMemory: mockMemoryMethods.setMemory,
deleteMemory: mockMemoryMethods.deleteMemory,
messages: [],
memory: 'test memory',
messageId: 'msg-123',
conversationId: 'conv-123',
validKeys: ['preferences'],
instructions: 'test instructions',
llmConfig,
user: testUser,
});
expect(Run.create as jest.Mock).toHaveBeenCalled();
const runConfig = (Run.create as jest.Mock).mock.calls[0][0];
expect(runConfig.graphConfig.llmConfig.configuration.defaultHeaders).toEqual({
'x-custom-api-key': 'sk-custom-test-key',
'X-User-Identifier': 'test@example.com',
'X-Application-Identifier': 'LibreChat - Test',
});
});
it('should resolve env vars when user is undefined', async () => {
const llmConfig = {
provider: 'custom',
model: 'gpt-4o-mini',
configuration: {
defaultHeaders: {
'x-custom-api-key': '${CUSTOM_API_KEY}',
},
},
};
await processMemory({
res: mockRes,
userId: 'user-123',
setMemory: mockMemoryMethods.setMemory,
deleteMemory: mockMemoryMethods.deleteMemory,
messages: [],
memory: 'test memory',
messageId: 'msg-123',
conversationId: 'conv-123',
validKeys: ['preferences'],
instructions: 'test instructions',
llmConfig,
user: undefined,
});
expect(Run.create as jest.Mock).toHaveBeenCalled();
const runConfig = (Run.create as jest.Mock).mock.calls[0][0];
expect(runConfig.graphConfig.llmConfig.configuration.defaultHeaders).toEqual({
'x-custom-api-key': 'sk-custom-test-key',
});
});
it('should not throw when llmConfig has no configuration', async () => {
const llmConfig = {
provider: 'openai',
model: 'gpt-4o-mini',
};
await processMemory({
res: mockRes,
userId: 'user-123',
setMemory: mockMemoryMethods.setMemory,
deleteMemory: mockMemoryMethods.deleteMemory,
messages: [],
memory: 'test memory',
messageId: 'msg-123',
conversationId: 'conv-123',
validKeys: ['preferences'],
instructions: 'test instructions',
llmConfig,
user: testUser,
});
expect(Run.create as jest.Mock).toHaveBeenCalled();
const runConfig = (Run.create as jest.Mock).mock.calls[0][0];
expect(runConfig.graphConfig.llmConfig.configuration).toBeUndefined();
});
it('should use createSafeUser to sanitize user data', async () => {
const userWithSensitiveData = createTestUser({
id: 'user-123',
email: 'test@example.com',
password: 'sensitive-password',
refreshToken: 'sensitive-token',
} as unknown as Partial<IUser>);
const llmConfig = {
provider: 'openai',
model: 'gpt-4o-mini',
configuration: {
defaultHeaders: {
'X-User-ID': '{{LIBRECHAT_USER_ID}}',
},
},
};
await processMemory({
res: mockRes,
userId: 'user-123',
setMemory: mockMemoryMethods.setMemory,
deleteMemory: mockMemoryMethods.deleteMemory,
messages: [],
memory: 'test memory',
messageId: 'msg-123',
conversationId: 'conv-123',
validKeys: ['preferences'],
instructions: 'test instructions',
llmConfig,
user: userWithSensitiveData,
});
expect(Run.create as jest.Mock).toHaveBeenCalled();
// Verify createSafeUser strips sensitive fields (password, refreshToken) while keeping id and email
const safeUser = createSafeUser(userWithSensitiveData);
expect(safeUser).not.toHaveProperty('password');
expect(safeUser).not.toHaveProperty('refreshToken');
expect(safeUser).toHaveProperty('id');
expect(safeUser).toHaveProperty('email');
});
});


@@ -14,10 +14,11 @@ import type {
LLMConfig,
} from '@librechat/agents';
import type { TAttachment, MemoryArtifact } from 'librechat-data-provider';
import type { ObjectId, MemoryMethods } from '@librechat/data-schemas';
import type { ObjectId, MemoryMethods, IUser } from '@librechat/data-schemas';
import type { BaseMessage, ToolMessage } from '@langchain/core/messages';
import type { Response as ServerResponse } from 'express';
import { GenerationJobManager } from '~/stream/GenerationJobManager';
import { resolveHeaders, createSafeUser } from '~/utils/env';
import { Tokenizer } from '~/utils';
type RequiredMemoryMethods = Pick<
@@ -285,6 +286,7 @@ export async function processMemory({
tokenLimit,
totalTokens = 0,
streamId = null,
user,
}: {
res: ServerResponse;
setMemory: MemoryMethods['setMemory'];
@@ -300,6 +302,7 @@ export async function processMemory({
totalTokens?: number;
llmConfig?: Partial<LLMConfig>;
streamId?: string | null;
user?: IUser;
}): Promise<(TAttachment | null)[] | undefined> {
try {
const memoryTool = createMemoryTool({
@@ -366,6 +369,14 @@ ${memory ?? 'No existing memories'}`;
}
}
const llmConfigWithHeaders = finalLLMConfig as OpenAIClientOptions;
if (llmConfigWithHeaders?.configuration?.defaultHeaders != null) {
llmConfigWithHeaders.configuration.defaultHeaders = resolveHeaders({
headers: llmConfigWithHeaders.configuration.defaultHeaders as Record<string, string>,
user: user ? createSafeUser(user) : undefined,
});
}
const artifactPromises: Promise<TAttachment | null>[] = [];
const memoryCallback = createMemoryCallback({ res, artifactPromises, streamId });
const customHandlers = {
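
resolveHeaders itself is not changed by this commit; judging from the test expectations, it substitutes ${ENV_VAR} references from process.env and {{LIBRECHAT_USER_ID}} / {{LIBRECHAT_USER_EMAIL}} placeholders from the sanitized user. A rough sketch of that substitution, under those assumptions:

// Assumption: an approximation of what resolveHeaders does to defaultHeaders,
// inferred from the test expectations above; the real utility in ~/utils/env
// likely supports more placeholders and edge cases.
function resolveHeadersSketch(
  headers: Record<string, string>,
  user?: { id?: string; email?: string },
): Record<string, string> {
  const resolved: Record<string, string> = {};
  for (const [key, value] of Object.entries(headers)) {
    resolved[key] = value
      // ${VAR} -> value from process.env (left untouched if the variable is unset)
      .replace(/\$\{([A-Z0-9_]+)\}/g, (match, name) => process.env[name] ?? match)
      // {{LIBRECHAT_USER_*}} -> fields from the sanitized user object
      .replace(/\{\{LIBRECHAT_USER_ID\}\}/g, user?.id ?? '')
      .replace(/\{\{LIBRECHAT_USER_EMAIL\}\}/g, user?.email ?? '');
  }
  return resolved;
}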
@@ -421,6 +432,7 @@ export async function createMemoryProcessor({
conversationId,
config = {},
streamId = null,
user,
}: {
res: ServerResponse;
messageId: string;
@@ -429,6 +441,7 @@ export async function createMemoryProcessor({
memoryMethods: RequiredMemoryMethods;
config?: MemoryConfig;
streamId?: string | null;
user?: IUser;
}): Promise<[string, (messages: BaseMessage[]) => Promise<(TAttachment | null)[] | undefined>]> {
const { validKeys, instructions, llmConfig, tokenLimit } = config;
const finalInstructions = instructions || getDefaultInstructions(validKeys, tokenLimit);
@@ -456,6 +469,7 @@ export async function createMemoryProcessor({
instructions: finalInstructions,
setMemory: memoryMethods.setMemory,
deleteMemory: memoryMethods.deleteMemory,
user,
});
} catch (error) {
logger.error('Memory Agent failed to process memory', error);