🧩 fix: Missing Memory Agent Assignment for Matching IDs (#11514)

* fix: `useMemory` in AgentClient for PrelimAgent Assignment

* Updated the useMemory method in AgentClient to handle prelimAgent assignment based on memory configuration.
* Added logic to return early if prelimAgent is undefined, improving flow control (see the sketch after this commit message).
* Introduced comprehensive unit tests to validate behavior for various memory configurations, including scenarios for matching and differing agent IDs, as well as handling of ephemeral agents.
* Mocked necessary dependencies in tests to ensure isolation and reliability of the new functionality.

* fix: Update temperature handling for Bedrock and Anthropic providers in memory management

* fix: Replace hardcoded provider strings with constants in memory agent tests

* fix: Replace hardcoded provider string with constant in allowedProviders for AgentClient

* fix: Update memory agent tests to use actual Providers and GraphEvents constants
Danny Avila committed 2026-01-25 12:08:52 -05:00 (via GitHub)
parent 6a49861861 · commit 0b4deac953
4 changed files with 378 additions and 20 deletions

Changes to the memory agent tests (spec file):

```diff
@@ -1,5 +1,5 @@
 import { Types } from 'mongoose';
-import { Run } from '@librechat/agents';
+import { Run, Providers } from '@librechat/agents';
 import type { IUser } from '@librechat/data-schemas';
 import type { Response } from 'express';
 import { processMemory } from './memory';
@@ -37,20 +37,18 @@ jest.mock('~/utils', () => ({
 
 const { createSafeUser } = jest.requireMock('~/utils');
 
-jest.mock('@librechat/agents', () => ({
-  Run: {
-    create: jest.fn(() => ({
-      processStream: jest.fn(() => Promise.resolve('success')),
-    })),
-  },
-  Providers: {
-    OPENAI: 'openai',
-    BEDROCK: 'bedrock',
-  },
-  GraphEvents: {
-    TOOL_END: 'tool_end',
-  },
-}));
+jest.mock('@librechat/agents', () => {
+  const actual = jest.requireActual('@librechat/agents');
+  return {
+    Run: {
+      create: jest.fn(() => ({
+        processStream: jest.fn(() => Promise.resolve('success')),
+      })),
+    },
+    Providers: actual.Providers,
+    GraphEvents: actual.GraphEvents,
+  };
+});
 
 function createTestUser(overrides: Partial<IUser> = {}): IUser {
   return {
@@ -255,7 +253,7 @@ describe('Memory Agent Header Resolution', () => {
 
   it('should not throw when llmConfig has no configuration', async () => {
     const llmConfig = {
-      provider: 'openai',
+      provider: Providers.OPENAI,
       model: 'gpt-4o-mini',
     };
 
@@ -288,7 +286,7 @@ describe('Memory Agent Header Resolution', () => {
     } as unknown as Partial<IUser>);
 
     const llmConfig = {
-      provider: 'openai',
+      provider: Providers.OPENAI,
       model: 'gpt-4o-mini',
       configuration: {
         defaultHeaders: {
@@ -324,7 +322,7 @@ describe('Memory Agent Header Resolution', () => {
 
   it('should include instructions in user message for Bedrock provider', async () => {
     const llmConfig = {
-      provider: 'bedrock',
+      provider: Providers.BEDROCK,
       model: 'us.anthropic.claude-haiku-4-5-20251001-v1:0',
     };
 
@@ -356,7 +354,7 @@ describe('Memory Agent Header Resolution', () => {
 
   it('should pass instructions to graphConfig for non-Bedrock providers', async () => {
     const llmConfig = {
-      provider: 'openai',
+      provider: Providers.OPENAI,
       model: 'gpt-4o-mini',
     };
 
@@ -382,4 +380,161 @@ describe('Memory Agent Header Resolution', () => {
     expect(runConfig.graphConfig.instructions).toBe('test instructions');
     expect(runConfig.graphConfig.additional_instructions).toBeDefined();
   });
+
+  it('should set temperature to 1 for Bedrock with thinking enabled', async () => {
+    const llmConfig = {
+      provider: Providers.BEDROCK,
+      model: 'us.anthropic.claude-sonnet-4-20250514-v1:0',
+      temperature: 0.7,
+      additionalModelRequestFields: {
+        thinking: {
+          type: 'enabled',
+          budget_tokens: 5000,
+        },
+      },
+    };
+
+    await processMemory({
+      res: mockRes,
+      userId: 'user-123',
+      setMemory: mockMemoryMethods.setMemory,
+      deleteMemory: mockMemoryMethods.deleteMemory,
+      messages: [],
+      memory: 'existing memory',
+      messageId: 'msg-123',
+      conversationId: 'conv-123',
+      validKeys: ['preferences'],
+      instructions: 'test instructions',
+      llmConfig,
+      user: testUser,
+    });
+
+    expect(Run.create as jest.Mock).toHaveBeenCalled();
+    const runConfig = (Run.create as jest.Mock).mock.calls[0][0];
+    expect(runConfig.graphConfig.llmConfig.temperature).toBe(1);
+  });
+
+  it('should not modify temperature for Bedrock without thinking enabled', async () => {
+    const llmConfig = {
+      provider: Providers.BEDROCK,
+      model: 'us.anthropic.claude-haiku-4-5-20251001-v1:0',
+      temperature: 0.7,
+    };
+
+    await processMemory({
+      res: mockRes,
+      userId: 'user-123',
+      setMemory: mockMemoryMethods.setMemory,
+      deleteMemory: mockMemoryMethods.deleteMemory,
+      messages: [],
+      memory: 'existing memory',
+      messageId: 'msg-123',
+      conversationId: 'conv-123',
+      validKeys: ['preferences'],
+      instructions: 'test instructions',
+      llmConfig,
+      user: testUser,
+    });
+
+    expect(Run.create as jest.Mock).toHaveBeenCalled();
+    const runConfig = (Run.create as jest.Mock).mock.calls[0][0];
+    expect(runConfig.graphConfig.llmConfig.temperature).toBe(0.7);
+  });
+
+  it('should remove temperature for Anthropic with thinking enabled', async () => {
+    const llmConfig = {
+      provider: Providers.ANTHROPIC,
+      model: 'claude-sonnet-4-20250514',
+      temperature: 0.7,
+      thinking: {
+        type: 'enabled',
+        budget_tokens: 5000,
+      },
+    };
+
+    await processMemory({
+      res: mockRes,
+      userId: 'user-123',
+      setMemory: mockMemoryMethods.setMemory,
+      deleteMemory: mockMemoryMethods.deleteMemory,
+      messages: [],
+      memory: 'existing memory',
+      messageId: 'msg-123',
+      conversationId: 'conv-123',
+      validKeys: ['preferences'],
+      instructions: 'test instructions',
+      llmConfig,
+      user: testUser,
+    });
+
+    expect(Run.create as jest.Mock).toHaveBeenCalled();
+    const runConfig = (Run.create as jest.Mock).mock.calls[0][0];
+    expect(runConfig.graphConfig.llmConfig.temperature).toBeUndefined();
+    expect(runConfig.graphConfig.llmConfig.thinking).toEqual({
+      type: 'enabled',
+      budget_tokens: 5000,
+    });
+  });
+
+  it('should not modify temperature for Anthropic without thinking enabled', async () => {
+    const llmConfig = {
+      provider: Providers.ANTHROPIC,
+      model: 'claude-sonnet-4-20250514',
+      temperature: 0.7,
+    };
+
+    await processMemory({
+      res: mockRes,
+      userId: 'user-123',
+      setMemory: mockMemoryMethods.setMemory,
+      deleteMemory: mockMemoryMethods.deleteMemory,
+      messages: [],
+      memory: 'existing memory',
+      messageId: 'msg-123',
+      conversationId: 'conv-123',
+      validKeys: ['preferences'],
+      instructions: 'test instructions',
+      llmConfig,
+      user: testUser,
+    });
+
+    expect(Run.create as jest.Mock).toHaveBeenCalled();
+    const runConfig = (Run.create as jest.Mock).mock.calls[0][0];
+    expect(runConfig.graphConfig.llmConfig.temperature).toBe(0.7);
+  });
+
+  it('should not modify temperature for Anthropic with thinking type not enabled', async () => {
+    const llmConfig = {
+      provider: Providers.ANTHROPIC,
+      model: 'claude-sonnet-4-20250514',
+      temperature: 0.7,
+      thinking: {
+        type: 'disabled',
+      },
+    };
+
+    await processMemory({
+      res: mockRes,
+      userId: 'user-123',
+      setMemory: mockMemoryMethods.setMemory,
+      deleteMemory: mockMemoryMethods.deleteMemory,
+      messages: [],
+      memory: 'existing memory',
+      messageId: 'msg-123',
+      conversationId: 'conv-123',
+      validKeys: ['preferences'],
+      instructions: 'test instructions',
+      llmConfig,
+      user: testUser,
+    });
+
+    expect(Run.create as jest.Mock).toHaveBeenCalled();
+    const runConfig = (Run.create as jest.Mock).mock.calls[0][0];
+    expect(runConfig.graphConfig.llmConfig.temperature).toBe(0.7);
+  });
 });
```

Changes to the memory module (imported as `./memory` by the tests above):

```diff
@@ -369,7 +369,6 @@ ${memory ?? 'No existing memories'}`;
     }
   }
 
-  // Handle Bedrock with thinking enabled - temperature must be 1
   const bedrockConfig = finalLLMConfig as {
     additionalModelRequestFields?: { thinking?: unknown };
     temperature?: number;
@@ -382,6 +381,18 @@ ${memory ?? 'No existing memories'}`;
     (finalLLMConfig as unknown as Record<string, unknown>).temperature = 1;
   }
 
+  const anthropicConfig = finalLLMConfig as {
+    thinking?: { type?: string };
+    temperature?: number;
+  };
+  if (
+    llmConfig?.provider === Providers.ANTHROPIC &&
+    anthropicConfig.thinking?.type === 'enabled' &&
+    anthropicConfig.temperature != null
+  ) {
+    delete (finalLLMConfig as Record<string, unknown>).temperature;
+  }
+
   const llmConfigWithHeaders = finalLLMConfig as OpenAIClientOptions;
   if (llmConfigWithHeaders?.configuration?.defaultHeaders != null) {
     llmConfigWithHeaders.configuration.defaultHeaders = resolveHeaders({
```