Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-17 08:50:15 +01:00)
* feat: Add support for agent handoffs with edges in agent forms and schemas
* chore: Mark `agent_ids` field as deprecated in favor of edges across various schemas and types
* chore: Update dependencies for @langchain/core and @librechat/agents to latest versions
* chore: Update peer dependency for @librechat/agents to version 3.0.0-rc2 in package.json
* chore: Update @librechat/agents dependency to version 3.0.0-rc3 in package.json and package-lock.json
* feat: first pass, multi-agent handoffs
* fix: update output type to ToolMessage in memory handling functions
* fix: improve type checking for graphConfig in createRun function
* refactor: remove unused content filtering logic in AgentClient
* chore: update @librechat/agents dependency to version 3.0.0-rc4 in package.json and package-lock.json
* fix: update @langchain/core peer dependency version to ^0.3.72 in package.json and package-lock.json
* fix: update @librechat/agents dependency to version 3.0.0-rc6 in package.json and package-lock.json; refactor stream rate handling in various endpoints
* feat: Agent handoff UI
* chore: update @librechat/agents dependency to version 3.0.0-rc8 in package.json and package-lock.json
* fix: improve hasInfo condition and adjust UI element classes in AgentHandoff component
* refactor: remove current fixed agent display from AgentHandoffs component due to redundancy
* feat: enhance AgentHandoffs UI with localized beta label and improved layout
* chore: update @librechat/agents dependency to version 3.0.0-rc10 in package.json and package-lock.json
* feat: add `createSequentialChainEdges` function to add back agent chaining via multi-agents
* feat: update `createSequentialChainEdges` call to only provide conversation context between agents
* feat: deprecate Agent Chain functionality and update related methods for improved clarity
* chore: update @librechat/agents dependency to version 3.0.0-rc11 in package.json and package-lock.json
* refactor: remove unused addCacheControl function and related imports and import from @librechat/agents
* chore: remove unused i18n keys
* refactor: remove unused format export from index.ts
* chore: update @librechat/agents to v3.0.0-rc13
* chore: remove BEDROCK_LEGACY provider from Providers enum
* chore: update @librechat/agents to version 3.0.2 in package.json
102 lines · 3.2 KiB · JavaScript

const initializeClient = require('./initialize');

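// Partially mock @librechat/api: keep the real module surface via requireActual,
// but stub the config and header helpers so each test controls their return values.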
jest.mock('@librechat/api', () => ({
  ...jest.requireActual('@librechat/api'),
  resolveHeaders: jest.fn(),
  getOpenAIConfig: jest.fn(),
  getCustomEndpointConfig: jest.fn().mockReturnValue({
    apiKey: 'test-key',
    baseURL: 'https://test.com',
    headers: { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
    models: { default: ['test-model'] },
  }),
}));

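// Stub the remaining service, client, and cache dependencies so the initializer
// runs in isolation, without touching real user keys, model lists, or caches.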
jest.mock('~/server/services/UserService', () => ({
  getUserKeyValues: jest.fn(),
  checkUserKeyExpiry: jest.fn(),
}));

// Config is now passed via req.config, not getAppConfig

jest.mock('~/server/services/ModelService', () => ({
  fetchModels: jest.fn(),
}));

jest.mock('~/app/clients/OpenAIClient', () => {
  return jest.fn().mockImplementation(() => ({
    options: {},
  }));
});

jest.mock('~/cache/getLogStores', () =>
  jest.fn().mockReturnValue({
    get: jest.fn(),
  }),
);

describe('custom/initializeClient', () => {
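  // Minimal Express-style request fixture: authenticated user, endpoint in the body,
  // and the app config attached to req.config (including the global streamRate).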
  const mockRequest = {
    body: { endpoint: 'test-endpoint' },
    user: { id: 'user-123', email: 'test@example.com', role: 'user' },
    app: { locals: {} },
    config: {
      endpoints: {
        all: {
          streamRate: 25,
        },
      },
    },
  };

  const mockResponse = {};

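  // Reset and re-prime the shared mocks before every test so calls from one case don't leak into the next.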
  beforeEach(() => {
    jest.clearAllMocks();
    const { getCustomEndpointConfig, resolveHeaders, getOpenAIConfig } = require('@librechat/api');
    getCustomEndpointConfig.mockReturnValue({
      apiKey: 'test-key',
      baseURL: 'https://test.com',
      headers: { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
      models: { default: ['test-model'] },
    });
    resolveHeaders.mockReturnValue({ 'x-user': 'user-123', 'x-email': 'test@example.com' });
    getOpenAIConfig.mockReturnValue({
      useLegacyContent: true,
      endpointTokenConfig: null,
      llmConfig: {
        callbacks: [],
      },
    });
  });

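  // The endpoint's {{LIBRECHAT_USER_*}} header placeholders should be resolved against the authenticated user.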
  it('calls resolveHeaders with headers, user, and body for body placeholder support', async () => {
    const { resolveHeaders } = require('@librechat/api');
    await initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true });
    expect(resolveHeaders).toHaveBeenCalledWith({
      headers: { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' },
      user: { id: 'user-123', email: 'test@example.com', role: 'user' },
      /**
       * Note: Request-based Header Resolution is deferred until right before the LLM request is made
      body: { endpoint: 'test-endpoint' }, // body - supports {{LIBRECHAT_BODY_*}} placeholders
       */
    });
  });

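  // A missing custom endpoint config should surface as a descriptive error rather than a silent failure.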
  it('throws if endpoint config is missing', async () => {
    const { getCustomEndpointConfig } = require('@librechat/api');
    getCustomEndpointConfig.mockReturnValueOnce(null);
    await expect(
      initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true }),
    ).rejects.toThrow('Config not found for the test-endpoint custom endpoint.');
  });

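  // Initialization requires an authenticated user; without one, reading user.id throws.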
  it('throws if user is missing', async () => {
    await expect(
      initializeClient({
        req: { ...mockRequest, user: undefined },
        res: mockResponse,
        optionsOnly: true,
      }),
    ).rejects.toThrow("Cannot read properties of undefined (reading 'id')");
  });
});