Mirror of https://github.com/danny-avila/LibreChat.git, synced 2026-01-22 02:06:12 +01:00
fix: update @librechat/agents dependency to version 3.0.0-rc6 in package.json and package-lock.json; refactor stream rate handling in various endpoints
commit 6e0e47d5dd (parent 6d91fa1fe5)
10 changed files with 21 additions and 54 deletions
@@ -4,7 +4,6 @@ const {
   isUserProvided,
   getOpenAIConfig,
   getCustomEndpointConfig,
   createHandleLLMNewToken,
 } = require('@librechat/api');
 const {
   CacheKeys,
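The first hunk adjusts the helpers destructured from @librechat/api in the endpoint initializer. Below is a minimal sketch of how those helpers might be wired together; the argument shapes and the buildEndpointOptions wrapper are assumptions for illustration, not the library's documented API (createHandleLLMNewToken is covered with the second hunk).

```js
// Hypothetical sketch only: how the helpers destructured from '@librechat/api'
// could combine when initializing a custom endpoint client. The signatures
// used here are assumptions, not the library's actual API.
const {
  isUserProvided,
  getOpenAIConfig,
  getCustomEndpointConfig,
  createHandleLLMNewToken, // used in the stream-rate sketch further down
} = require('@librechat/api');

function buildEndpointOptions(req, endpoint) {
  // Assumed: look up the static config registered for this custom endpoint.
  const endpointConfig = getCustomEndpointConfig(endpoint);

  // Assumed: `isUserProvided` flags credentials the user supplies at runtime.
  const apiKey = isUserProvided(endpointConfig.apiKey)
    ? req.body.key
    : endpointConfig.apiKey;

  // Assumed: build the LLM options from the key and endpoint settings.
  return getOpenAIConfig(apiKey, { baseURL: endpointConfig.baseURL });
}
```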
@@ -159,11 +158,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
   if (!clientOptions.streamRate) {
     return options;
   }
   options.llmConfig.callbacks = [
     {
       handleLLMNewToken: createHandleLLMNewToken(clientOptions.streamRate),
     },
   ];
   options.llmConfig._lc_stream_delay = clientOptions.streamRate;
   return options;
 }
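The second hunk gates the stream-rate handling: when clientOptions.streamRate is unset, initializeClient returns the options untouched; otherwise a handleLLMNewToken callback built by createHandleLLMNewToken is attached to the LLM config. The sketch below shows one plausible shape for such a per-token throttle, assuming the factory simply sleeps for streamRate milliseconds per emitted token; this is an assumption about the helper's behavior, not its actual source.

```js
// Hypothetical sketch of a stream-rate throttle in the style of
// createHandleLLMNewToken: pause for `streamRate` milliseconds every time
// the model emits a token, so the outgoing stream is paced.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

function createTokenThrottle(streamRate) {
  return async function handleLLMNewToken() {
    if (streamRate > 0) {
      await sleep(streamRate);
    }
  };
}

// Usage mirroring the hunk above: attach the callback only when a
// stream rate is configured for the endpoint.
function applyStreamRate(options, clientOptions) {
  if (!clientOptions.streamRate) {
    return options;
  }
  options.llmConfig.callbacks = [
    { handleLLMNewToken: createTokenThrottle(clientOptions.streamRate) },
  ];
  return options;
}
```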
@@ -4,7 +4,6 @@ jest.mock('@librechat/api', () => ({
   ...jest.requireActual('@librechat/api'),
   resolveHeaders: jest.fn(),
   getOpenAIConfig: jest.fn(),
   createHandleLLMNewToken: jest.fn(),
   getCustomEndpointConfig: jest.fn().mockReturnValue({
     apiKey: 'test-key',
     baseURL: 'https://test.com',
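The test hunk relies on Jest's partial-mock pattern: jest.requireActual keeps the real exports of @librechat/api while a handful of helpers are replaced with jest.fn() stubs. A self-contained sketch of that pattern follows; the test body and the 'my-endpoint' argument are illustrative assumptions, not taken from the repository's test file.

```js
// Hypothetical sketch of the partial-mock pattern from the hunk above:
// spread the real module, then override only the helpers the test controls.
jest.mock('@librechat/api', () => ({
  ...jest.requireActual('@librechat/api'),
  resolveHeaders: jest.fn(),
  getOpenAIConfig: jest.fn(),
  createHandleLLMNewToken: jest.fn(),
  getCustomEndpointConfig: jest.fn().mockReturnValue({
    apiKey: 'test-key',
    baseURL: 'https://test.com',
  }),
}));

const { getCustomEndpointConfig, createHandleLLMNewToken } = require('@librechat/api');

test('overridden helpers return the stubbed values', () => {
  // The endpoint name here is an illustrative assumption.
  const config = getCustomEndpointConfig('my-endpoint');
  expect(config).toEqual({ apiKey: 'test-key', baseURL: 'https://test.com' });
  expect(createHandleLLMNewToken).not.toHaveBeenCalled();
});
```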