fix: update @librechat/agents dependency to version 3.0.0-rc6 in package.json and package-lock.json; refactor stream rate handling in various endpoints

Danny Avila 2025-09-04 02:00:39 -04:00
parent 6d91fa1fe5
commit 6e0e47d5dd
10 changed files with 21 additions and 54 deletions

View file

@@ -49,7 +49,7 @@
     "@langchain/google-vertexai": "^0.2.13",
     "@langchain/openai": "^0.5.18",
     "@langchain/textsplitters": "^0.1.0",
-    "@librechat/agents": "^3.0.0-rc4",
+    "@librechat/agents": "^3.0.0-rc6",
     "@librechat/api": "*",
     "@librechat/data-schemas": "*",
     "@microsoft/microsoft-graph-client": "^3.0.7",

View file

@@ -27,13 +27,13 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio
   const anthropicConfig = appConfig.endpoints?.[EModelEndpoint.anthropic];
   if (anthropicConfig) {
-    clientOptions.streamRate = anthropicConfig.streamRate;
+    clientOptions._lc_stream_delay = anthropicConfig.streamRate;
     clientOptions.titleModel = anthropicConfig.titleModel;
   }
 
   const allConfig = appConfig.endpoints?.all;
   if (allConfig) {
-    clientOptions.streamRate = allConfig.streamRate;
+    clientOptions._lc_stream_delay = allConfig.streamRate;
   }
 
   if (optionsOnly) {
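
The two substitutions above retire the streamRate client option in favor of an _lc_stream_delay field, which the rc6 release of @librechat/agents presumably reads to pace streamed output; the same one-line substitution recurs in the custom-endpoint and Azure initializers further down. The real consumer lives inside that library; the sketch below only illustrates the general pattern, assuming the value is a delay in milliseconds applied between chunks (throttleStream and sleep are illustrative names, not library APIs):

const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// Pace an async token stream by a fixed per-chunk delay, the way a
// _lc_stream_delay-style option would be honored downstream.
async function* throttleStream(stream, delayMs) {
  for await (const chunk of stream) {
    yield chunk;
    if (delayMs > 0) {
      // Awaiting inside the generator preserves ordering and backpressure,
      // unlike a fire-and-forget callback.
      await sleep(delayMs);
    }
  }
}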

View file

@@ -1,8 +1,6 @@
 const { HttpsProxyAgent } = require('https-proxy-agent');
-const { createHandleLLMNewToken } = require('@librechat/api');
 const {
   AuthType,
-  Constants,
   EModelEndpoint,
   bedrockInputParser,
   bedrockOutputParser,
@@ -11,7 +9,6 @@ const {
 const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
 
 const getOptions = async ({ req, overrideModel, endpointOption }) => {
-  const appConfig = req.config;
   const {
     BEDROCK_AWS_SECRET_ACCESS_KEY,
     BEDROCK_AWS_ACCESS_KEY_ID,
@@ -47,10 +44,12 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => {
     checkUserKeyExpiry(expiresAt, EModelEndpoint.bedrock);
   }
 
-  /** @type {number} */
+  /*
+  Callback for stream rate no longer awaits and may end the stream prematurely
+  /** @type {number}
   let streamRate = Constants.DEFAULT_STREAM_RATE;
 
-  /** @type {undefined | TBaseEndpoint} */
+  /** @type {undefined | TBaseEndpoint}
   const bedrockConfig = appConfig.endpoints?.[EModelEndpoint.bedrock];
 
   if (bedrockConfig && bedrockConfig.streamRate) {
@@ -61,6 +60,7 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => {
   if (allConfig && allConfig.streamRate) {
     streamRate = allConfig.streamRate;
   }
+  */
 
   /** @type {BedrockClientOptions} */
   const requestOptions = {
@@ -88,12 +88,6 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => {
     llmConfig.endpointHost = BEDROCK_REVERSE_PROXY;
   }
 
-  llmConfig.callbacks = [
-    {
-      handleLLMNewToken: createHandleLLMNewToken(streamRate),
-    },
-  ];
-
   return {
     /** @type {BedrockClientOptions} */
     llmConfig,
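
For context on why the throttle block above is commented out and the callback removed rather than migrated: createHandleLLMNewToken produced a per-token handler that slept for streamRate milliseconds, so throttling only worked while the framework awaited the handler. As the new comment notes, the callback is no longer awaited and may end the stream prematurely. A rough reconstruction of the removed helper's shape (illustrative; see @librechat/api for the original):

// Returns a handleLLMNewToken callback that pauses between tokens.
// If the caller stops awaiting it, the pause no longer throttles anything
// and pending timers can outlive the stream.
const createHandleLLMNewToken = (streamRate) => {
  return async () => {
    if (streamRate) {
      await new Promise((resolve) => setTimeout(resolve, streamRate));
    }
  };
};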

View file

@@ -4,7 +4,6 @@ const {
   isUserProvided,
   getOpenAIConfig,
   getCustomEndpointConfig,
-  createHandleLLMNewToken,
 } = require('@librechat/api');
 const {
   CacheKeys,
@@ -159,11 +158,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
     if (!clientOptions.streamRate) {
       return options;
     }
-    options.llmConfig.callbacks = [
-      {
-        handleLLMNewToken: createHandleLLMNewToken(clientOptions.streamRate),
-      },
-    ];
+    options.llmConfig._lc_stream_delay = clientOptions.streamRate;
     return options;
   }

View file

@@ -4,7 +4,6 @@ jest.mock('@librechat/api', () => ({
   ...jest.requireActual('@librechat/api'),
   resolveHeaders: jest.fn(),
   getOpenAIConfig: jest.fn(),
-  createHandleLLMNewToken: jest.fn(),
   getCustomEndpointConfig: jest.fn().mockReturnValue({
     apiKey: 'test-key',
     baseURL: 'https://test.com',

View file

@@ -5,7 +5,6 @@ const {
   isUserProvided,
   getOpenAIConfig,
   getAzureCredentials,
-  createHandleLLMNewToken,
 } = require('@librechat/api');
 const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
 const OpenAIClient = require('~/app/clients/OpenAIClient');
@@ -151,11 +150,7 @@ const initializeClient = async ({
     if (!streamRate) {
       return options;
     }
-    options.llmConfig.callbacks = [
-      {
-        handleLLMNewToken: createHandleLLMNewToken(streamRate),
-      },
-    ];
+    options.llmConfig._lc_stream_delay = streamRate;
     return options;
   }