diff --git a/api/package.json b/api/package.json
index fa06fc0432..bc212227d3 100644
--- a/api/package.json
+++ b/api/package.json
@@ -44,7 +44,7 @@
     "@google/genai": "^1.19.0",
     "@keyv/redis": "^4.3.3",
     "@langchain/core": "^0.3.80",
-    "@librechat/agents": "^3.1.45",
+    "@librechat/agents": "^3.1.50",
     "@librechat/api": "*",
     "@librechat/data-schemas": "*",
     "@microsoft/microsoft-graph-client": "^3.0.7",
diff --git a/api/server/controllers/agents/callbacks.js b/api/server/controllers/agents/callbacks.js
index 163fc4ebba..0bb935795d 100644
--- a/api/server/controllers/agents/callbacks.js
+++ b/api/server/controllers/agents/callbacks.js
@@ -1,22 +1,13 @@
 const { nanoid } = require('nanoid');
-const { Constants } = require('@librechat/agents');
 const { logger } = require('@librechat/data-schemas');
+const { Constants, EnvVar, GraphEvents, ToolEndHandler } = require('@librechat/agents');
+const { Tools, StepTypes, FileContext, ErrorTypes } = require('librechat-data-provider');
 const {
   sendEvent,
   GenerationJobManager,
   writeAttachmentEvent,
   createToolExecuteHandler,
 } = require('@librechat/api');
-const { Tools, StepTypes, FileContext, ErrorTypes } = require('librechat-data-provider');
-const {
-  EnvVar,
-  Providers,
-  GraphEvents,
-  getMessageId,
-  ToolEndHandler,
-  handleToolCalls,
-  ChatModelStreamHandler,
-} = require('@librechat/agents');
 const { processFileCitations } = require('~/server/services/Files/Citations');
 const { processCodeOutput } = require('~/server/services/Files/Code/process');
 const { loadAuthValues } = require('~/server/services/Tools/credentials');
@@ -57,8 +48,6 @@ class ModelEndHandler {
     let errorMessage;
     try {
       const agentContext = graph.getAgentContext(metadata);
-      const isGoogle = agentContext.provider === Providers.GOOGLE;
-      const streamingDisabled = !!agentContext.clientOptions?.disableStreaming;
       if (data?.output?.additional_kwargs?.stop_reason === 'refusal') {
         const info = { ...data.output.additional_kwargs };
         errorMessage = JSON.stringify({
@@ -73,21 +62,6 @@
         });
       }
-      const toolCalls = data?.output?.tool_calls;
-      let hasUnprocessedToolCalls = false;
-      if (Array.isArray(toolCalls) && toolCalls.length > 0 && graph?.toolCallStepIds?.has) {
-        try {
-          hasUnprocessedToolCalls = toolCalls.some(
-            (tc) => tc?.id && !graph.toolCallStepIds.has(tc.id),
-          );
-        } catch {
-          hasUnprocessedToolCalls = false;
-        }
-      }
-      if (isGoogle || streamingDisabled || hasUnprocessedToolCalls) {
-        await handleToolCalls(toolCalls, metadata, graph);
-      }
-
       const usage = data?.output?.usage_metadata;
       if (!usage) {
         return this.finalize(errorMessage);
       }
@@ -98,38 +72,6 @@
       }

       this.collectedUsage.push(usage);
-      if (!streamingDisabled) {
-        return this.finalize(errorMessage);
-      }
-      if (!data.output.content) {
-        return this.finalize(errorMessage);
-      }
-      const stepKey = graph.getStepKey(metadata);
-      const message_id = getMessageId(stepKey, graph) ?? '';
-      if (message_id) {
-        await graph.dispatchRunStep(stepKey, {
-          type: StepTypes.MESSAGE_CREATION,
-          message_creation: {
-            message_id,
-          },
-        });
-      }
-      const stepId = graph.getStepIdByKey(stepKey);
-      const content = data.output.content;
-      if (typeof content === 'string') {
-        await graph.dispatchMessageDelta(stepId, {
-          content: [
-            {
-              type: 'text',
-              text: content,
-            },
-          ],
-        });
-      } else if (content.every((c) => c.type?.startsWith('text'))) {
-        await graph.dispatchMessageDelta(stepId, {
-          content,
-        });
-      }
     } catch (error) {
       logger.error('Error handling model end event:', error);
       return this.finalize(errorMessage);
@@ -200,7 +142,6 @@ function getDefaultHandlers({
   const handlers = {
     [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(collectedUsage),
     [GraphEvents.TOOL_END]: new ToolEndHandler(toolEndCallback, logger),
-    [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
     [GraphEvents.ON_RUN_STEP]: {
       /**
        * Handle ON_RUN_STEP event.
diff --git a/package-lock.json b/package-lock.json
index b1af66176a..b3c2b41593 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -59,7 +59,7 @@
         "@google/genai": "^1.19.0",
         "@keyv/redis": "^4.3.3",
         "@langchain/core": "^0.3.80",
-        "@librechat/agents": "^3.1.45",
+        "@librechat/agents": "^3.1.50",
         "@librechat/api": "*",
         "@librechat/data-schemas": "*",
         "@microsoft/microsoft-graph-client": "^3.0.7",
@@ -11208,9 +11208,9 @@
       }
     },
     "node_modules/@librechat/agents": {
-      "version": "3.1.45",
-      "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-3.1.45.tgz",
-      "integrity": "sha512-izapt5PScVF52xhDH5N5uzCbjK1BMaGsUiKFNVeO20AjDWul+ipDm5zWmXnfX4veD8YHqY5rPnRU0oAecljZIg==",
+      "version": "3.1.50",
+      "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-3.1.50.tgz",
+      "integrity": "sha512-+gdfUJ7X3PJ20/c+8lETY68D6QpxFlCIlGUQBF4A8VKv+Po9J/TO5rWE+OmzmPByYpye7GrcxVCBLfRTvZKraw==",
       "license": "MIT",
       "dependencies": {
         "@anthropic-ai/sdk": "^0.73.0",
@@ -42205,7 +42205,7 @@
         "@google/genai": "^1.19.0",
         "@keyv/redis": "^4.3.3",
         "@langchain/core": "^0.3.80",
-        "@librechat/agents": "^3.1.45",
+        "@librechat/agents": "^3.1.50",
         "@librechat/data-schemas": "*",
         "@modelcontextprotocol/sdk": "^1.26.0",
         "@smithy/node-http-handler": "^4.4.5",
diff --git a/packages/api/package.json b/packages/api/package.json
index 5ee9e3bb70..107a660315 100644
--- a/packages/api/package.json
+++ b/packages/api/package.json
@@ -87,7 +87,7 @@
     "@google/genai": "^1.19.0",
     "@keyv/redis": "^4.3.3",
     "@langchain/core": "^0.3.80",
-    "@librechat/agents": "^3.1.45",
+    "@librechat/agents": "^3.1.50",
     "@librechat/data-schemas": "*",
     "@modelcontextprotocol/sdk": "^1.26.0",
     "@smithy/node-http-handler": "^4.4.5",