mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-02-19 17:08:10 +01:00
🐛 fix: Normalize output_text blocks in Responses API input conversion (#11835)
* 🐛 fix: Normalize `output_text` blocks in Responses API input conversion
Treat `output_text` content blocks the same as `input_text` when
converting Responses API input to internal message format. Previously,
assistant messages containing `output_text` blocks fell through to the
default handler, producing `{ type: 'output_text' }` without a `text`
field, which caused downstream provider adapters (e.g. Bedrock) to fail
with "Unsupported content block type: output_text".
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
* refactor: Remove ChatModelStreamHandler from OpenAI and Responses controllers
Eliminated the ChatModelStreamHandler from both OpenAIChatCompletionController and createResponse functions to streamline event handling. This change simplifies the code by relying on existing handlers for message deltas and reasoning deltas, enhancing maintainability and reducing complexity in the agent's event processing logic.
* feat: Enhance input conversion in Responses API
Updated the `convertInputToMessages` function to handle additional content types, including `input_file` and `refusal` blocks, ensuring they are converted to appropriate message formats. Implemented null filtering for content arrays and default values for missing fields, improving robustness. Added comprehensive unit tests to validate these changes and ensure correct behavior across various input scenarios.
* fix: Forward upstream provider status codes in error responses
Updated error handling in OpenAIChatCompletionController and createResponse functions to forward upstream provider status codes (e.g., Anthropic 400s) instead of masking them as 500. This change improves error reporting by providing more accurate status codes and error types, enhancing the clarity of error responses for clients.
---------
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
3bf715e05e
commit
5ea59ecb2b
8 changed files with 573 additions and 65 deletions
|
|
@@ -1,13 +1,8 @@
|
|||
const { nanoid } = require('nanoid');
|
||||
const { v4: uuidv4 } = require('uuid');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { Callback, ToolEndHandler, formatAgentMessages } = require('@librechat/agents');
|
||||
const { EModelEndpoint, ResourceType, PermissionBits } = require('librechat-data-provider');
|
||||
const {
|
||||
Callback,
|
||||
ToolEndHandler,
|
||||
formatAgentMessages,
|
||||
ChatModelStreamHandler,
|
||||
} = require('@librechat/agents');
|
||||
const {
|
||||
createRun,
|
||||
buildToolSet,
|
||||
|
|
@@ -410,9 +405,6 @@ const createResponse = async (req, res) => {
|
|||
// Collect usage for balance tracking
|
||||
const collectedUsage = [];
|
||||
|
||||
// Built-in handler for processing raw model stream chunks
|
||||
const chatModelStreamHandler = new ChatModelStreamHandler();
|
||||
|
||||
// Artifact promises for processing tool outputs
|
||||
/** @type {Promise<import('librechat-data-provider').TAttachment | null>[]} */
|
||||
const artifactPromises = [];
|
||||
|
|
@@ -443,11 +435,6 @@ const createResponse = async (req, res) => {
|
|||
|
||||
// Combine handlers
|
||||
const handlers = {
|
||||
on_chat_model_stream: {
|
||||
handle: async (event, data, metadata, graph) => {
|
||||
await chatModelStreamHandler.handle(event, data, metadata, graph);
|
||||
},
|
||||
},
|
||||
on_message_delta: responsesHandlers.on_message_delta,
|
||||
on_reasoning_delta: responsesHandlers.on_reasoning_delta,
|
||||
on_run_step: responsesHandlers.on_run_step,
|
||||
|
|
@@ -570,8 +557,6 @@ const createResponse = async (req, res) => {
|
|||
} else {
|
||||
const aggregatorHandlers = createAggregatorEventHandlers(aggregator);
|
||||
|
||||
const chatModelStreamHandler = new ChatModelStreamHandler();
|
||||
|
||||
// Collect usage for balance tracking
|
||||
const collectedUsage = [];
|
||||
|
||||
|
|
@@ -596,11 +581,6 @@ const createResponse = async (req, res) => {
|
|||
};
|
||||
|
||||
const handlers = {
|
||||
on_chat_model_stream: {
|
||||
handle: async (event, data, metadata, graph) => {
|
||||
await chatModelStreamHandler.handle(event, data, metadata, graph);
|
||||
},
|
||||
},
|
||||
on_message_delta: aggregatorHandlers.on_message_delta,
|
||||
on_reasoning_delta: aggregatorHandlers.on_reasoning_delta,
|
||||
on_run_step: aggregatorHandlers.on_run_step,
|
||||
|
|
@@ -727,7 +707,13 @@ const createResponse = async (req, res) => {
|
|||
writeDone(res);
|
||||
res.end();
|
||||
} else {
|
||||
sendResponsesErrorResponse(res, 500, errorMessage, 'server_error');
|
||||
// Forward upstream provider status codes (e.g., Anthropic 400s) instead of masking as 500
|
||||
const statusCode =
|
||||
typeof error?.status === 'number' && error.status >= 400 && error.status < 600
|
||||
? error.status
|
||||
: 500;
|
||||
const errorType = statusCode >= 400 && statusCode < 500 ? 'invalid_request' : 'server_error';
|
||||
sendResponsesErrorResponse(res, statusCode, errorMessage, errorType);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue