Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-23 03:40:14 +01:00)
🔄 fix: Assistants Endpoint & Minor Issues (#7274)
* 🔄 fix: Include usage in stream options for OpenAI and Azure endpoints
* fix: Agents support for Azure serverless endpoints
* fix: Refactor condition for assistants and azureAssistants endpoint handling
* AWS Titan via Bedrock: model doesn't support system messages, Closes #6456
* fix: Add EndpointSchemaKey type to endpoint parameters in buildDefaultConvo and ensure assistantId is always defined
* fix: Handle new conversation state for assistants endpoint in finalHandler
* fix: Add spec and iconURL parameters to `saveAssistantMessage` to persist modelSpec fields
* fix: Handle assistant unlinking even if no valid files to delete
* chore: move type definitions from callbacks.js to typedefs.js
* chore: Add StandardGraph typedef to typedefs.js
* chore: Update parameter type for graph in ModelEndHandler to StandardGraph

---------

Co-authored-by: Andres Restrepo <andres@enric.ai>
parent 3606349a0f
commit 71105cd49c
13 changed files with 119 additions and 35 deletions
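The first fix listed above ("Include usage in stream options") is not visible in the hunks shown below. As a rough sketch of what enabling usage reporting on a streamed Chat Completions request looks like — illustrative only, using the openai SDK directly rather than LibreChat's own client wrappers:

import OpenAI from 'openai';

// Illustrative sketch of the stream_options flag the commit message refers to;
// this is not LibreChat's actual client code.
const openai = new OpenAI();

async function streamWithUsage() {
  const stream = await openai.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [{ role: 'user', content: 'Hello' }],
    stream: true,
    stream_options: { include_usage: true },
  });

  for await (const chunk of stream) {
    // With include_usage enabled, the final chunk carries token counts,
    // so usage can be recorded even for streamed responses.
    if (chunk.usage) {
      console.log(chunk.usage.prompt_tokens, chunk.usage.completion_tokens);
    }
  }
}

streamWithUsage();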
@@ -14,15 +14,6 @@ const { loadAuthValues } = require('~/server/services/Tools/credentials');
 const { saveBase64Image } = require('~/server/services/Files/process');
 const { logger, sendEvent } = require('~/config');
 
-/** @typedef {import('@librechat/agents').Graph} Graph */
-/** @typedef {import('@librechat/agents').EventHandler} EventHandler */
-/** @typedef {import('@librechat/agents').ModelEndData} ModelEndData */
-/** @typedef {import('@librechat/agents').ToolEndData} ToolEndData */
-/** @typedef {import('@librechat/agents').ToolEndCallback} ToolEndCallback */
-/** @typedef {import('@librechat/agents').ChatModelStreamHandler} ChatModelStreamHandler */
-/** @typedef {import('@librechat/agents').ContentAggregatorResult['aggregateContent']} ContentAggregator */
-/** @typedef {import('@librechat/agents').GraphEvents} GraphEvents */
-
 class ModelEndHandler {
   /**
    * @param {Array<UsageMetadata>} collectedUsage

@@ -38,7 +29,7 @@ class ModelEndHandler {
    * @param {string} event
    * @param {ModelEndData | undefined} data
    * @param {Record<string, unknown> | undefined} metadata
-   * @param {Graph} graph
+   * @param {StandardGraph} graph
    * @returns
    */
   handle(event, data, metadata, graph) {
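Per the commit message, the typedefs removed from callbacks.js above were moved to typedefs.js rather than deleted, and a StandardGraph typedef was added there as well. A plausible shape for that destination block — the exact placement in typedefs.js, and the assumption that StandardGraph is also exported from @librechat/agents, are guesses, not shown in this diff:

/** @typedef {import('@librechat/agents').Graph} Graph */
/** @typedef {import('@librechat/agents').StandardGraph} StandardGraph */
/** @typedef {import('@librechat/agents').EventHandler} EventHandler */
/** @typedef {import('@librechat/agents').ModelEndData} ModelEndData */
/** @typedef {import('@librechat/agents').ToolEndData} ToolEndData */
/** @typedef {import('@librechat/agents').ToolEndCallback} ToolEndCallback */
/** @typedef {import('@librechat/agents').ChatModelStreamHandler} ChatModelStreamHandler */
/** @typedef {import('@librechat/agents').ContentAggregatorResult['aggregateContent']} ContentAggregator */
/** @typedef {import('@librechat/agents').GraphEvents} GraphEvents */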
@@ -58,7 +58,7 @@ const payloadParser = ({ req, agent, endpoint }) => {
 
 const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deepseek]);
 
-const noSystemModelRegex = [/\b(o1-preview|o1-mini)\b/gi];
+const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];
 
 // const { processMemory, memoryInstructions } = require('~/server/services/Endpoints/agents/memory');
 // const { getFormattedMemories } = require('~/models/Memory');
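This hunk carries the Titan fix: Amazon Titan text models are added to the list of models that cannot receive system messages. A minimal sketch of how such a list can be consulted — a hypothetical helper, not the actual LibreChat implementation:

// Hypothetical helper: decide whether a model accepts a system message.
// Fresh RegExp instances avoid the stateful lastIndex behaviour of the
// shared /g-flagged patterns.
const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];

function supportsSystemMessage(modelName) {
  return !noSystemModelRegex.some((re) => new RegExp(re.source, 'i').test(modelName));
}

console.log(supportsSystemMessage('amazon.titan-text-express-v1')); // false
console.log(supportsSystemMessage('gpt-4o'));                       // true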
@@ -119,7 +119,7 @@ const chatV1 = async (req, res) => {
     } else if (/Files.*are invalid/.test(error.message)) {
       const errorMessage = `Files are invalid, or may not have uploaded yet.${
         endpoint === EModelEndpoint.azureAssistants
-          ? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
+          ? " If using Azure OpenAI, files are only available in the region of the assistant's model at the time of upload."
           : ''
       }`;
       return sendResponse(req, res, messageData, errorMessage);
@@ -379,8 +379,8 @@ const chatV1 = async (req, res) => {
       body.additional_instructions ? `${body.additional_instructions}\n` : ''
     }The user has uploaded ${imageCount} image${pluralized}.
 Use the \`${ImageVisionTool.function.name}\` tool to retrieve ${
-  plural ? '' : 'a '
-}detailed text description${pluralized} for ${plural ? 'each' : 'the'} image${pluralized}.`;
+      plural ? '' : 'a '
+    }detailed text description${pluralized} for ${plural ? 'each' : 'the'} image${pluralized}.`;
 
   return files;
 };
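The change in this hunk is indentation only; the rendered instruction text is unchanged. For reference, a hypothetical evaluation of that template — 'image_vision' stands in for ImageVisionTool.function.name and is an assumption:

// Hypothetical rendering of the template above, to show the pluralization logic.
const render = (imageCount) => {
  const plural = imageCount > 1;
  const pluralized = plural ? 's' : '';
  return `The user has uploaded ${imageCount} image${pluralized}.
Use the \`image_vision\` tool to retrieve ${plural ? '' : 'a '}detailed text description${pluralized} for ${plural ? 'each' : 'the'} image${pluralized}.`;
};

console.log(render(1));
// The user has uploaded 1 image.
// Use the `image_vision` tool to retrieve a detailed text description for the image.

console.log(render(2));
// The user has uploaded 2 images.
// Use the `image_vision` tool to retrieve detailed text descriptions for each image.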
@@ -576,6 +576,8 @@ const chatV1 = async (req, res) => {
       thread_id,
       model: assistant_id,
       endpoint,
+      spec: endpointOption.spec,
+      iconURL: endpointOption.iconURL,
     };
 
     sendMessage(res, {
@@ -428,6 +428,8 @@ const chatV2 = async (req, res) => {
       thread_id,
       model: assistant_id,
       endpoint,
+      spec: endpointOption.spec,
+      iconURL: endpointOption.iconURL,
     };
 
     sendMessage(res, {
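Both chatV1 and chatV2 now thread the endpointOption's spec and iconURL into the payload sent back to the client, and the commit message notes the same fields are passed to `saveAssistantMessage` so they persist with the message. A rough sketch of that payload shape — the helper and field handling below are assumptions for illustration, not LibreChat's actual signature:

// Hypothetical illustration (not the real saveAssistantMessage signature):
// persisting spec and iconURL alongside the message keeps modelSpec branding
// on the conversation after an assistants run completes.
function buildAssistantMessagePayload({ responseMessage, endpointOption, assistant_id, thread_id, endpoint }) {
  return {
    ...responseMessage,
    thread_id,
    model: assistant_id,
    endpoint,
    spec: endpointOption.spec,       // modelSpec name selected by the user, if any
    iconURL: endpointOption.iconURL, // icon associated with that spec
  };
}

// e.g. saveAssistantMessage(req, buildAssistantMessagePayload({ ... }))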