Mirror of https://github.com/danny-avila/LibreChat.git
Synced 2025-12-18 09:20:15 +01:00
🔄 fix: Assistants Endpoint & Minor Issues (#7274)
* 🔄 fix: Include usage in stream options for OpenAI and Azure endpoints
* fix: Agents support for Azure serverless endpoints
* fix: Refactor condition for assistants and azureAssistants endpoint handling
* fix: AWS Titan via Bedrock: the model doesn't support system messages (Closes #6456)
* fix: Add EndpointSchemaKey type to endpoint parameters in buildDefaultConvo and ensure assistantId is always defined
* fix: Handle new conversation state for assistants endpoint in finalHandler
* fix: Add spec and iconURL parameters to `saveAssistantMessage` to persist modelSpec fields
* fix: Handle assistant unlinking even if no valid files to delete
* chore: Move type definitions from callbacks.js to typedefs.js
* chore: Add StandardGraph typedef to typedefs.js
* chore: Update parameter type for graph in ModelEndHandler to StandardGraph

Co-authored-by: Andres Restrepo <andres@enric.ai>
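The first item relies on a behavior of OpenAI-compatible chat completion APIs: token usage is only reported on a streamed request when `stream_options: { include_usage: true }` is sent, in which case a final chunk with an empty `choices` array carries the `usage` totals. A minimal sketch outside of LibreChat, using the official `openai` Node SDK (the model name is just a placeholder):

```js
import OpenAI from 'openai';

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

async function streamWithUsage(messages) {
  const stream = await client.chat.completions.create({
    model: 'gpt-4o-mini', // placeholder model
    messages,
    stream: true,
    stream_options: { include_usage: true },
  });

  let usage = null;
  for await (const chunk of stream) {
    // Content deltas arrive as usual; the final chunk has no choices and carries `usage`.
    if (chunk.usage) {
      usage = chunk.usage;
    }
  }
  return usage; // { prompt_tokens, completion_tokens, total_tokens }
}
```

In the diff below, the option is now set once on `modelOptions` (first hunk) rather than re-applied where the assistants stream is created (second hunk), since `params` already spreads `...modelOptions`.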
This commit is contained in:
parent
3606349a0f
commit
71105cd49c
13 changed files with 119 additions and 35 deletions
@@ -1285,6 +1285,14 @@ ${convo}
       modelOptions.messages[0].role = 'user';
     }
 
+    if (
+      (this.options.endpoint === EModelEndpoint.openAI ||
+        this.options.endpoint === EModelEndpoint.azureOpenAI) &&
+      modelOptions.stream === true
+    ) {
+      modelOptions.stream_options = { include_usage: true };
+    }
+
     if (this.options.addParams && typeof this.options.addParams === 'object') {
       const addParams = { ...this.options.addParams };
       modelOptions = {

@@ -1387,12 +1395,6 @@ ${convo}
       ...modelOptions,
       stream: true,
     };
-    if (
-      this.options.endpoint === EModelEndpoint.openAI ||
-      this.options.endpoint === EModelEndpoint.azureOpenAI
-    ) {
-      params.stream_options = { include_usage: true };
-    }
     const stream = await openai.beta.chat.completions
       .stream(params)
       .on('abort', () => {
@@ -14,15 +14,6 @@ const { loadAuthValues } = require('~/server/services/Tools/credentials');
 const { saveBase64Image } = require('~/server/services/Files/process');
 const { logger, sendEvent } = require('~/config');
 
-/** @typedef {import('@librechat/agents').Graph} Graph */
-/** @typedef {import('@librechat/agents').EventHandler} EventHandler */
-/** @typedef {import('@librechat/agents').ModelEndData} ModelEndData */
-/** @typedef {import('@librechat/agents').ToolEndData} ToolEndData */
-/** @typedef {import('@librechat/agents').ToolEndCallback} ToolEndCallback */
-/** @typedef {import('@librechat/agents').ChatModelStreamHandler} ChatModelStreamHandler */
-/** @typedef {import('@librechat/agents').ContentAggregatorResult['aggregateContent']} ContentAggregator */
-/** @typedef {import('@librechat/agents').GraphEvents} GraphEvents */
-
 class ModelEndHandler {
   /**
    * @param {Array<UsageMetadata>} collectedUsage

@@ -38,7 +29,7 @@ class ModelEndHandler {
    * @param {string} event
    * @param {ModelEndData | undefined} data
    * @param {Record<string, unknown> | undefined} metadata
-   * @param {Graph} graph
+   * @param {StandardGraph} graph
    * @returns
    */
   handle(event, data, metadata, graph) {
@@ -58,7 +58,7 @@ const payloadParser = ({ req, agent, endpoint }) => {
 
 const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deepseek]);
 
-const noSystemModelRegex = [/\b(o1-preview|o1-mini)\b/gi];
+const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];
 
 // const { processMemory, memoryInstructions } = require('~/server/services/Endpoints/agents/memory');
 // const { getFormattedMemories } = require('~/models/Memory');
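The added `amazon\.titan-text` entry ties into the Bedrock Titan item in the commit message: Titan text models reject a system role, so matching models need the system prompt delivered some other way. The helper that consumes this regex list is not part of the diff, so the following is only an illustrative sketch of the idea, with an assumed message shape:

```js
// Illustrative only; not the actual LibreChat helper that consumes noSystemModelRegex.
const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];

function supportsSystemMessages(model) {
  return !noSystemModelRegex.some((regex) => {
    regex.lastIndex = 0; // /g regexes are stateful; reset before each test
    return regex.test(model);
  });
}

function normalizeMessages(model, messages) {
  if (supportsSystemMessages(model) || messages[0]?.role !== 'system') {
    return messages;
  }
  const [system, next, ...rest] = messages;
  if (next?.role === 'user') {
    // Fold the system prompt into the first user turn.
    return [{ role: 'user', content: `${system.content}\n\n${next.content}` }, ...rest];
  }
  // No user turn to merge into; resend the system text as a user message instead.
  return [{ role: 'user', content: system.content }, next, ...rest].filter(Boolean);
}
```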
@@ -119,7 +119,7 @@ const chatV1 = async (req, res) => {
       } else if (/Files.*are invalid/.test(error.message)) {
         const errorMessage = `Files are invalid, or may not have uploaded yet.${
           endpoint === EModelEndpoint.azureAssistants
-            ? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
+            ? " If using Azure OpenAI, files are only available in the region of the assistant's model at the time of upload."
             : ''
         }`;
         return sendResponse(req, res, messageData, errorMessage);

@@ -380,7 +380,7 @@ const chatV1 = async (req, res) => {
     }The user has uploaded ${imageCount} image${pluralized}.
     Use the \`${ImageVisionTool.function.name}\` tool to retrieve ${
       plural ? '' : 'a '
     }detailed text description${pluralized} for ${plural ? 'each' : 'the'} image${pluralized}.`;
 
     return files;
   };

@@ -576,6 +576,8 @@ const chatV1 = async (req, res) => {
       thread_id,
       model: assistant_id,
       endpoint,
+      spec: endpointOption.spec,
+      iconURL: endpointOption.iconURL,
     };
 
     sendMessage(res, {
@@ -428,6 +428,8 @@ const chatV2 = async (req, res) => {
       thread_id,
       model: assistant_id,
       endpoint,
+      spec: endpointOption.spec,
+      iconURL: endpointOption.iconURL,
     };
 
     sendMessage(res, {
@@ -21,6 +21,7 @@ const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
 const { loadAuthValues } = require('~/server/services/Tools/credentials');
 const { refreshS3FileUrls } = require('~/server/services/Files/S3/crud');
 const { getFiles, batchUpdateFiles } = require('~/models/File');
+const { getAssistant } = require('~/models/Assistant');
 const { getAgent } = require('~/models/Agent');
 const { getLogStores } = require('~/cache');
 const { logger } = require('~/config');

@@ -94,7 +95,7 @@ router.delete('/', async (req, res) => {
       });
     }
 
-    /* Handle entity unlinking even if no valid files to delete */
+    /* Handle agent unlinking even if no valid files to delete */
    if (req.body.agent_id && req.body.tool_resource && dbFiles.length === 0) {
      const agent = await getAgent({
        id: req.body.agent_id,

@@ -104,7 +105,21 @@ router.delete('/', async (req, res) => {
       const agentFiles = files.filter((f) => toolResourceFiles.includes(f.file_id));
 
       await processDeleteRequest({ req, files: agentFiles });
-      res.status(200).json({ message: 'File associations removed successfully' });
+      res.status(200).json({ message: 'File associations removed successfully from agent' });
+      return;
+    }
+
+    /* Handle assistant unlinking even if no valid files to delete */
+    if (req.body.assistant_id && req.body.tool_resource && dbFiles.length === 0) {
+      const assistant = await getAssistant({
+        id: req.body.assistant_id,
+      });
+
+      const toolResourceFiles = assistant.tool_resources?.[req.body.tool_resource]?.file_ids ?? [];
+      const assistantFiles = files.filter((f) => toolResourceFiles.includes(f.file_id));
+
+      await processDeleteRequest({ req, files: assistantFiles });
+      res.status(200).json({ message: 'File associations removed successfully from assistant' });
       return;
     }
 
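The assistant branch added above mirrors the existing agent branch almost line for line. Purely as a sketch (not part of the commit), the shared pattern could be expressed with one helper that takes the route's existing `processDeleteRequest`:

```js
// Hypothetical consolidation of the two unlink branches; names follow the route above.
async function unlinkEntityFiles({ req, res, entity, entityLabel, files, processDeleteRequest }) {
  const toolResourceFiles = entity.tool_resources?.[req.body.tool_resource]?.file_ids ?? [];
  const entityFiles = files.filter((f) => toolResourceFiles.includes(f.file_id));
  await processDeleteRequest({ req, files: entityFiles });
  res.status(200).json({ message: `File associations removed successfully from ${entityLabel}` });
}
```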
@@ -233,6 +233,13 @@ const initializeAgentOptions = async ({
     endpointOption: _endpointOption,
   });
 
+  if (
+    agent.endpoint === EModelEndpoint.azureOpenAI &&
+    options.llmConfig?.azureOpenAIApiInstanceName == null
+  ) {
+    agent.provider = Providers.OPENAI;
+  }
+
   if (options.provider != null) {
     agent.provider = options.provider;
   }
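This guard is the "Agents support for Azure serverless endpoints" fix: when an Azure OpenAI agent configuration carries no `azureOpenAIApiInstanceName`, presumably because the deployment is addressed by a full serverless URL rather than an instance name, the agent falls back to the generic OpenAI provider. A reduced sketch of that decision, with stand-in constants:

```js
// Stand-in constants for illustration; LibreChat uses its own enums for these values.
const EModelEndpoint = { azureOpenAI: 'azureOpenAI' };
const Providers = { OPENAI: 'openai' };

function resolveAgentProvider(agent, llmConfig) {
  const isServerlessAzure =
    agent.endpoint === EModelEndpoint.azureOpenAI &&
    llmConfig?.azureOpenAIApiInstanceName == null;
  // Without an instance name, treat the endpoint as a generic OpenAI-compatible target.
  return isServerlessAzure ? Providers.OPENAI : agent.provider;
}
```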
@@ -3,7 +3,6 @@ const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
 const { getAssistant } = require('~/models/Assistant');
 
 const buildOptions = async (endpoint, parsedBody) => {
-
   const { promptPrefix, assistant_id, iconURL, greeting, spec, artifacts, ...modelOptions } =
     parsedBody;
   const endpointOption = removeNullishValues({
@@ -132,6 +132,8 @@ async function saveUserMessage(req, params) {
  * @param {string} params.endpoint - The conversation endpoint
  * @param {string} params.parentMessageId - The latest user message that triggered this response.
  * @param {string} [params.instructions] - Optional: from preset for `instructions` field.
+ * @param {string} [params.spec] - Optional: Model spec identifier.
+ * @param {string} [params.iconURL]
  * Overrides the instructions of the assistant.
  * @param {string} [params.promptPrefix] - Optional: from preset for `additional_instructions` field.
  * @return {Promise<Run>} A promise that resolves to the created run object.

@@ -154,6 +156,8 @@ async function saveAssistantMessage(req, params) {
     text: params.text,
     unfinished: false,
     // tokenCount,
+    iconURL: params.iconURL,
+    spec: params.spec,
   });
 
   await saveConvo(

@@ -165,6 +169,8 @@ async function saveAssistantMessage(req, params) {
       instructions: params.instructions,
       assistant_id: params.assistant_id,
       model: params.model,
+      iconURL: params.iconURL,
+      spec: params.spec,
     },
     { context: 'api/server/services/Threads/manage.js #saveAssistantMessage' },
   );
@@ -43,6 +43,60 @@
  * @memberof typedefs
  */
 
+/**
+ * @exports Graph
+ * @typedef {import('@librechat/agents').Graph} Graph
+ * @memberof typedefs
+ */
+
+/**
+ * @exports StandardGraph
+ * @typedef {import('@librechat/agents').StandardGraph} StandardGraph
+ * @memberof typedefs
+ */
+
+/**
+ * @exports EventHandler
+ * @typedef {import('@librechat/agents').EventHandler} EventHandler
+ * @memberof typedefs
+ */
+
+/**
+ * @exports ModelEndData
+ * @typedef {import('@librechat/agents').ModelEndData} ModelEndData
+ * @memberof typedefs
+ */
+
+/**
+ * @exports ToolEndData
+ * @typedef {import('@librechat/agents').ToolEndData} ToolEndData
+ * @memberof typedefs
+ */
+
+/**
+ * @exports ToolEndCallback
+ * @typedef {import('@librechat/agents').ToolEndCallback} ToolEndCallback
+ * @memberof typedefs
+ */
+
+/**
+ * @exports ChatModelStreamHandler
+ * @typedef {import('@librechat/agents').ChatModelStreamHandler} ChatModelStreamHandler
+ * @memberof typedefs
+ */
+
+/**
+ * @exports ContentAggregator
+ * @typedef {import('@librechat/agents').ContentAggregatorResult['aggregateContent']} ContentAggregator
+ * @memberof typedefs
+ */
+
+/**
+ * @exports GraphEvents
+ * @typedef {import('@librechat/agents').GraphEvents} GraphEvents
+ * @memberof typedefs
+ */
+
 /**
  * @exports AgentRun
  * @typedef {import('@librechat/agents').Run} AgentRun

@@ -97,12 +151,6 @@
  * @memberof typedefs
  */
 
-/**
- * @exports ToolEndData
- * @typedef {import('@librechat/agents').ToolEndData} ToolEndData
- * @memberof typedefs
- */
-
 /**
  * @exports BaseMessage
  * @typedef {import('@langchain/core/messages').BaseMessage} BaseMessage
@@ -55,8 +55,12 @@ export default function useSideNavLinks({
   const links: NavLink[] = [];
   if (
     isAssistantsEndpoint(endpoint) &&
+    ((endpoint === EModelEndpoint.assistants &&
     endpointsConfig?.[EModelEndpoint.assistants] &&
-    endpointsConfig[EModelEndpoint.assistants].disableBuilder !== true &&
+      endpointsConfig[EModelEndpoint.assistants].disableBuilder !== true) ||
+    (endpoint === EModelEndpoint.azureAssistants &&
+      endpointsConfig?.[EModelEndpoint.azureAssistants] &&
+      endpointsConfig[EModelEndpoint.azureAssistants].disableBuilder !== true)) &&
     keyProvided
   ) {
     links.push({
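The old condition only consulted `endpointsConfig[EModelEndpoint.assistants]`, even when the active endpoint was `azureAssistants`; the refactored condition checks whichever assistants endpoint is actually selected. A hypothetical helper expressing the same check (not part of the diff; endpoint keys assumed):

```js
// Hypothetical helper; keys mirror EModelEndpoint.assistants / EModelEndpoint.azureAssistants.
function showBuilderPanel(endpoint, endpointsConfig, keyProvided) {
  const isAssistants = endpoint === 'assistants' || endpoint === 'azureAssistants';
  const config = endpointsConfig?.[endpoint];
  return isAssistants && config != null && config.disableBuilder !== true && Boolean(keyProvided);
}
```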
@@ -467,6 +467,14 @@ export default function useEventHandlers({
         [QueryKeys.messages, conversation.conversationId],
         finalMessages,
       );
+    } else if (
+      isAssistantsEndpoint(submissionConvo.endpoint) &&
+      (!submissionConvo.conversationId || submissionConvo.conversationId === Constants.NEW_CONVO)
+    ) {
+      queryClient.setQueryData<TMessage[]>(
+        [QueryKeys.messages, conversation.conversationId],
+        [...currentMessages],
+      );
     }
 
     const isNewConvo = conversation.conversationId !== submissionConvo.conversationId;
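This addresses "Handle new conversation state for assistants endpoint in finalHandler": when an assistants submission started from a brand-new conversation (no id yet, or still the `NEW_CONVO` placeholder), the message cache for the newly assigned conversation id is seeded from the in-flight messages. A reduced sketch of the same caching call with `@tanstack/react-query` (key names are illustrative):

```js
import { QueryClient } from '@tanstack/react-query';

const queryClient = new QueryClient();

function seedConversationMessages(conversationId, currentMessages) {
  // A copied array is cached so later mutations of currentMessages don't leak into the cache.
  queryClient.setQueryData(['messages', conversationId], [...currentMessages]);
}
```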
@@ -4,7 +4,7 @@ import {
   isAssistantsEndpoint,
   isAgentsEndpoint,
 } from 'librechat-data-provider';
-import type { TConversation } from 'librechat-data-provider';
+import type { TConversation, EndpointSchemaKey } from 'librechat-data-provider';
 import { getLocalStorageItems } from './localStorage';
 
 const buildDefaultConvo = ({

@@ -51,8 +51,8 @@ const buildDefaultConvo = ({
   }
 
   const convo = parseConvo({
-    endpoint,
-    endpointType,
+    endpoint: endpoint as EndpointSchemaKey,
+    endpointType: endpointType as EndpointSchemaKey,
     conversation: lastConversationSetup,
     possibleValues: {
       models: possibleModels,

@@ -68,7 +68,7 @@ const buildDefaultConvo = ({
   };
 
   // Ensures assistant_id is always defined
-  const assistantId = convo?.assistant_id ?? '';
+  const assistantId = convo?.assistant_id ?? conversation?.assistant_id ?? '';
   const defaultAssistantId = lastConversationSetup?.assistant_id ?? '';
   if (isAssistantsEndpoint(endpoint) && !defaultAssistantId && assistantId) {
     defaultConvo.assistant_id = assistantId;