diff --git a/api/app/clients/llm/createLLM.js b/api/app/clients/llm/createLLM.js
index 020fba6503..de5fa18e77 100644
--- a/api/app/clients/llm/createLLM.js
+++ b/api/app/clients/llm/createLLM.js
@@ -2,38 +2,6 @@
 const { ChatOpenAI } = require('langchain/chat_models/openai');
 const { sanitizeModelName } = require('../../../utils');
 const { isEnabled } = require('../../../server/utils');
-/**
- * @typedef {Object} ModelOptions
- * @property {string} modelName - The name of the model.
- * @property {number} [temperature] - The temperature setting for the model.
- * @property {number} [presence_penalty] - The presence penalty setting.
- * @property {number} [frequency_penalty] - The frequency penalty setting.
- * @property {number} [max_tokens] - The maximum number of tokens to generate.
- */
-
-/**
- * @typedef {Object} ConfigOptions
- * @property {string} [basePath] - The base path for the API requests.
- * @property {Object} [baseOptions] - Base options for the API requests, including headers.
- * @property {Object} [httpAgent] - The HTTP agent for the request.
- * @property {Object} [httpsAgent] - The HTTPS agent for the request.
- */
-
-/**
- * @typedef {Object} Callbacks
- * @property {Function} [handleChatModelStart] - A callback function for handleChatModelStart
- * @property {Function} [handleLLMEnd] - A callback function for handleLLMEnd
- * @property {Function} [handleLLMError] - A callback function for handleLLMError
- */
-
-/**
- * @typedef {Object} AzureOptions
- * @property {string} [azureOpenAIApiKey] - The Azure OpenAI API key.
- * @property {string} [azureOpenAIApiInstanceName] - The Azure OpenAI API instance name.
- * @property {string} [azureOpenAIApiDeploymentName] - The Azure OpenAI API deployment name.
- * @property {string} [azureOpenAIApiVersion] - The Azure OpenAI API version.
- */
-
 /**
  * Creates a new instance of a language model (LLM) for chat interactions.
  *
@@ -96,6 +64,7 @@ function createLLM({
       configuration,
       ...azureOptions,
       ...modelOptions,
+      ...credentials,
       callbacks,
     },
     configOptions,
diff --git a/api/typedefs.js b/api/typedefs.js
index e96d7dba29..e40a097634 100644
--- a/api/typedefs.js
+++ b/api/typedefs.js
@@ -337,3 +337,39 @@
  * @property {number} order - The order of the endpoint.
  * @memberof typedefs
  */
+
+/**
+ * @typedef {Object} ModelOptions
+ * @property {string} modelName - The name of the model.
+ * @property {number} [temperature] - The temperature setting for the model.
+ * @property {number} [presence_penalty] - The presence penalty setting.
+ * @property {number} [frequency_penalty] - The frequency penalty setting.
+ * @property {number} [max_tokens] - The maximum number of tokens to generate.
+ * @memberof typedefs
+ */
+
+/**
+ * @typedef {Object} ConfigOptions
+ * @property {string} [basePath] - The base path for the API requests.
+ * @property {Object} [baseOptions] - Base options for the API requests, including headers.
+ * @property {Object} [httpAgent] - The HTTP agent for the request.
+ * @property {Object} [httpsAgent] - The HTTPS agent for the request.
+ * @memberof typedefs
+ */
+
+/**
+ * @typedef {Object} Callbacks
+ * @property {Function} [handleChatModelStart] - A callback function for handleChatModelStart
+ * @property {Function} [handleLLMEnd] - A callback function for handleLLMEnd
+ * @property {Function} [handleLLMError] - A callback function for handleLLMError
+ * @memberof typedefs
+ */
+
+/**
+ * @typedef {Object} AzureOptions
+ * @property {string} [azureOpenAIApiKey] - The Azure OpenAI API key.
+ * @property {string} [azureOpenAIApiInstanceName] - The Azure OpenAI API instance name.
+ * @property {string} [azureOpenAIApiDeploymentName] - The Azure OpenAI API deployment name.
+ * @property {string} [azureOpenAIApiVersion] - The Azure OpenAI API version.
+ * @memberof typedefs
+ */
diff --git a/client/src/hooks/useSSE.ts b/client/src/hooks/useSSE.ts
index 64b2091484..0cc7957f5b 100644
--- a/client/src/hooks/useSSE.ts
+++ b/client/src/hooks/useSSE.ts
@@ -209,16 +209,23 @@ export default function useSSE(submission: TSubmission | null, index = 0) {
   };
 
   const errorHandler = ({ data, submission }: { data?: TResData; submission: TSubmission }) => {
-    const { messages, message } = submission;
+    const { messages, message, initialResponse } = submission;
     const conversationId = message?.conversationId ?? submission?.conversationId;
 
     const parseErrorResponse = (data: TResData | Partial<TResData>) => {
       const metadata = data['responseMessage'] ?? data;
-      return tMessageSchema.parse({
+      const errorMessage = {
+        ...initialResponse,
         ...metadata,
         error: true,
         parentMessageId: message?.messageId,
-      });
+      };
+
+      if (!errorMessage.messageId) {
+        errorMessage.messageId = v4();
+      }
+
+      return tMessageSchema.parse(errorMessage);
     };
 
     if (!data) {