mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-20 10:20:15 +01:00
🤖 Assistants V2 Support: Part 1
- Separated Azure Assistants to its own endpoint - File Search / Vector Store integration is incomplete, but can toggle and use storage from playground - Code Interpreter resource files can be added but not deleted - GPT-4o is supported - Many improvements to the Assistants Endpoint overall data-provider v2 changes copy existing route as v1 chore: rename new endpoint to reduce comparison operations and add new azure filesource api: add azureAssistants part 1 force use of version for assistants/assistantsAzure chore: switch name back to azureAssistants refactor type version: string | number Ensure assistants endpoints have version set fix: isArchived type issue in ConversationListParams refactor: update assistants mutations/queries with endpoint/version definitions, update Assistants Map structure chore: FilePreview component ExtendedFile type assertion feat: isAssistantsEndpoint helper chore: remove unused useGenerations chore(buildTree): type issue chore(Advanced): type issue (unused component, maybe in future) first pass for multi-assistant endpoint rewrite fix(listAssistants): pass params correctly feat: list separate assistants by endpoint fix(useTextarea): access assistantMap correctly fix: assistant endpoint switching, resetting ID fix: broken during rewrite, selecting assistant mention fix: set/invalidate assistants endpoint query data correctly feat: Fix issue with assistant ID not being reset correctly getOpenAIClient helper function feat: add toast for assistant deletion fix: assistants delete right after create issue for azure fix: assistant patching refactor: actions to use getOpenAIClient refactor: consolidate logic into helpers file fix: issue where conversation data was not initially available v1 chat support refactor(spendTokens): only early return if completionTokens isNaN fix(OpenAIClient): ensure spendTokens has all necessary params refactor: route/controller logic fix(assistants/initializeClient): use defaultHeaders field fix: sanitize default 
operation id chore: bump openai package first pass v2 action service feat: retroactive domain parsing for actions added via v1 feat: delete db records of actions/assistants on openai assistant deletion chore: remove vision tools from v2 assistants feat: v2 upload and delete assistant vision images WIP first pass, thread attachments fix: show assistant vision files (save local/firebase copy) v2 image continue fix: annotations fix: refine annotations show analyze as error if is no longer submitting before progress reaches 1 and show file_search as retrieval tool fix: abort run, undefined endpoint issue refactor: consolidate capabilities logic and anticipate versioning frontend version 2 changes fix: query selection and filter add endpoint to unknown filepath add file ids to resource, deleting in progress enable/disable file search remove version log
This commit is contained in:
parent
f0e8cca5df
commit
2bdbff5141
118 changed files with 3358 additions and 1039 deletions
|
|
@ -756,6 +756,8 @@ class OpenAIClient extends BaseClient {
|
||||||
* In case of failure, it will return the default title, "New Chat".
|
* In case of failure, it will return the default title, "New Chat".
|
||||||
*/
|
*/
|
||||||
async titleConvo({ text, conversationId, responseText = '' }) {
|
async titleConvo({ text, conversationId, responseText = '' }) {
|
||||||
|
this.conversationId = conversationId;
|
||||||
|
|
||||||
if (this.options.attachments) {
|
if (this.options.attachments) {
|
||||||
delete this.options.attachments;
|
delete this.options.attachments;
|
||||||
}
|
}
|
||||||
|
|
@ -838,13 +840,17 @@ ${convo}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
let useChatCompletion = true;
|
let useChatCompletion = true;
|
||||||
|
|
||||||
if (this.options.reverseProxyUrl === CohereConstants.API_URL) {
|
if (this.options.reverseProxyUrl === CohereConstants.API_URL) {
|
||||||
useChatCompletion = false;
|
useChatCompletion = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
title = (
|
title = (
|
||||||
await this.sendPayload(instructionsPayload, { modelOptions, useChatCompletion })
|
await this.sendPayload(instructionsPayload, { modelOptions, useChatCompletion })
|
||||||
).replaceAll('"', '');
|
).replaceAll('"', '');
|
||||||
|
|
||||||
const completionTokens = this.getTokenCount(title);
|
const completionTokens = this.getTokenCount(title);
|
||||||
|
|
||||||
this.recordTokenUsage({ promptTokens, completionTokens, context: 'title' });
|
this.recordTokenUsage({ promptTokens, completionTokens, context: 'title' });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
logger.error(
|
logger.error(
|
||||||
|
|
@ -868,6 +874,7 @@ ${convo}
|
||||||
context: 'title',
|
context: 'title',
|
||||||
tokenBuffer: 150,
|
tokenBuffer: 150,
|
||||||
});
|
});
|
||||||
|
|
||||||
title = await runTitleChain({ llm, text, convo, signal: this.abortController.signal });
|
title = await runTitleChain({ llm, text, convo, signal: this.abortController.signal });
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
if (e?.message?.toLowerCase()?.includes('abort')) {
|
if (e?.message?.toLowerCase()?.includes('abort')) {
|
||||||
|
|
@ -1005,9 +1012,9 @@ ${convo}
|
||||||
await spendTokens(
|
await spendTokens(
|
||||||
{
|
{
|
||||||
context,
|
context,
|
||||||
user: this.user,
|
|
||||||
model: this.modelOptions.model,
|
model: this.modelOptions.model,
|
||||||
conversationId: this.conversationId,
|
conversationId: this.conversationId,
|
||||||
|
user: this.user ?? this.options.req.user.id,
|
||||||
endpointTokenConfig: this.options.endpointTokenConfig,
|
endpointTokenConfig: this.options.endpointTokenConfig,
|
||||||
},
|
},
|
||||||
{ promptTokens, completionTokens },
|
{ promptTokens, completionTokens },
|
||||||
|
|
|
||||||
|
|
@ -62,8 +62,24 @@ const deleteAction = async (searchParams, session = null) => {
|
||||||
return await Action.findOneAndDelete(searchParams, options).lean();
|
return await Action.findOneAndDelete(searchParams, options).lean();
|
||||||
};
|
};
|
||||||
|
|
||||||
module.exports = {
|
/**
|
||||||
updateAction,
|
* Deletes actions by params, within a transaction session if provided.
|
||||||
getActions,
|
*
|
||||||
deleteAction,
|
* @param {Object} searchParams - The search parameters to find the actions to delete.
|
||||||
|
* @param {string} searchParams.action_id - The ID of the action(s) to delete.
|
||||||
|
* @param {string} searchParams.user - The user ID of the action's author.
|
||||||
|
* @param {mongoose.ClientSession} [session] - The transaction session to use (optional).
|
||||||
|
* @returns {Promise<Number>} A promise that resolves to the number of deleted action documents.
|
||||||
|
*/
|
||||||
|
const deleteActions = async (searchParams, session = null) => {
|
||||||
|
const options = session ? { session } : {};
|
||||||
|
const result = await Action.deleteMany(searchParams, options);
|
||||||
|
return result.deletedCount;
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getActions,
|
||||||
|
updateAction,
|
||||||
|
deleteAction,
|
||||||
|
deleteActions,
|
||||||
};
|
};
|
||||||
|
|
|
||||||
|
|
@ -39,8 +39,21 @@ const getAssistants = async (searchParams) => {
|
||||||
return await Assistant.find(searchParams).lean();
|
return await Assistant.find(searchParams).lean();
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deletes an assistant based on the provided ID.
|
||||||
|
*
|
||||||
|
* @param {Object} searchParams - The search parameters to find the assistant to delete.
|
||||||
|
* @param {string} searchParams.assistant_id - The ID of the assistant to delete.
|
||||||
|
* @param {string} searchParams.user - The user ID of the assistant's author.
|
||||||
|
* @returns {Promise<void>} Resolves when the assistant has been successfully deleted.
|
||||||
|
*/
|
||||||
|
const deleteAssistant = async (searchParams) => {
|
||||||
|
return await Assistant.findOneAndDelete(searchParams);
|
||||||
|
};
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
updateAssistant,
|
updateAssistant,
|
||||||
|
deleteAssistant,
|
||||||
getAssistants,
|
getAssistants,
|
||||||
getAssistant,
|
getAssistant,
|
||||||
};
|
};
|
||||||
|
|
|
||||||
|
|
@ -40,7 +40,7 @@ const spendTokens = async (txData, tokenUsage) => {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!completionTokens) {
|
if (!completionTokens && isNaN(completionTokens)) {
|
||||||
logger.debug('[spendTokens] !completionTokens', { prompt, completion });
|
logger.debug('[spendTokens] !completionTokens', { prompt, completion });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -76,7 +76,7 @@
|
||||||
"nodejs-gpt": "^1.37.4",
|
"nodejs-gpt": "^1.37.4",
|
||||||
"nodemailer": "^6.9.4",
|
"nodemailer": "^6.9.4",
|
||||||
"ollama": "^0.5.0",
|
"ollama": "^0.5.0",
|
||||||
"openai": "4.36.0",
|
"openai": "^4.47.1",
|
||||||
"openai-chat-tokens": "^0.2.8",
|
"openai-chat-tokens": "^0.2.8",
|
||||||
"openid-client": "^5.4.2",
|
"openid-client": "^5.4.2",
|
||||||
"passport": "^0.6.0",
|
"passport": "^0.6.0",
|
||||||
|
|
|
||||||
|
|
@ -16,10 +16,28 @@ async function endpointController(req, res) {
|
||||||
/** @type {TEndpointsConfig} */
|
/** @type {TEndpointsConfig} */
|
||||||
const mergedConfig = { ...defaultEndpointsConfig, ...customConfigEndpoints };
|
const mergedConfig = { ...defaultEndpointsConfig, ...customConfigEndpoints };
|
||||||
if (mergedConfig[EModelEndpoint.assistants] && req.app.locals?.[EModelEndpoint.assistants]) {
|
if (mergedConfig[EModelEndpoint.assistants] && req.app.locals?.[EModelEndpoint.assistants]) {
|
||||||
const { disableBuilder, retrievalModels, capabilities, ..._rest } =
|
const { disableBuilder, retrievalModels, capabilities, version, ..._rest } =
|
||||||
req.app.locals[EModelEndpoint.assistants];
|
req.app.locals[EModelEndpoint.assistants];
|
||||||
|
|
||||||
mergedConfig[EModelEndpoint.assistants] = {
|
mergedConfig[EModelEndpoint.assistants] = {
|
||||||
...mergedConfig[EModelEndpoint.assistants],
|
...mergedConfig[EModelEndpoint.assistants],
|
||||||
|
version,
|
||||||
|
retrievalModels,
|
||||||
|
disableBuilder,
|
||||||
|
capabilities,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
mergedConfig[EModelEndpoint.azureAssistants] &&
|
||||||
|
req.app.locals?.[EModelEndpoint.azureAssistants]
|
||||||
|
) {
|
||||||
|
const { disableBuilder, retrievalModels, capabilities, version, ..._rest } =
|
||||||
|
req.app.locals[EModelEndpoint.azureAssistants];
|
||||||
|
|
||||||
|
mergedConfig[EModelEndpoint.azureAssistants] = {
|
||||||
|
...mergedConfig[EModelEndpoint.azureAssistants],
|
||||||
|
version,
|
||||||
retrievalModels,
|
retrievalModels,
|
||||||
disableBuilder,
|
disableBuilder,
|
||||||
capabilities,
|
capabilities,
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,4 @@
|
||||||
const { v4 } = require('uuid');
|
const { v4 } = require('uuid');
|
||||||
const express = require('express');
|
|
||||||
const {
|
const {
|
||||||
Constants,
|
Constants,
|
||||||
RunStatus,
|
RunStatus,
|
||||||
|
|
@ -21,27 +20,18 @@ const {
|
||||||
} = require('~/server/services/Threads');
|
} = require('~/server/services/Threads');
|
||||||
const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
|
const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
|
||||||
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
|
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
|
||||||
const { addTitle, initializeClient } = require('~/server/services/Endpoints/assistants');
|
|
||||||
const { formatMessage, createVisionPrompt } = require('~/app/clients/prompts');
|
const { formatMessage, createVisionPrompt } = require('~/app/clients/prompts');
|
||||||
const { createRun, StreamRunManager } = require('~/server/services/Runs');
|
const { createRun, StreamRunManager } = require('~/server/services/Runs');
|
||||||
|
const { addTitle } = require('~/server/services/Endpoints/assistants');
|
||||||
const { getTransactions } = require('~/models/Transaction');
|
const { getTransactions } = require('~/models/Transaction');
|
||||||
const checkBalance = require('~/models/checkBalance');
|
const checkBalance = require('~/models/checkBalance');
|
||||||
const { getConvo } = require('~/models/Conversation');
|
const { getConvo } = require('~/models/Conversation');
|
||||||
const getLogStores = require('~/cache/getLogStores');
|
const getLogStores = require('~/cache/getLogStores');
|
||||||
const { getModelMaxTokens } = require('~/utils');
|
const { getModelMaxTokens } = require('~/utils');
|
||||||
|
const { getOpenAIClient } = require('./helpers');
|
||||||
const { logger } = require('~/config');
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
const router = express.Router();
|
const { handleAbortError } = require('~/server/middleware');
|
||||||
const {
|
|
||||||
setHeaders,
|
|
||||||
handleAbort,
|
|
||||||
validateModel,
|
|
||||||
handleAbortError,
|
|
||||||
// validateEndpoint,
|
|
||||||
buildEndpointOption,
|
|
||||||
} = require('~/server/middleware');
|
|
||||||
|
|
||||||
router.post('/abort', handleAbort());
|
|
||||||
|
|
||||||
const ten_minutes = 1000 * 60 * 10;
|
const ten_minutes = 1000 * 60 * 10;
|
||||||
|
|
||||||
|
|
@ -49,16 +39,17 @@ const ten_minutes = 1000 * 60 * 10;
|
||||||
* @route POST /
|
* @route POST /
|
||||||
* @desc Chat with an assistant
|
* @desc Chat with an assistant
|
||||||
* @access Public
|
* @access Public
|
||||||
* @param {express.Request} req - The request object, containing the request data.
|
* @param {Express.Request} req - The request object, containing the request data.
|
||||||
* @param {express.Response} res - The response object, used to send back a response.
|
* @param {Express.Response} res - The response object, used to send back a response.
|
||||||
* @returns {void}
|
* @returns {void}
|
||||||
*/
|
*/
|
||||||
router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res) => {
|
const chatV2 = async (req, res) => {
|
||||||
logger.debug('[/assistants/chat/] req.body', req.body);
|
logger.debug('[/assistants/chat/] req.body', req.body);
|
||||||
|
|
||||||
const {
|
const {
|
||||||
text,
|
text,
|
||||||
model,
|
model,
|
||||||
|
endpoint,
|
||||||
files = [],
|
files = [],
|
||||||
promptPrefix,
|
promptPrefix,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
|
|
@ -70,7 +61,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||||
} = req.body;
|
} = req.body;
|
||||||
|
|
||||||
/** @type {Partial<TAssistantEndpoint>} */
|
/** @type {Partial<TAssistantEndpoint>} */
|
||||||
const assistantsConfig = req.app.locals?.[EModelEndpoint.assistants];
|
const assistantsConfig = req.app.locals?.[endpoint];
|
||||||
|
|
||||||
if (assistantsConfig) {
|
if (assistantsConfig) {
|
||||||
const { supportedIds, excludedIds } = assistantsConfig;
|
const { supportedIds, excludedIds } = assistantsConfig;
|
||||||
|
|
@ -138,7 +129,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||||
user: req.user.id,
|
user: req.user.id,
|
||||||
shouldSaveMessage: false,
|
shouldSaveMessage: false,
|
||||||
messageId: responseMessageId,
|
messageId: responseMessageId,
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (error.message === 'Run cancelled') {
|
if (error.message === 'Run cancelled') {
|
||||||
|
|
@ -149,7 +140,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||||
logger.debug('[/assistants/chat/] Request aborted on close');
|
logger.debug('[/assistants/chat/] Request aborted on close');
|
||||||
} else if (/Files.*are invalid/.test(error.message)) {
|
} else if (/Files.*are invalid/.test(error.message)) {
|
||||||
const errorMessage = `Files are invalid, or may not have uploaded yet.${
|
const errorMessage = `Files are invalid, or may not have uploaded yet.${
|
||||||
req.app.locals?.[EModelEndpoint.azureOpenAI].assistants
|
endpoint === EModelEndpoint.azureAssistants
|
||||||
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
|
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
|
||||||
: ''
|
: ''
|
||||||
}`;
|
}`;
|
||||||
|
|
@ -205,6 +196,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||||
const runMessages = await checkMessageGaps({
|
const runMessages = await checkMessageGaps({
|
||||||
openai,
|
openai,
|
||||||
run_id,
|
run_id,
|
||||||
|
endpoint,
|
||||||
thread_id,
|
thread_id,
|
||||||
conversationId,
|
conversationId,
|
||||||
latestMessageId: responseMessageId,
|
latestMessageId: responseMessageId,
|
||||||
|
|
@ -311,8 +303,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
/** @type {{ openai: OpenAIClient }} */
|
const { openai: _openai, client } = await getOpenAIClient({
|
||||||
const { openai: _openai, client } = await initializeClient({
|
|
||||||
req,
|
req,
|
||||||
res,
|
res,
|
||||||
endpointOption: req.body.endpointOption,
|
endpointOption: req.body.endpointOption,
|
||||||
|
|
@ -467,6 +458,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||||
assistant_id,
|
assistant_id,
|
||||||
thread_id,
|
thread_id,
|
||||||
model: assistant_id,
|
model: assistant_id,
|
||||||
|
endpoint,
|
||||||
};
|
};
|
||||||
|
|
||||||
previousMessages.push(requestMessage);
|
previousMessages.push(requestMessage);
|
||||||
|
|
@ -476,7 +468,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||||
|
|
||||||
conversation = {
|
conversation = {
|
||||||
conversationId,
|
conversationId,
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint,
|
||||||
promptPrefix: promptPrefix,
|
promptPrefix: promptPrefix,
|
||||||
instructions: instructions,
|
instructions: instructions,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
|
|
@ -513,7 +505,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||||
let response;
|
let response;
|
||||||
|
|
||||||
const processRun = async (retry = false) => {
|
const processRun = async (retry = false) => {
|
||||||
if (req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
|
if (endpoint === EModelEndpoint.azureAssistants) {
|
||||||
body.model = openai._options.model;
|
body.model = openai._options.model;
|
||||||
openai.attachedFileIds = attachedFileIds;
|
openai.attachedFileIds = attachedFileIds;
|
||||||
openai.visionPromise = visionPromise;
|
openai.visionPromise = visionPromise;
|
||||||
|
|
@ -603,6 +595,7 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||||
assistant_id,
|
assistant_id,
|
||||||
thread_id,
|
thread_id,
|
||||||
model: assistant_id,
|
model: assistant_id,
|
||||||
|
endpoint,
|
||||||
};
|
};
|
||||||
|
|
||||||
sendMessage(res, {
|
sendMessage(res, {
|
||||||
|
|
@ -655,6 +648,6 @@ router.post('/', validateModel, buildEndpointOption, setHeaders, async (req, res
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
await handleError(error);
|
await handleError(error);
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
|
||||||
module.exports = router;
|
module.exports = chatV2;
|
||||||
618
api/server/controllers/assistants/chatV2.js
Normal file
618
api/server/controllers/assistants/chatV2.js
Normal file
|
|
@ -0,0 +1,618 @@
|
||||||
|
const { v4 } = require('uuid');
|
||||||
|
const {
|
||||||
|
Constants,
|
||||||
|
RunStatus,
|
||||||
|
CacheKeys,
|
||||||
|
ContentTypes,
|
||||||
|
ToolCallTypes,
|
||||||
|
EModelEndpoint,
|
||||||
|
ViolationTypes,
|
||||||
|
retrievalMimeTypes,
|
||||||
|
AssistantStreamEvents,
|
||||||
|
} = require('librechat-data-provider');
|
||||||
|
const {
|
||||||
|
initThread,
|
||||||
|
recordUsage,
|
||||||
|
saveUserMessage,
|
||||||
|
checkMessageGaps,
|
||||||
|
addThreadMetadata,
|
||||||
|
saveAssistantMessage,
|
||||||
|
} = require('~/server/services/Threads');
|
||||||
|
const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
|
||||||
|
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
|
||||||
|
const { createRun, StreamRunManager } = require('~/server/services/Runs');
|
||||||
|
const { addTitle } = require('~/server/services/Endpoints/assistants');
|
||||||
|
const { getTransactions } = require('~/models/Transaction');
|
||||||
|
const checkBalance = require('~/models/checkBalance');
|
||||||
|
const { getConvo } = require('~/models/Conversation');
|
||||||
|
const getLogStores = require('~/cache/getLogStores');
|
||||||
|
const { getModelMaxTokens } = require('~/utils');
|
||||||
|
const { getOpenAIClient } = require('./helpers');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
|
const { handleAbortError } = require('~/server/middleware');
|
||||||
|
|
||||||
|
const ten_minutes = 1000 * 60 * 10;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @route POST /
|
||||||
|
* @desc Chat with an assistant
|
||||||
|
* @access Public
|
||||||
|
* @param {Express.Request} req - The request object, containing the request data.
|
||||||
|
* @param {Express.Response} res - The response object, used to send back a response.
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
const chatV2 = async (req, res) => {
|
||||||
|
logger.debug('[/assistants/chat/] req.body', req.body);
|
||||||
|
|
||||||
|
/** @type {{ files: MongoFile[]}} */
|
||||||
|
const {
|
||||||
|
text,
|
||||||
|
model,
|
||||||
|
endpoint,
|
||||||
|
files = [],
|
||||||
|
promptPrefix,
|
||||||
|
assistant_id,
|
||||||
|
instructions,
|
||||||
|
thread_id: _thread_id,
|
||||||
|
messageId: _messageId,
|
||||||
|
conversationId: convoId,
|
||||||
|
parentMessageId: _parentId = Constants.NO_PARENT,
|
||||||
|
} = req.body;
|
||||||
|
|
||||||
|
/** @type {Partial<TAssistantEndpoint>} */
|
||||||
|
const assistantsConfig = req.app.locals?.[endpoint];
|
||||||
|
|
||||||
|
if (assistantsConfig) {
|
||||||
|
const { supportedIds, excludedIds } = assistantsConfig;
|
||||||
|
const error = { message: 'Assistant not supported' };
|
||||||
|
if (supportedIds?.length && !supportedIds.includes(assistant_id)) {
|
||||||
|
return await handleAbortError(res, req, error, {
|
||||||
|
sender: 'System',
|
||||||
|
conversationId: convoId,
|
||||||
|
messageId: v4(),
|
||||||
|
parentMessageId: _messageId,
|
||||||
|
error,
|
||||||
|
});
|
||||||
|
} else if (excludedIds?.length && excludedIds.includes(assistant_id)) {
|
||||||
|
return await handleAbortError(res, req, error, {
|
||||||
|
sender: 'System',
|
||||||
|
conversationId: convoId,
|
||||||
|
messageId: v4(),
|
||||||
|
parentMessageId: _messageId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** @type {OpenAIClient} */
|
||||||
|
let openai;
|
||||||
|
/** @type {string|undefined} - the current thread id */
|
||||||
|
let thread_id = _thread_id;
|
||||||
|
/** @type {string|undefined} - the current run id */
|
||||||
|
let run_id;
|
||||||
|
/** @type {string|undefined} - the parent messageId */
|
||||||
|
let parentMessageId = _parentId;
|
||||||
|
/** @type {TMessage[]} */
|
||||||
|
let previousMessages = [];
|
||||||
|
/** @type {import('librechat-data-provider').TConversation | null} */
|
||||||
|
let conversation = null;
|
||||||
|
/** @type {string[]} */
|
||||||
|
let file_ids = [];
|
||||||
|
/** @type {Set<string>} */
|
||||||
|
let attachedFileIds = new Set();
|
||||||
|
/** @type {TMessage | null} */
|
||||||
|
let requestMessage = null;
|
||||||
|
|
||||||
|
const userMessageId = v4();
|
||||||
|
const responseMessageId = v4();
|
||||||
|
|
||||||
|
/** @type {string} - The conversation UUID - created if undefined */
|
||||||
|
const conversationId = convoId ?? v4();
|
||||||
|
|
||||||
|
const cache = getLogStores(CacheKeys.ABORT_KEYS);
|
||||||
|
const cacheKey = `${req.user.id}:${conversationId}`;
|
||||||
|
|
||||||
|
/** @type {Run | undefined} - The completed run, undefined if incomplete */
|
||||||
|
let completedRun;
|
||||||
|
|
||||||
|
const handleError = async (error) => {
|
||||||
|
const defaultErrorMessage =
|
||||||
|
'The Assistant run failed to initialize. Try sending a message in a new conversation.';
|
||||||
|
const messageData = {
|
||||||
|
thread_id,
|
||||||
|
assistant_id,
|
||||||
|
conversationId,
|
||||||
|
parentMessageId,
|
||||||
|
sender: 'System',
|
||||||
|
user: req.user.id,
|
||||||
|
shouldSaveMessage: false,
|
||||||
|
messageId: responseMessageId,
|
||||||
|
endpoint,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (error.message === 'Run cancelled') {
|
||||||
|
return res.end();
|
||||||
|
} else if (error.message === 'Request closed' && completedRun) {
|
||||||
|
return;
|
||||||
|
} else if (error.message === 'Request closed') {
|
||||||
|
logger.debug('[/assistants/chat/] Request aborted on close');
|
||||||
|
} else if (/Files.*are invalid/.test(error.message)) {
|
||||||
|
const errorMessage = `Files are invalid, or may not have uploaded yet.${
|
||||||
|
endpoint === EModelEndpoint.azureAssistants
|
||||||
|
? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.'
|
||||||
|
: ''
|
||||||
|
}`;
|
||||||
|
return sendResponse(res, messageData, errorMessage);
|
||||||
|
} else if (error?.message?.includes('string too long')) {
|
||||||
|
return sendResponse(
|
||||||
|
res,
|
||||||
|
messageData,
|
||||||
|
'Message too long. The Assistants API has a limit of 32,768 characters per message. Please shorten it and try again.',
|
||||||
|
);
|
||||||
|
} else if (error?.message?.includes(ViolationTypes.TOKEN_BALANCE)) {
|
||||||
|
return sendResponse(res, messageData, error.message);
|
||||||
|
} else {
|
||||||
|
logger.error('[/assistants/chat/]', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!openai || !thread_id || !run_id) {
|
||||||
|
return sendResponse(res, messageData, defaultErrorMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
await sleep(2000);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const status = await cache.get(cacheKey);
|
||||||
|
if (status === 'cancelled') {
|
||||||
|
logger.debug('[/assistants/chat/] Run already cancelled');
|
||||||
|
return res.end();
|
||||||
|
}
|
||||||
|
await cache.delete(cacheKey);
|
||||||
|
const cancelledRun = await openai.beta.threads.runs.cancel(thread_id, run_id);
|
||||||
|
logger.debug('[/assistants/chat/] Cancelled run:', cancelledRun);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[/assistants/chat/] Error cancelling run', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
await sleep(2000);
|
||||||
|
|
||||||
|
let run;
|
||||||
|
try {
|
||||||
|
run = await openai.beta.threads.runs.retrieve(thread_id, run_id);
|
||||||
|
await recordUsage({
|
||||||
|
...run.usage,
|
||||||
|
model: run.model,
|
||||||
|
user: req.user.id,
|
||||||
|
conversationId,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[/assistants/chat/] Error fetching or processing run', error);
|
||||||
|
}
|
||||||
|
|
||||||
|
let finalEvent;
|
||||||
|
try {
|
||||||
|
const runMessages = await checkMessageGaps({
|
||||||
|
openai,
|
||||||
|
run_id,
|
||||||
|
endpoint,
|
||||||
|
thread_id,
|
||||||
|
conversationId,
|
||||||
|
latestMessageId: responseMessageId,
|
||||||
|
});
|
||||||
|
|
||||||
|
const errorContentPart = {
|
||||||
|
text: {
|
||||||
|
value:
|
||||||
|
error?.message ?? 'There was an error processing your request. Please try again later.',
|
||||||
|
},
|
||||||
|
type: ContentTypes.ERROR,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!Array.isArray(runMessages[runMessages.length - 1]?.content)) {
|
||||||
|
runMessages[runMessages.length - 1].content = [errorContentPart];
|
||||||
|
} else {
|
||||||
|
const contentParts = runMessages[runMessages.length - 1].content;
|
||||||
|
for (let i = 0; i < contentParts.length; i++) {
|
||||||
|
const currentPart = contentParts[i];
|
||||||
|
/** @type {CodeToolCall | RetrievalToolCall | FunctionToolCall | undefined} */
|
||||||
|
const toolCall = currentPart?.[ContentTypes.TOOL_CALL];
|
||||||
|
if (
|
||||||
|
toolCall &&
|
||||||
|
toolCall?.function &&
|
||||||
|
!(toolCall?.function?.output || toolCall?.function?.output?.length)
|
||||||
|
) {
|
||||||
|
contentParts[i] = {
|
||||||
|
...currentPart,
|
||||||
|
[ContentTypes.TOOL_CALL]: {
|
||||||
|
...toolCall,
|
||||||
|
function: {
|
||||||
|
...toolCall.function,
|
||||||
|
output: 'error processing tool',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
runMessages[runMessages.length - 1].content.push(errorContentPart);
|
||||||
|
}
|
||||||
|
|
||||||
|
finalEvent = {
|
||||||
|
final: true,
|
||||||
|
conversation: await getConvo(req.user.id, conversationId),
|
||||||
|
runMessages,
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[/assistants/chat/] Error finalizing error process', error);
|
||||||
|
return sendResponse(res, messageData, 'The Assistant run failed');
|
||||||
|
}
|
||||||
|
|
||||||
|
return sendResponse(res, finalEvent);
|
||||||
|
};
|
||||||
|
|
||||||
|
try {
|
||||||
|
res.on('close', async () => {
|
||||||
|
if (!completedRun) {
|
||||||
|
await handleError(new Error('Request closed'));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if (convoId && !_thread_id) {
|
||||||
|
completedRun = true;
|
||||||
|
throw new Error('Missing thread_id for existing conversation');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!assistant_id) {
|
||||||
|
completedRun = true;
|
||||||
|
throw new Error('Missing assistant_id');
|
||||||
|
}
|
||||||
|
|
||||||
|
const checkBalanceBeforeRun = async () => {
|
||||||
|
if (!isEnabled(process.env.CHECK_BALANCE)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const transactions =
|
||||||
|
(await getTransactions({
|
||||||
|
user: req.user.id,
|
||||||
|
context: 'message',
|
||||||
|
conversationId,
|
||||||
|
})) ?? [];
|
||||||
|
|
||||||
|
const totalPreviousTokens = Math.abs(
|
||||||
|
transactions.reduce((acc, curr) => acc + curr.rawAmount, 0),
|
||||||
|
);
|
||||||
|
|
||||||
|
// TODO: make promptBuffer a config option; buffer for titles, needs buffer for system instructions
|
||||||
|
const promptBuffer = parentMessageId === Constants.NO_PARENT && !_thread_id ? 200 : 0;
|
||||||
|
// 5 is added for labels
|
||||||
|
let promptTokens = (await countTokens(text + (promptPrefix ?? ''))) + 5;
|
||||||
|
promptTokens += totalPreviousTokens + promptBuffer;
|
||||||
|
// Count tokens up to the current context window
|
||||||
|
promptTokens = Math.min(promptTokens, getModelMaxTokens(model));
|
||||||
|
|
||||||
|
await checkBalance({
|
||||||
|
req,
|
||||||
|
res,
|
||||||
|
txData: {
|
||||||
|
model,
|
||||||
|
user: req.user.id,
|
||||||
|
tokenType: 'prompt',
|
||||||
|
amount: promptTokens,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
const { openai: _openai, client } = await getOpenAIClient({
|
||||||
|
req,
|
||||||
|
res,
|
||||||
|
endpointOption: req.body.endpointOption,
|
||||||
|
initAppClient: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
openai = _openai;
|
||||||
|
|
||||||
|
if (previousMessages.length) {
|
||||||
|
parentMessageId = previousMessages[previousMessages.length - 1].messageId;
|
||||||
|
}
|
||||||
|
|
||||||
|
let userMessage = {
|
||||||
|
role: 'user',
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: ContentTypes.TEXT,
|
||||||
|
text,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
metadata: {
|
||||||
|
messageId: userMessageId,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
/** @type {CreateRunBody | undefined} */
|
||||||
|
const body = {
|
||||||
|
assistant_id,
|
||||||
|
model,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (promptPrefix) {
|
||||||
|
body.additional_instructions = promptPrefix;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (instructions) {
|
||||||
|
body.instructions = instructions;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Gathers file IDs for the request: merges files attached to the current
 * message with any `file_ids` already stored on the conversation, appends
 * image content parts to the user message, and creates one attachment
 * entry per file (code interpreter always; file search when the MIME
 * type is supported by retrieval).
 *
 * Fix: the original tracked attachments with a separate `attachmentIndex`
 * that was NOT incremented for image files (the loop `continue`d before
 * the increment) even though an attachment was pushed for every file, so
 * a later FILE_SEARCH tool could be pushed onto an earlier (image)
 * attachment. Tools are now pushed onto the attachment created for the
 * current file.
 */
const getRequestFileIds = async () => {
  let thread_file_ids = [];
  if (convoId) {
    const convo = await getConvo(req.user.id, convoId);
    if (convo && convo.file_ids) {
      thread_file_ids = convo.file_ids;
    }
  }

  if (files.length || thread_file_ids.length) {
    attachedFileIds = new Set([...file_ids, ...thread_file_ids]);

    for (const file of files) {
      file_ids.push(file.file_id);
      if (file.type.startsWith('image')) {
        userMessage.content.push({
          type: ContentTypes.IMAGE_FILE,
          [ContentTypes.IMAGE_FILE]: { file_id: file.file_id },
        });
      }

      if (!userMessage.attachments) {
        userMessage.attachments = [];
      }

      const attachment = {
        file_id: file.file_id,
        tools: [{ type: ToolCallTypes.CODE_INTERPRETER }],
      };
      userMessage.attachments.push(attachment);

      // Images are not valid file-search inputs; skip the retrieval check.
      if (file.type.startsWith('image')) {
        continue;
      }

      const mimeType = file.type;
      const isSupportedByRetrieval = retrievalMimeTypes.some((regex) => regex.test(mimeType));
      if (isSupportedByRetrieval) {
        // Push onto the attachment created for THIS file (not a stale index).
        attachment.tools.push({
          type: ToolCallTypes.FILE_SEARCH,
        });
      }
    }
  }
};
|
||||||
|
|
||||||
|
/**
 * Initializes (or reuses) the OpenAI thread for this conversation:
 * resolves request file IDs, creates the thread seeded with the user
 * message, wires up streaming text progress, records the request message
 * (saved asynchronously), and builds the conversation object.
 */
const initializeThread = async () => {
  await getRequestFileIds();

  // TODO: may allow multiple messages to be created beforehand in a future update
  const initThreadBody = {
    messages: [userMessage],
    metadata: {
      user: req.user.id,
      conversationId,
    },
  };

  const result = await initThread({ openai, body: initThreadBody, thread_id });
  thread_id = result.thread_id;

  createOnTextProgress({
    openai,
    conversationId,
    userMessageId,
    messageId: responseMessageId,
    thread_id,
  });

  requestMessage = {
    user: req.user.id,
    text,
    messageId: userMessageId,
    parentMessageId,
    // TODO: make sure client sends correct format for `files`, use zod
    files,
    file_ids,
    conversationId,
    isCreatedByUser: true,
    assistant_id,
    thread_id,
    model: assistant_id,
    endpoint,
  };

  previousMessages.push(requestMessage);

  /* asynchronous */
  saveUserMessage({ ...requestMessage, model });

  conversation = {
    conversationId,
    endpoint,
    promptPrefix,
    instructions,
    assistant_id,
    // model,
  };

  if (file_ids.length) {
    conversation.file_ids = file_ids;
  }
};
|
||||||
|
|
||||||
|
const promises = [initializeThread(), checkBalanceBeforeRun()];
|
||||||
|
await Promise.all(promises);
|
||||||
|
|
||||||
|
/**
 * Sends the initial sync payload to the client so the UI can render the
 * request message and a placeholder response message before streaming.
 */
const sendInitialResponse = () => {
  const responseMessage = {
    user: req.user.id,
    messageId: openai.responseMessage.messageId,
    parentMessageId: userMessageId,
    conversationId,
    assistant_id,
    thread_id,
    model: assistant_id,
  };

  sendMessage(res, {
    sync: true,
    conversationId,
    // messages: previousMessages,
    requestMessage,
    responseMessage,
  });
};
|
||||||
|
|
||||||
|
/** @type {RunResponse | typeof StreamRunManager | undefined} */
|
||||||
|
let response;
|
||||||
|
|
||||||
|
/**
 * Executes the assistant run. Azure assistants use the non-streaming
 * `runAssistant` flow (with a retry path that resumes an in-progress run);
 * all other endpoints stream via `StreamRunManager`.
 *
 * @param {boolean} [retry=false] - Resume an existing Azure run instead of creating one.
 */
const processRun = async (retry = false) => {
  if (endpoint === EModelEndpoint.azureAssistants) {
    body.model = openai._options.model;
    openai.attachedFileIds = attachedFileIds;

    if (retry) {
      response = await runAssistant({
        openai,
        thread_id,
        run_id,
        in_progress: openai.in_progress,
      });
      return;
    }

    /* NOTE:
     * By default, a Run will use the model and tools configuration specified in Assistant object,
     * but you can override most of these when creating the Run for added flexibility:
     */
    const run = await createRun({ openai, thread_id, body });

    run_id = run.id;
    await cache.set(cacheKey, `${thread_id}:${run_id}`, ten_minutes);
    sendInitialResponse();

    // todo: retry logic
    response = await runAssistant({ openai, thread_id, run_id });
    return;
  }

  /** @type {{[AssistantStreamEvents.ThreadRunCreated]: (event: ThreadRunCreated) => Promise<void>}} */
  const handlers = {
    [AssistantStreamEvents.ThreadRunCreated]: async (event) => {
      await cache.set(cacheKey, `${thread_id}:${event.data.id}`, ten_minutes);
      run_id = event.data.id;
      sendInitialResponse();
    },
  };

  const streamRunManager = new StreamRunManager({
    req,
    res,
    openai,
    handlers,
    thread_id,
    attachedFileIds,
    responseMessage: openai.responseMessage,
    // streamOptions: {
    // },
  });

  await streamRunManager.runAssistant({ thread_id, body });

  response = streamRunManager;
};
|
||||||
|
|
||||||
|
await processRun();
|
||||||
|
logger.debug('[/assistants/chat/] response', {
|
||||||
|
run: response.run,
|
||||||
|
steps: response.steps,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.run.status === RunStatus.CANCELLED) {
|
||||||
|
logger.debug('[/assistants/chat/] Run cancelled, handled by `abortRun`');
|
||||||
|
return res.end();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (response.run.status === RunStatus.IN_PROGRESS) {
|
||||||
|
processRun(true);
|
||||||
|
}
|
||||||
|
|
||||||
|
completedRun = response.run;
|
||||||
|
|
||||||
|
/** @type {ResponseMessage} */
|
||||||
|
const responseMessage = {
|
||||||
|
...(response.responseMessage ?? response.finalMessage),
|
||||||
|
parentMessageId: userMessageId,
|
||||||
|
conversationId,
|
||||||
|
user: req.user.id,
|
||||||
|
assistant_id,
|
||||||
|
thread_id,
|
||||||
|
model: assistant_id,
|
||||||
|
endpoint,
|
||||||
|
};
|
||||||
|
|
||||||
|
sendMessage(res, {
|
||||||
|
final: true,
|
||||||
|
conversation,
|
||||||
|
requestMessage: {
|
||||||
|
parentMessageId,
|
||||||
|
thread_id,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
res.end();
|
||||||
|
|
||||||
|
await saveAssistantMessage({ ...responseMessage, model });
|
||||||
|
|
||||||
|
if (parentMessageId === Constants.NO_PARENT && !_thread_id) {
|
||||||
|
addTitle(req, {
|
||||||
|
text,
|
||||||
|
responseText: response.text,
|
||||||
|
conversationId,
|
||||||
|
client,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
await addThreadMetadata({
|
||||||
|
openai,
|
||||||
|
thread_id,
|
||||||
|
messageId: responseMessage.messageId,
|
||||||
|
messages: response.messages,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.run.usage) {
|
||||||
|
await sleep(3000);
|
||||||
|
completedRun = await openai.beta.threads.runs.retrieve(thread_id, response.run.id);
|
||||||
|
if (completedRun.usage) {
|
||||||
|
await recordUsage({
|
||||||
|
...completedRun.usage,
|
||||||
|
user: req.user.id,
|
||||||
|
model: completedRun.model ?? model,
|
||||||
|
conversationId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
await recordUsage({
|
||||||
|
...response.run.usage,
|
||||||
|
user: req.user.id,
|
||||||
|
model: response.run.model ?? model,
|
||||||
|
conversationId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
await handleError(error);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = chatV2;
|
||||||
168
api/server/controllers/assistants/helpers.js
Normal file
168
api/server/controllers/assistants/helpers.js
Normal file
|
|
@ -0,0 +1,168 @@
|
||||||
|
const {
|
||||||
|
EModelEndpoint,
|
||||||
|
FileSources,
|
||||||
|
CacheKeys,
|
||||||
|
defaultAssistantsVersion,
|
||||||
|
} = require('librechat-data-provider');
|
||||||
|
const {
|
||||||
|
initializeClient: initAzureClient,
|
||||||
|
} = require('~/server/services/Endpoints/azureAssistants');
|
||||||
|
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
||||||
|
const { getLogStores } = require('~/cache');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {Express.Request} req
|
||||||
|
* @param {string} [endpoint]
|
||||||
|
* @returns {Promise<string>}
|
||||||
|
*/
|
||||||
|
const getCurrentVersion = async (req, endpoint) => {
|
||||||
|
const index = req.baseUrl.lastIndexOf('/v');
|
||||||
|
let version = index !== -1 ? req.baseUrl.substring(index + 1, index + 3) : null;
|
||||||
|
if (!version && req.body.version) {
|
||||||
|
version = `v${req.body.version}`;
|
||||||
|
}
|
||||||
|
if (!version && endpoint) {
|
||||||
|
const cache = getLogStores(CacheKeys.CONFIG_STORE);
|
||||||
|
const cachedEndpointsConfig = await cache.get(CacheKeys.ENDPOINT_CONFIG);
|
||||||
|
version = `v${
|
||||||
|
cachedEndpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint]
|
||||||
|
}`;
|
||||||
|
}
|
||||||
|
if (!version?.startsWith('v') && version.length !== 2) {
|
||||||
|
throw new Error(`[${req.baseUrl}] Invalid version: ${version}`);
|
||||||
|
}
|
||||||
|
return version;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Asynchronously lists assistants based on provided query parameters.
|
||||||
|
*
|
||||||
|
* Initializes the client with the current request and response objects and lists assistants
|
||||||
|
* according to the query parameters. This function abstracts the logic for non-Azure paths.
|
||||||
|
*
|
||||||
|
* @async
|
||||||
|
* @param {object} params - The parameters object.
|
||||||
|
* @param {object} params.req - The request object, used for initializing the client.
|
||||||
|
* @param {object} params.res - The response object, used for initializing the client.
|
||||||
|
* @param {string} params.version - The API version to use.
|
||||||
|
* @param {object} params.query - The query parameters to list assistants (e.g., limit, order).
|
||||||
|
* @returns {Promise<object>} A promise that resolves to the response from the `openai.beta.assistants.list` method call.
|
||||||
|
*/
|
||||||
|
/**
 * Lists assistants for the non-Azure path by initializing an OpenAI client
 * for the request and forwarding the query to `openai.beta.assistants.list`.
 *
 * @async
 * @param {object} params - The parameters object.
 * @param {object} params.req - Express request, used to initialize the client.
 * @param {object} params.res - Express response, used to initialize the client.
 * @param {string} params.version - The Assistants API version to use.
 * @param {object} params.query - List parameters (e.g., limit, order).
 * @returns {Promise<object>} Resolves to the `assistants.list` response.
 */
const listAssistants = async ({ req, res, version, query }) => {
  const client = await getOpenAIClient({ req, res, version });
  return client.openai.beta.assistants.list(query);
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Asynchronously lists assistants for Azure configured groups.
|
||||||
|
*
|
||||||
|
* Iterates through Azure configured assistant groups, initializes the client with the current request and response objects,
|
||||||
|
* lists assistants based on the provided query parameters, and merges their data alongside the model information into a single array.
|
||||||
|
*
|
||||||
|
* @async
|
||||||
|
* @param {object} params - The parameters object.
|
||||||
|
* @param {object} params.req - The request object, used for initializing the client and manipulating the request body.
|
||||||
|
* @param {object} params.res - The response object, used for initializing the client.
|
||||||
|
* @param {string} params.version - The API version to use.
|
||||||
|
* @param {TAzureConfig} params.azureConfig - The Azure configuration object containing assistantGroups and groupMap.
|
||||||
|
* @param {object} params.query - The query parameters to list assistants (e.g., limit, order).
|
||||||
|
* @returns {Promise<AssistantListResponse>} A promise that resolves to an array of assistant data merged with their respective model information.
|
||||||
|
*/
|
||||||
|
/**
 * Lists assistants for Azure-configured groups.
 *
 * Iterates Azure assistant groups, lists each group's assistants (setting
 * `req.body.model` so the shared instance can be addressed), and maps each
 * assistant's deployment name back to a user-facing model, merging all
 * results into a single list response.
 *
 * Robustness fixes over the original: `assistantGroups`/`groupMap` default
 * to empty when `azureConfig` is missing them (the original threw a
 * TypeError), and groups with no configured models are skipped instead of
 * crashing on `Object.entries(undefined)` / `currentModelTuples[0][0]`.
 *
 * @async
 * @param {object} params - The parameters object.
 * @param {object} params.req - Express request; `req.body.model` is mutated per group.
 * @param {object} params.res - Express response, used to initialize the client.
 * @param {string} params.version - The API version to use.
 * @param {TAzureConfig} params.azureConfig - Azure config with assistantGroups and groupMap.
 * @param {object} params.query - List parameters (e.g., limit, order).
 * @returns {Promise<AssistantListResponse>} Merged assistant data with model info resolved.
 */
const listAssistantsForAzure = async ({ req, res, version, azureConfig = {}, query }) => {
  /** @type {Array<Array<[string, TAzureModelConfig]>>} */
  const groupModelTuples = [];
  const promises = [];
  /** @type {Array<TAzureGroup>} */
  const groups = [];

  const { groupMap = {}, assistantGroups = [] } = azureConfig;

  for (const groupName of assistantGroups) {
    const group = groupMap[groupName];
    const currentModelTuples = Object.entries(group?.models ?? {});
    if (!currentModelTuples.length) {
      // Misconfigured group with no models: nothing to query; skip rather than throw.
      continue;
    }
    groups.push(group);
    groupModelTuples.push(currentModelTuples);

    /* The specified model is only necessary to
      fetch assistants for the shared instance */
    req.body.model = currentModelTuples[0][0];
    promises.push(listAssistants({ req, res, version, query }));
  }

  const resolvedQueries = await Promise.all(promises);
  const data = resolvedQueries.flatMap((res, i) =>
    res.data.map((assistant) => {
      const deploymentName = assistant.model;
      const currentGroup = groups[i];
      const currentModelTuples = groupModelTuples[i];
      const firstModel = currentModelTuples[0][0];

      if (currentGroup.deploymentName === deploymentName) {
        return { ...assistant, model: firstModel };
      }

      // Match the assistant's deployment back to its configured model name.
      for (const [model, modelConfig] of currentModelTuples) {
        if (modelConfig.deploymentName === deploymentName) {
          return { ...assistant, model };
        }
      }

      // Unknown deployment: fall back to the group's first model.
      return { ...assistant, model: firstModel };
    }),
  );

  return {
    first_id: data[0]?.id,
    last_id: data[data.length - 1]?.id,
    object: 'list',
    has_more: false,
    data,
  };
};
|
||||||
|
|
||||||
|
async function getOpenAIClient({ req, res, endpointOption, initAppClient }) {
|
||||||
|
let endpoint = req.body.endpoint ?? req.query.endpoint;
|
||||||
|
if (!endpoint && req.baseUrl.includes('files') && req.body.files) {
|
||||||
|
const source = req.body.files[0]?.source;
|
||||||
|
endpoint =
|
||||||
|
source === FileSources.openai ? EModelEndpoint.assistants : EModelEndpoint.azureAssistants;
|
||||||
|
}
|
||||||
|
const version = await getCurrentVersion(req, endpoint);
|
||||||
|
if (!endpoint) {
|
||||||
|
throw new Error(`[${req.baseUrl}] Endpoint is required`);
|
||||||
|
}
|
||||||
|
|
||||||
|
let result;
|
||||||
|
if (endpoint === EModelEndpoint.assistants) {
|
||||||
|
result = await initializeClient({ req, res, version, endpointOption, initAppClient });
|
||||||
|
} else if (endpoint === EModelEndpoint.azureAssistants) {
|
||||||
|
result = await initAzureClient({ req, res, version, endpointOption, initAppClient });
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Retrieves the assistant list for the endpoint named in the query,
 * dispatching to the plain OpenAI or Azure listing path.
 *
 * @param {object} req - Express request; query carries limit/order/after/before/endpoint.
 * @param {object} res - Express response, used to initialize the client.
 * @returns {Promise<AssistantListResponse>} The assistant list body.
 */
const fetchAssistants = async (req, res) => {
  const { limit = 100, order = 'desc', after, before, endpoint } = req.query;
  const version = await getCurrentVersion(req, endpoint);
  const query = { limit, order, after, before };

  /** @type {AssistantListResponse} */
  let body;

  switch (endpoint) {
    case EModelEndpoint.assistants:
      ({ body } = await listAssistants({ req, res, version, query }));
      break;
    case EModelEndpoint.azureAssistants: {
      const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];
      body = await listAssistantsForAzure({ req, res, version, azureConfig, query });
      break;
    }
    default:
      break;
  }

  return body;
};
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getOpenAIClient,
|
||||||
|
fetchAssistants,
|
||||||
|
getCurrentVersion,
|
||||||
|
};
|
||||||
|
|
@ -1,34 +1,11 @@
|
||||||
const multer = require('multer');
|
const { FileContext } = require('librechat-data-provider');
|
||||||
const express = require('express');
|
|
||||||
const { FileContext, EModelEndpoint } = require('librechat-data-provider');
|
|
||||||
const {
|
|
||||||
initializeClient,
|
|
||||||
listAssistantsForAzure,
|
|
||||||
listAssistants,
|
|
||||||
} = require('~/server/services/Endpoints/assistants');
|
|
||||||
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
|
||||||
|
const { deleteAssistantActions } = require('~/server/services/ActionService');
|
||||||
const { uploadImageBuffer } = require('~/server/services/Files/process');
|
const { uploadImageBuffer } = require('~/server/services/Files/process');
|
||||||
const { updateAssistant, getAssistants } = require('~/models/Assistant');
|
const { updateAssistant, getAssistants } = require('~/models/Assistant');
|
||||||
|
const { getOpenAIClient, fetchAssistants } = require('./helpers');
|
||||||
const { deleteFileByFilter } = require('~/models/File');
|
const { deleteFileByFilter } = require('~/models/File');
|
||||||
const { logger } = require('~/config');
|
const { logger } = require('~/config');
|
||||||
const actions = require('./actions');
|
|
||||||
const tools = require('./tools');
|
|
||||||
|
|
||||||
const upload = multer();
|
|
||||||
const router = express.Router();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Assistant actions route.
|
|
||||||
* @route GET|POST /assistants/actions
|
|
||||||
*/
|
|
||||||
router.use('/actions', actions);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create an assistant.
|
|
||||||
* @route GET /assistants/tools
|
|
||||||
* @returns {TPlugin[]} 200 - application/json
|
|
||||||
*/
|
|
||||||
router.use('/tools', tools);
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create an assistant.
|
* Create an assistant.
|
||||||
|
|
@ -36,12 +13,11 @@ router.use('/tools', tools);
|
||||||
* @param {AssistantCreateParams} req.body - The assistant creation parameters.
|
* @param {AssistantCreateParams} req.body - The assistant creation parameters.
|
||||||
* @returns {Assistant} 201 - success response - application/json
|
* @returns {Assistant} 201 - success response - application/json
|
||||||
*/
|
*/
|
||||||
router.post('/', async (req, res) => {
|
const createAssistant = async (req, res) => {
|
||||||
try {
|
try {
|
||||||
/** @type {{ openai: OpenAI }} */
|
const { openai } = await getOpenAIClient({ req, res });
|
||||||
const { openai } = await initializeClient({ req, res });
|
|
||||||
|
|
||||||
const { tools = [], ...assistantData } = req.body;
|
const { tools = [], endpoint: _e, ...assistantData } = req.body;
|
||||||
assistantData.tools = tools
|
assistantData.tools = tools
|
||||||
.map((tool) => {
|
.map((tool) => {
|
||||||
if (typeof tool !== 'string') {
|
if (typeof tool !== 'string') {
|
||||||
|
|
@ -52,18 +28,23 @@ router.post('/', async (req, res) => {
|
||||||
})
|
})
|
||||||
.filter((tool) => tool);
|
.filter((tool) => tool);
|
||||||
|
|
||||||
|
let azureModelIdentifier = null;
|
||||||
if (openai.locals?.azureOptions) {
|
if (openai.locals?.azureOptions) {
|
||||||
|
azureModelIdentifier = assistantData.model;
|
||||||
assistantData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName;
|
assistantData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName;
|
||||||
}
|
}
|
||||||
|
|
||||||
const assistant = await openai.beta.assistants.create(assistantData);
|
const assistant = await openai.beta.assistants.create(assistantData);
|
||||||
|
if (azureModelIdentifier) {
|
||||||
|
assistant.model = azureModelIdentifier;
|
||||||
|
}
|
||||||
logger.debug('/assistants/', assistant);
|
logger.debug('/assistants/', assistant);
|
||||||
res.status(201).json(assistant);
|
res.status(201).json(assistant);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[/assistants] Error creating assistant', error);
|
logger.error('[/assistants] Error creating assistant', error);
|
||||||
res.status(500).json({ error: error.message });
|
res.status(500).json({ error: error.message });
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Retrieves an assistant.
|
* Retrieves an assistant.
|
||||||
|
|
@ -71,10 +52,10 @@ router.post('/', async (req, res) => {
|
||||||
* @param {string} req.params.id - Assistant identifier.
|
* @param {string} req.params.id - Assistant identifier.
|
||||||
* @returns {Assistant} 200 - success response - application/json
|
* @returns {Assistant} 200 - success response - application/json
|
||||||
*/
|
*/
|
||||||
router.get('/:id', async (req, res) => {
|
const retrieveAssistant = async (req, res) => {
|
||||||
try {
|
try {
|
||||||
/** @type {{ openai: OpenAI }} */
|
/* NOTE: not actually being used right now */
|
||||||
const { openai } = await initializeClient({ req, res });
|
const { openai } = await getOpenAIClient({ req, res });
|
||||||
|
|
||||||
const assistant_id = req.params.id;
|
const assistant_id = req.params.id;
|
||||||
const assistant = await openai.beta.assistants.retrieve(assistant_id);
|
const assistant = await openai.beta.assistants.retrieve(assistant_id);
|
||||||
|
|
@ -83,22 +64,23 @@ router.get('/:id', async (req, res) => {
|
||||||
logger.error('[/assistants/:id] Error retrieving assistant', error);
|
logger.error('[/assistants/:id] Error retrieving assistant', error);
|
||||||
res.status(500).json({ error: error.message });
|
res.status(500).json({ error: error.message });
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Modifies an assistant.
|
* Modifies an assistant.
|
||||||
* @route PATCH /assistants/:id
|
* @route PATCH /assistants/:id
|
||||||
|
* @param {object} req - Express Request
|
||||||
|
* @param {object} req.params - Request params
|
||||||
* @param {string} req.params.id - Assistant identifier.
|
* @param {string} req.params.id - Assistant identifier.
|
||||||
* @param {AssistantUpdateParams} req.body - The assistant update parameters.
|
* @param {AssistantUpdateParams} req.body - The assistant update parameters.
|
||||||
* @returns {Assistant} 200 - success response - application/json
|
* @returns {Assistant} 200 - success response - application/json
|
||||||
*/
|
*/
|
||||||
router.patch('/:id', async (req, res) => {
|
const patchAssistant = async (req, res) => {
|
||||||
try {
|
try {
|
||||||
/** @type {{ openai: OpenAI }} */
|
const { openai } = await getOpenAIClient({ req, res });
|
||||||
const { openai } = await initializeClient({ req, res });
|
|
||||||
|
|
||||||
const assistant_id = req.params.id;
|
const assistant_id = req.params.id;
|
||||||
const updateData = req.body;
|
const { endpoint: _e, ...updateData } = req.body;
|
||||||
updateData.tools = (updateData.tools ?? [])
|
updateData.tools = (updateData.tools ?? [])
|
||||||
.map((tool) => {
|
.map((tool) => {
|
||||||
if (typeof tool !== 'string') {
|
if (typeof tool !== 'string') {
|
||||||
|
|
@ -119,52 +101,46 @@ router.patch('/:id', async (req, res) => {
|
||||||
logger.error('[/assistants/:id] Error updating assistant', error);
|
logger.error('[/assistants/:id] Error updating assistant', error);
|
||||||
res.status(500).json({ error: error.message });
|
res.status(500).json({ error: error.message });
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Deletes an assistant.
|
* Deletes an assistant.
|
||||||
* @route DELETE /assistants/:id
|
* @route DELETE /assistants/:id
|
||||||
|
* @param {object} req - Express Request
|
||||||
|
* @param {object} req.params - Request params
|
||||||
* @param {string} req.params.id - Assistant identifier.
|
* @param {string} req.params.id - Assistant identifier.
|
||||||
* @returns {Assistant} 200 - success response - application/json
|
* @returns {Assistant} 200 - success response - application/json
|
||||||
*/
|
*/
|
||||||
router.delete('/:id', async (req, res) => {
|
const deleteAssistant = async (req, res) => {
|
||||||
try {
|
try {
|
||||||
/** @type {{ openai: OpenAI }} */
|
const { openai } = await getOpenAIClient({ req, res });
|
||||||
const { openai } = await initializeClient({ req, res });
|
|
||||||
|
|
||||||
const assistant_id = req.params.id;
|
const assistant_id = req.params.id;
|
||||||
const deletionStatus = await openai.beta.assistants.del(assistant_id);
|
const deletionStatus = await openai.beta.assistants.del(assistant_id);
|
||||||
|
if (deletionStatus?.deleted) {
|
||||||
|
await deleteAssistantActions({ req, assistant_id });
|
||||||
|
}
|
||||||
res.json(deletionStatus);
|
res.json(deletionStatus);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[/assistants/:id] Error deleting assistant', error);
|
logger.error('[/assistants/:id] Error deleting assistant', error);
|
||||||
res.status(500).json({ error: 'Error deleting assistant' });
|
res.status(500).json({ error: 'Error deleting assistant' });
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns a list of assistants.
|
* Returns a list of assistants.
|
||||||
* @route GET /assistants
|
* @route GET /assistants
|
||||||
|
* @param {object} req - Express Request
|
||||||
* @param {AssistantListParams} req.query - The assistant list parameters for pagination and sorting.
|
* @param {AssistantListParams} req.query - The assistant list parameters for pagination and sorting.
|
||||||
* @returns {AssistantListResponse} 200 - success response - application/json
|
* @returns {AssistantListResponse} 200 - success response - application/json
|
||||||
*/
|
*/
|
||||||
router.get('/', async (req, res) => {
|
const listAssistants = async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const { limit = 100, order = 'desc', after, before } = req.query;
|
const body = await fetchAssistants(req, res);
|
||||||
const query = { limit, order, after, before };
|
|
||||||
|
|
||||||
const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];
|
if (req.app.locals?.[req.query.endpoint]) {
|
||||||
/** @type {AssistantListResponse} */
|
|
||||||
let body;
|
|
||||||
|
|
||||||
if (azureConfig?.assistants) {
|
|
||||||
body = await listAssistantsForAzure({ req, res, azureConfig, query });
|
|
||||||
} else {
|
|
||||||
({ body } = await listAssistants({ req, res, query }));
|
|
||||||
}
|
|
||||||
|
|
||||||
if (req.app.locals?.[EModelEndpoint.assistants]) {
|
|
||||||
/** @type {Partial<TAssistantEndpoint>} */
|
/** @type {Partial<TAssistantEndpoint>} */
|
||||||
const assistantsConfig = req.app.locals[EModelEndpoint.assistants];
|
const assistantsConfig = req.app.locals[req.query.endpoint];
|
||||||
const { supportedIds, excludedIds } = assistantsConfig;
|
const { supportedIds, excludedIds } = assistantsConfig;
|
||||||
if (supportedIds?.length) {
|
if (supportedIds?.length) {
|
||||||
body.data = body.data.filter((assistant) => supportedIds.includes(assistant.id));
|
body.data = body.data.filter((assistant) => supportedIds.includes(assistant.id));
|
||||||
|
|
@ -178,31 +154,34 @@ router.get('/', async (req, res) => {
|
||||||
logger.error('[/assistants] Error listing assistants', error);
|
logger.error('[/assistants] Error listing assistants', error);
|
||||||
res.status(500).json({ message: 'Error listing assistants' });
|
res.status(500).json({ message: 'Error listing assistants' });
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns a list of the user's assistant documents (metadata saved to database).
|
* Returns a list of the user's assistant documents (metadata saved to database).
|
||||||
* @route GET /assistants/documents
|
* @route GET /assistants/documents
|
||||||
* @returns {AssistantDocument[]} 200 - success response - application/json
|
* @returns {AssistantDocument[]} 200 - success response - application/json
|
||||||
*/
|
*/
|
||||||
router.get('/documents', async (req, res) => {
|
const getAssistantDocuments = async (req, res) => {
|
||||||
try {
|
try {
|
||||||
res.json(await getAssistants({ user: req.user.id }));
|
res.json(await getAssistants({ user: req.user.id }));
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('[/assistants/documents] Error listing assistant documents', error);
|
logger.error('[/assistants/documents] Error listing assistant documents', error);
|
||||||
res.status(500).json({ error: error.message });
|
res.status(500).json({ error: error.message });
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Uploads and updates an avatar for a specific assistant.
|
* Uploads and updates an avatar for a specific assistant.
|
||||||
* @route POST /avatar/:assistant_id
|
* @route POST /avatar/:assistant_id
|
||||||
|
* @param {object} req - Express Request
|
||||||
|
* @param {object} req.params - Request params
|
||||||
* @param {string} req.params.assistant_id - The ID of the assistant.
|
* @param {string} req.params.assistant_id - The ID of the assistant.
|
||||||
* @param {Express.Multer.File} req.file - The avatar image file.
|
* @param {Express.Multer.File} req.file - The avatar image file.
|
||||||
|
* @param {object} req.body - Request body
|
||||||
* @param {string} [req.body.metadata] - Optional metadata for the assistant's avatar.
|
* @param {string} [req.body.metadata] - Optional metadata for the assistant's avatar.
|
||||||
* @returns {Object} 200 - success response - application/json
|
* @returns {Object} 200 - success response - application/json
|
||||||
*/
|
*/
|
||||||
router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) => {
|
const uploadAssistantAvatar = async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const { assistant_id } = req.params;
|
const { assistant_id } = req.params;
|
||||||
if (!assistant_id) {
|
if (!assistant_id) {
|
||||||
|
|
@ -210,8 +189,7 @@ router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) =>
|
||||||
}
|
}
|
||||||
|
|
||||||
let { metadata: _metadata = '{}' } = req.body;
|
let { metadata: _metadata = '{}' } = req.body;
|
||||||
/** @type {{ openai: OpenAI }} */
|
const { openai } = await getOpenAIClient({ req, res });
|
||||||
const { openai } = await initializeClient({ req, res });
|
|
||||||
|
|
||||||
const image = await uploadImageBuffer({
|
const image = await uploadImageBuffer({
|
||||||
req,
|
req,
|
||||||
|
|
@ -266,6 +244,14 @@ router.post('/avatar/:assistant_id', upload.single('file'), async (req, res) =>
|
||||||
logger.error(message, error);
|
logger.error(message, error);
|
||||||
res.status(500).json({ message });
|
res.status(500).json({ message });
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
|
||||||
module.exports = router;
|
module.exports = {
|
||||||
|
createAssistant,
|
||||||
|
retrieveAssistant,
|
||||||
|
patchAssistant,
|
||||||
|
deleteAssistant,
|
||||||
|
listAssistants,
|
||||||
|
getAssistantDocuments,
|
||||||
|
uploadAssistantAvatar,
|
||||||
|
};
|
||||||
183
api/server/controllers/assistants/v2.js
Normal file
183
api/server/controllers/assistants/v2.js
Normal file
|
|
@ -0,0 +1,183 @@
|
||||||
|
const { validateAndUpdateTool } = require('~/server/services/ActionService');
|
||||||
|
const { getOpenAIClient } = require('./helpers');
|
||||||
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create an assistant.
|
||||||
|
* @route POST /assistants
|
||||||
|
* @param {AssistantCreateParams} req.body - The assistant creation parameters.
|
||||||
|
* @returns {Assistant} 201 - success response - application/json
|
||||||
|
*/
|
||||||
|
const createAssistant = async (req, res) => {
|
||||||
|
try {
|
||||||
|
/** @type {{ openai: OpenAIClient }} */
|
||||||
|
const { openai } = await getOpenAIClient({ req, res });
|
||||||
|
|
||||||
|
const { tools = [], endpoint: _e, ...assistantData } = req.body;
|
||||||
|
assistantData.tools = tools
|
||||||
|
.map((tool) => {
|
||||||
|
if (typeof tool !== 'string') {
|
||||||
|
return tool;
|
||||||
|
}
|
||||||
|
|
||||||
|
return req.app.locals.availableTools[tool];
|
||||||
|
})
|
||||||
|
.filter((tool) => tool);
|
||||||
|
|
||||||
|
let azureModelIdentifier = null;
|
||||||
|
if (openai.locals?.azureOptions) {
|
||||||
|
azureModelIdentifier = assistantData.model;
|
||||||
|
assistantData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName;
|
||||||
|
}
|
||||||
|
|
||||||
|
const assistant = await openai.beta.assistants.create(assistantData);
|
||||||
|
if (azureModelIdentifier) {
|
||||||
|
assistant.model = azureModelIdentifier;
|
||||||
|
}
|
||||||
|
logger.debug('/assistants/', assistant);
|
||||||
|
res.status(201).json(assistant);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[/assistants] Error creating assistant', error);
|
||||||
|
res.status(500).json({ error: error.message });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Modifies an assistant.
|
||||||
|
* @param {object} params
|
||||||
|
* @param {Express.Request} params.req
|
||||||
|
* @param {OpenAIClient} params.openai
|
||||||
|
* @param {string} params.assistant_id
|
||||||
|
* @param {AssistantUpdateParams} params.updateData
|
||||||
|
* @returns {Promise<Assistant>} The updated assistant.
|
||||||
|
*/
|
||||||
|
const updateAssistant = async ({ req, openai, assistant_id, updateData }) => {
|
||||||
|
const tools = [];
|
||||||
|
|
||||||
|
for (const tool of updateData.tools ?? []) {
|
||||||
|
let actualTool = typeof tool === 'string' ? req.app.locals.availableTools[tool] : tool;
|
||||||
|
|
||||||
|
if (!actualTool) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!actualTool.function) {
|
||||||
|
tools.push(actualTool);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const updatedTool = await validateAndUpdateTool({ req, tool: actualTool, assistant_id });
|
||||||
|
if (updatedTool) {
|
||||||
|
tools.push(updatedTool);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
updateData.tools = tools;
|
||||||
|
|
||||||
|
if (openai.locals?.azureOptions && updateData.model) {
|
||||||
|
updateData.model = openai.locals.azureOptions.azureOpenAIApiDeploymentName;
|
||||||
|
}
|
||||||
|
|
||||||
|
return await openai.beta.assistants.update(assistant_id, updateData);
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Modifies an assistant with the resource file id.
|
||||||
|
* @param {object} params
|
||||||
|
* @param {Express.Request} params.req
|
||||||
|
* @param {OpenAIClient} params.openai
|
||||||
|
* @param {string} params.assistant_id
|
||||||
|
* @param {string} params.tool_resource
|
||||||
|
* @param {string} params.file_id
|
||||||
|
* @param {AssistantUpdateParams} params.updateData
|
||||||
|
* @returns {Promise<Assistant>} The updated assistant.
|
||||||
|
*/
|
||||||
|
const addResourceFileId = async ({ req, openai, assistant_id, tool_resource, file_id }) => {
|
||||||
|
const assistant = await openai.beta.assistants.retrieve(assistant_id);
|
||||||
|
const { tool_resources = {} } = assistant;
|
||||||
|
if (tool_resources[tool_resource]) {
|
||||||
|
tool_resources[tool_resource].file_ids.push(file_id);
|
||||||
|
} else {
|
||||||
|
tool_resources[tool_resource] = { file_ids: [file_id] };
|
||||||
|
}
|
||||||
|
|
||||||
|
delete assistant.id;
|
||||||
|
return await updateAssistant({
|
||||||
|
req,
|
||||||
|
openai,
|
||||||
|
assistant_id,
|
||||||
|
updateData: { tools: assistant.tools, tool_resources },
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deletes a file ID from an assistant's resource.
|
||||||
|
* @param {object} params
|
||||||
|
* @param {Express.Request} params.req
|
||||||
|
* @param {OpenAIClient} params.openai
|
||||||
|
* @param {string} params.assistant_id
|
||||||
|
* @param {string} [params.tool_resource]
|
||||||
|
* @param {string} params.file_id
|
||||||
|
* @param {AssistantUpdateParams} params.updateData
|
||||||
|
* @returns {Promise<Assistant>} The updated assistant.
|
||||||
|
*/
|
||||||
|
const deleteResourceFileId = async ({ req, openai, assistant_id, tool_resource, file_id }) => {
|
||||||
|
const assistant = await openai.beta.assistants.retrieve(assistant_id);
|
||||||
|
const { tool_resources = {} } = assistant;
|
||||||
|
|
||||||
|
if (tool_resource && tool_resources[tool_resource]) {
|
||||||
|
const resource = tool_resources[tool_resource];
|
||||||
|
const index = resource.file_ids.indexOf(file_id);
|
||||||
|
if (index !== -1) {
|
||||||
|
resource.file_ids.splice(index, 1);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
for (const resourceKey in tool_resources) {
|
||||||
|
const resource = tool_resources[resourceKey];
|
||||||
|
const index = resource.file_ids.indexOf(file_id);
|
||||||
|
if (index !== -1) {
|
||||||
|
resource.file_ids.splice(index, 1);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
delete assistant.id;
|
||||||
|
return await updateAssistant({
|
||||||
|
req,
|
||||||
|
openai,
|
||||||
|
assistant_id,
|
||||||
|
updateData: { tools: assistant.tools, tool_resources },
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Modifies an assistant.
|
||||||
|
* @route PATCH /assistants/:id
|
||||||
|
* @param {object} req - Express Request
|
||||||
|
* @param {object} req.params - Request params
|
||||||
|
* @param {string} req.params.id - Assistant identifier.
|
||||||
|
* @param {AssistantUpdateParams} req.body - The assistant update parameters.
|
||||||
|
* @returns {Assistant} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
const patchAssistant = async (req, res) => {
|
||||||
|
try {
|
||||||
|
const { openai } = await getOpenAIClient({ req, res });
|
||||||
|
const assistant_id = req.params.id;
|
||||||
|
const { endpoint: _e, ...updateData } = req.body;
|
||||||
|
updateData.tools = updateData.tools ?? [];
|
||||||
|
const updatedAssistant = await updateAssistant({ req, openai, assistant_id, updateData });
|
||||||
|
res.json(updatedAssistant);
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('[/assistants/:id] Error updating assistant', error);
|
||||||
|
res.status(500).json({ error: error.message });
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
patchAssistant,
|
||||||
|
createAssistant,
|
||||||
|
updateAssistant,
|
||||||
|
addResourceFileId,
|
||||||
|
deleteResourceFileId,
|
||||||
|
};
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
const { EModelEndpoint } = require('librechat-data-provider');
|
const { isAssistantsEndpoint } = require('librechat-data-provider');
|
||||||
const { sendMessage, sendError, countTokens, isEnabled } = require('~/server/utils');
|
const { sendMessage, sendError, countTokens, isEnabled } = require('~/server/utils');
|
||||||
const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
|
const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
|
||||||
const { saveMessage, getConvo, getConvoTitle } = require('~/models');
|
const { saveMessage, getConvo, getConvoTitle } = require('~/models');
|
||||||
|
|
@ -15,7 +15,7 @@ async function abortMessage(req, res) {
|
||||||
abortKey = conversationId;
|
abortKey = conversationId;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (endpoint === EModelEndpoint.assistants) {
|
if (isAssistantsEndpoint(endpoint)) {
|
||||||
return await abortRun(req, res);
|
return await abortRun(req, res);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ const three_minutes = 1000 * 60 * 3;
|
||||||
|
|
||||||
async function abortRun(req, res) {
|
async function abortRun(req, res) {
|
||||||
res.setHeader('Content-Type', 'application/json');
|
res.setHeader('Content-Type', 'application/json');
|
||||||
const { abortKey } = req.body;
|
const { abortKey, endpoint } = req.body;
|
||||||
const [conversationId, latestMessageId] = abortKey.split(':');
|
const [conversationId, latestMessageId] = abortKey.split(':');
|
||||||
const conversation = await getConvo(req.user.id, conversationId);
|
const conversation = await getConvo(req.user.id, conversationId);
|
||||||
|
|
||||||
|
|
@ -68,9 +68,10 @@ async function abortRun(req, res) {
|
||||||
|
|
||||||
runMessages = await checkMessageGaps({
|
runMessages = await checkMessageGaps({
|
||||||
openai,
|
openai,
|
||||||
latestMessageId,
|
endpoint,
|
||||||
thread_id,
|
thread_id,
|
||||||
run_id,
|
run_id,
|
||||||
|
latestMessageId,
|
||||||
conversationId,
|
conversationId,
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
const { parseConvo, EModelEndpoint } = require('librechat-data-provider');
|
const { parseConvo, EModelEndpoint } = require('librechat-data-provider');
|
||||||
const { getModelsConfig } = require('~/server/controllers/ModelController');
|
const { getModelsConfig } = require('~/server/controllers/ModelController');
|
||||||
|
const azureAssistants = require('~/server/services/Endpoints/azureAssistants');
|
||||||
const assistants = require('~/server/services/Endpoints/assistants');
|
const assistants = require('~/server/services/Endpoints/assistants');
|
||||||
const gptPlugins = require('~/server/services/Endpoints/gptPlugins');
|
const gptPlugins = require('~/server/services/Endpoints/gptPlugins');
|
||||||
const { processFiles } = require('~/server/services/Files/process');
|
const { processFiles } = require('~/server/services/Files/process');
|
||||||
|
|
@ -18,6 +19,7 @@ const buildFunction = {
|
||||||
[EModelEndpoint.anthropic]: anthropic.buildOptions,
|
[EModelEndpoint.anthropic]: anthropic.buildOptions,
|
||||||
[EModelEndpoint.gptPlugins]: gptPlugins.buildOptions,
|
[EModelEndpoint.gptPlugins]: gptPlugins.buildOptions,
|
||||||
[EModelEndpoint.assistants]: assistants.buildOptions,
|
[EModelEndpoint.assistants]: assistants.buildOptions,
|
||||||
|
[EModelEndpoint.azureAssistants]: azureAssistants.buildOptions,
|
||||||
};
|
};
|
||||||
|
|
||||||
async function buildEndpointOption(req, res, next) {
|
async function buildEndpointOption(req, res, next) {
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,7 @@ const { v4 } = require('uuid');
|
||||||
const express = require('express');
|
const express = require('express');
|
||||||
const { encryptMetadata, domainParser } = require('~/server/services/ActionService');
|
const { encryptMetadata, domainParser } = require('~/server/services/ActionService');
|
||||||
const { actionDelimiter, EModelEndpoint } = require('librechat-data-provider');
|
const { actionDelimiter, EModelEndpoint } = require('librechat-data-provider');
|
||||||
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
|
||||||
const { updateAction, getActions, deleteAction } = require('~/models/Action');
|
const { updateAction, getActions, deleteAction } = require('~/models/Action');
|
||||||
const { updateAssistant, getAssistant } = require('~/models/Assistant');
|
const { updateAssistant, getAssistant } = require('~/models/Assistant');
|
||||||
const { logger } = require('~/config');
|
const { logger } = require('~/config');
|
||||||
|
|
@ -45,7 +45,6 @@ router.post('/:assistant_id', async (req, res) => {
|
||||||
let metadata = encryptMetadata(_metadata);
|
let metadata = encryptMetadata(_metadata);
|
||||||
|
|
||||||
let { domain } = metadata;
|
let { domain } = metadata;
|
||||||
/* Azure doesn't support periods in function names */
|
|
||||||
domain = await domainParser(req, domain, true);
|
domain = await domainParser(req, domain, true);
|
||||||
|
|
||||||
if (!domain) {
|
if (!domain) {
|
||||||
|
|
@ -55,8 +54,7 @@ router.post('/:assistant_id', async (req, res) => {
|
||||||
const action_id = _action_id ?? v4();
|
const action_id = _action_id ?? v4();
|
||||||
const initialPromises = [];
|
const initialPromises = [];
|
||||||
|
|
||||||
/** @type {{ openai: OpenAI }} */
|
const { openai } = await getOpenAIClient({ req, res });
|
||||||
const { openai } = await initializeClient({ req, res });
|
|
||||||
|
|
||||||
initialPromises.push(getAssistant({ assistant_id }));
|
initialPromises.push(getAssistant({ assistant_id }));
|
||||||
initialPromises.push(openai.beta.assistants.retrieve(assistant_id));
|
initialPromises.push(openai.beta.assistants.retrieve(assistant_id));
|
||||||
|
|
@ -157,9 +155,7 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {
|
||||||
try {
|
try {
|
||||||
const { assistant_id, action_id, model } = req.params;
|
const { assistant_id, action_id, model } = req.params;
|
||||||
req.body.model = model;
|
req.body.model = model;
|
||||||
|
const { openai } = await getOpenAIClient({ req, res });
|
||||||
/** @type {{ openai: OpenAI }} */
|
|
||||||
const { openai } = await initializeClient({ req, res });
|
|
||||||
|
|
||||||
const initialPromises = [];
|
const initialPromises = [];
|
||||||
initialPromises.push(getAssistant({ assistant_id }));
|
initialPromises.push(getAssistant({ assistant_id }));
|
||||||
|
|
|
||||||
25
api/server/routes/assistants/chatV1.js
Normal file
25
api/server/routes/assistants/chatV1.js
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
const express = require('express');
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
const {
|
||||||
|
setHeaders,
|
||||||
|
handleAbort,
|
||||||
|
validateModel,
|
||||||
|
// validateEndpoint,
|
||||||
|
buildEndpointOption,
|
||||||
|
} = require('~/server/middleware');
|
||||||
|
const chatController = require('~/server/controllers/assistants/chatV1');
|
||||||
|
|
||||||
|
router.post('/abort', handleAbort());
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @route POST /
|
||||||
|
* @desc Chat with an assistant
|
||||||
|
* @access Public
|
||||||
|
* @param {express.Request} req - The request object, containing the request data.
|
||||||
|
* @param {express.Response} res - The response object, used to send back a response.
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
router.post('/', validateModel, buildEndpointOption, setHeaders, chatController);
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
25
api/server/routes/assistants/chatV2.js
Normal file
25
api/server/routes/assistants/chatV2.js
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
const express = require('express');
|
||||||
|
|
||||||
|
const router = express.Router();
|
||||||
|
const {
|
||||||
|
setHeaders,
|
||||||
|
handleAbort,
|
||||||
|
validateModel,
|
||||||
|
// validateEndpoint,
|
||||||
|
buildEndpointOption,
|
||||||
|
} = require('~/server/middleware');
|
||||||
|
const chatController = require('~/server/controllers/assistants/chatV2');
|
||||||
|
|
||||||
|
router.post('/abort', handleAbort());
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @route POST /
|
||||||
|
* @desc Chat with an assistant
|
||||||
|
* @access Public
|
||||||
|
* @param {express.Request} req - The request object, containing the request data.
|
||||||
|
* @param {express.Response} res - The response object, used to send back a response.
|
||||||
|
* @returns {void}
|
||||||
|
*/
|
||||||
|
router.post('/', validateModel, buildEndpointOption, setHeaders, chatController);
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
|
|
@ -7,16 +7,19 @@ const {
|
||||||
// concurrentLimiter,
|
// concurrentLimiter,
|
||||||
// messageIpLimiter,
|
// messageIpLimiter,
|
||||||
// messageUserLimiter,
|
// messageUserLimiter,
|
||||||
} = require('../../middleware');
|
} = require('~/server/middleware');
|
||||||
|
|
||||||
const assistants = require('./assistants');
|
const v1 = require('./v1');
|
||||||
const chat = require('./chat');
|
const chatV1 = require('./chatV1');
|
||||||
|
const v2 = require('./v2');
|
||||||
|
const chatV2 = require('./chatV2');
|
||||||
|
|
||||||
router.use(requireJwtAuth);
|
router.use(requireJwtAuth);
|
||||||
router.use(checkBan);
|
router.use(checkBan);
|
||||||
router.use(uaParser);
|
router.use(uaParser);
|
||||||
|
router.use('/v1/', v1);
|
||||||
router.use('/', assistants);
|
router.use('/v1/chat', chatV1);
|
||||||
router.use('/chat', chat);
|
router.use('/v2/', v2);
|
||||||
|
router.use('/v2/chat', chatV2);
|
||||||
|
|
||||||
module.exports = router;
|
module.exports = router;
|
||||||
|
|
|
||||||
81
api/server/routes/assistants/v1.js
Normal file
81
api/server/routes/assistants/v1.js
Normal file
|
|
@ -0,0 +1,81 @@
|
||||||
|
const multer = require('multer');
|
||||||
|
const express = require('express');
|
||||||
|
const controllers = require('~/server/controllers/assistants/v1');
|
||||||
|
const actions = require('./actions');
|
||||||
|
const tools = require('./tools');
|
||||||
|
|
||||||
|
const upload = multer();
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Assistant actions route.
|
||||||
|
* @route GET|POST /assistants/actions
|
||||||
|
*/
|
||||||
|
router.use('/actions', actions);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create an assistant.
|
||||||
|
* @route GET /assistants/tools
|
||||||
|
* @returns {TPlugin[]} 200 - application/json
|
||||||
|
*/
|
||||||
|
router.use('/tools', tools);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create an assistant.
|
||||||
|
* @route POST /assistants
|
||||||
|
* @param {AssistantCreateParams} req.body - The assistant creation parameters.
|
||||||
|
* @returns {Assistant} 201 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.post('/', controllers.createAssistant);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieves an assistant.
|
||||||
|
* @route GET /assistants/:id
|
||||||
|
* @param {string} req.params.id - Assistant identifier.
|
||||||
|
* @returns {Assistant} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.get('/:id', controllers.retrieveAssistant);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Modifies an assistant.
|
||||||
|
* @route PATCH /assistants/:id
|
||||||
|
* @param {string} req.params.id - Assistant identifier.
|
||||||
|
* @param {AssistantUpdateParams} req.body - The assistant update parameters.
|
||||||
|
* @returns {Assistant} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.patch('/:id', controllers.patchAssistant);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deletes an assistant.
|
||||||
|
* @route DELETE /assistants/:id
|
||||||
|
* @param {string} req.params.id - Assistant identifier.
|
||||||
|
* @returns {Assistant} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.delete('/:id', controllers.deleteAssistant);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a list of assistants.
|
||||||
|
* @route GET /assistants
|
||||||
|
* @param {AssistantListParams} req.query - The assistant list parameters for pagination and sorting.
|
||||||
|
* @returns {AssistantListResponse} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.get('/', controllers.listAssistants);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a list of the user's assistant documents (metadata saved to database).
|
||||||
|
* @route GET /assistants/documents
|
||||||
|
* @returns {AssistantDocument[]} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.get('/documents', controllers.getAssistantDocuments);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Uploads and updates an avatar for a specific assistant.
|
||||||
|
* @route POST /avatar/:assistant_id
|
||||||
|
* @param {string} req.params.assistant_id - The ID of the assistant.
|
||||||
|
* @param {Express.Multer.File} req.file - The avatar image file.
|
||||||
|
* @param {string} [req.body.metadata] - Optional metadata for the assistant's avatar.
|
||||||
|
* @returns {Object} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.post('/avatar/:assistant_id', upload.single('file'), controllers.uploadAssistantAvatar);
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
82
api/server/routes/assistants/v2.js
Normal file
82
api/server/routes/assistants/v2.js
Normal file
|
|
@ -0,0 +1,82 @@
|
||||||
|
const multer = require('multer');
|
||||||
|
const express = require('express');
|
||||||
|
const v1 = require('~/server/controllers/assistants/v1');
|
||||||
|
const v2 = require('~/server/controllers/assistants/v2');
|
||||||
|
const actions = require('./actions');
|
||||||
|
const tools = require('./tools');
|
||||||
|
|
||||||
|
const upload = multer();
|
||||||
|
const router = express.Router();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Assistant actions route.
|
||||||
|
* @route GET|POST /assistants/actions
|
||||||
|
*/
|
||||||
|
router.use('/actions', actions);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create an assistant.
|
||||||
|
* @route GET /assistants/tools
|
||||||
|
* @returns {TPlugin[]} 200 - application/json
|
||||||
|
*/
|
||||||
|
router.use('/tools', tools);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create an assistant.
|
||||||
|
* @route POST /assistants
|
||||||
|
* @param {AssistantCreateParams} req.body - The assistant creation parameters.
|
||||||
|
* @returns {Assistant} 201 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.post('/', v2.createAssistant);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Retrieves an assistant.
|
||||||
|
* @route GET /assistants/:id
|
||||||
|
* @param {string} req.params.id - Assistant identifier.
|
||||||
|
* @returns {Assistant} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.get('/:id', v1.retrieveAssistant);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Modifies an assistant.
|
||||||
|
* @route PATCH /assistants/:id
|
||||||
|
* @param {string} req.params.id - Assistant identifier.
|
||||||
|
* @param {AssistantUpdateParams} req.body - The assistant update parameters.
|
||||||
|
* @returns {Assistant} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.patch('/:id', v2.patchAssistant);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deletes an assistant.
|
||||||
|
* @route DELETE /assistants/:id
|
||||||
|
* @param {string} req.params.id - Assistant identifier.
|
||||||
|
* @returns {Assistant} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.delete('/:id', v1.deleteAssistant);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a list of assistants.
|
||||||
|
* @route GET /assistants
|
||||||
|
* @param {AssistantListParams} req.query - The assistant list parameters for pagination and sorting.
|
||||||
|
* @returns {AssistantListResponse} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.get('/', v1.listAssistants);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a list of the user's assistant documents (metadata saved to database).
|
||||||
|
* @route GET /assistants/documents
|
||||||
|
* @returns {AssistantDocument[]} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.get('/documents', v1.getAssistantDocuments);
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Uploads and updates an avatar for a specific assistant.
|
||||||
|
* @route POST /avatar/:assistant_id
|
||||||
|
* @param {string} req.params.assistant_id - The ID of the assistant.
|
||||||
|
* @param {Express.Multer.File} req.file - The avatar image file.
|
||||||
|
* @param {string} [req.body.metadata] - Optional metadata for the assistant's avatar.
|
||||||
|
* @returns {Object} 200 - success response - application/json
|
||||||
|
*/
|
||||||
|
router.post('/avatar/:assistant_id', upload.single('file'), v1.uploadAssistantAvatar);
|
||||||
|
|
||||||
|
module.exports = router;
|
||||||
|
|
@ -1,20 +1,59 @@
|
||||||
const {
|
const {
|
||||||
AuthTypeEnum,
|
|
||||||
EModelEndpoint,
|
|
||||||
actionDomainSeparator,
|
|
||||||
CacheKeys,
|
CacheKeys,
|
||||||
Constants,
|
Constants,
|
||||||
|
AuthTypeEnum,
|
||||||
|
actionDelimiter,
|
||||||
|
isImageVisionTool,
|
||||||
|
actionDomainSeparator,
|
||||||
} = require('librechat-data-provider');
|
} = require('librechat-data-provider');
|
||||||
const { encryptV2, decryptV2 } = require('~/server/utils/crypto');
|
const { encryptV2, decryptV2 } = require('~/server/utils/crypto');
|
||||||
const { getActions } = require('~/models/Action');
|
const { getActions, deleteActions } = require('~/models/Action');
|
||||||
|
const { deleteAssistant } = require('~/models/Assistant');
|
||||||
const { getLogStores } = require('~/cache');
|
const { getLogStores } = require('~/cache');
|
||||||
const { logger } = require('~/config');
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
|
const toolNameRegex = /^[a-zA-Z0-9_-]+$/;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validates tool name against regex pattern and updates if necessary.
|
||||||
|
* @param {object} params - The parameters for the function.
|
||||||
|
* @param {object} params.req - Express Request.
|
||||||
|
* @param {FunctionTool} params.tool - The tool object.
|
||||||
|
* @param {string} params.assistant_id - The assistant ID
|
||||||
|
* @returns {object|null} - Updated tool object or null if invalid and not an action.
|
||||||
|
*/
|
||||||
|
const validateAndUpdateTool = async ({ req, tool, assistant_id }) => {
|
||||||
|
let actions;
|
||||||
|
if (isImageVisionTool(tool)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (!toolNameRegex.test(tool.function.name)) {
|
||||||
|
const [functionName, domain] = tool.function.name.split(actionDelimiter);
|
||||||
|
actions = await getActions({ assistant_id, user: req.user.id }, true);
|
||||||
|
const matchingActions = actions.filter((action) => {
|
||||||
|
const metadata = action.metadata;
|
||||||
|
return metadata && metadata.domain === domain;
|
||||||
|
});
|
||||||
|
const action = matchingActions[0];
|
||||||
|
if (!action) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsedDomain = await domainParser(req, domain, true);
|
||||||
|
|
||||||
|
if (!parsedDomain) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
tool.function.name = `${functionName}${actionDelimiter}${parsedDomain}`;
|
||||||
|
}
|
||||||
|
return tool;
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Encodes or decodes a domain name to/from base64, or replacing periods with a custom separator.
|
* Encodes or decodes a domain name to/from base64, or replacing periods with a custom separator.
|
||||||
*
|
*
|
||||||
* Necessary because Azure OpenAI Assistants API doesn't support periods in function
|
* Necessary due to `[a-zA-Z0-9_-]*` Regex Validation, limited to a 64-character maximum.
|
||||||
* names due to `[a-zA-Z0-9_-]*` Regex Validation, limited to a 64-character maximum.
|
|
||||||
*
|
*
|
||||||
* @param {Express.Request} req - The Express Request object.
|
* @param {Express.Request} req - The Express Request object.
|
||||||
* @param {string} domain - The domain name to encode/decode.
|
* @param {string} domain - The domain name to encode/decode.
|
||||||
|
|
@ -26,10 +65,6 @@ async function domainParser(req, domain, inverse = false) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
|
|
||||||
return domain;
|
|
||||||
}
|
|
||||||
|
|
||||||
const domainsCache = getLogStores(CacheKeys.ENCODED_DOMAINS);
|
const domainsCache = getLogStores(CacheKeys.ENCODED_DOMAINS);
|
||||||
const cachedDomain = await domainsCache.get(domain);
|
const cachedDomain = await domainsCache.get(domain);
|
||||||
if (inverse && cachedDomain) {
|
if (inverse && cachedDomain) {
|
||||||
|
|
@ -170,10 +205,29 @@ function decryptMetadata(metadata) {
|
||||||
return decryptedMetadata;
|
return decryptedMetadata;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Deletes an action and its corresponding assistant.
|
||||||
|
* @param {Object} params - The parameters for the function.
|
||||||
|
* @param {OpenAIClient} params.req - The Express Request object.
|
||||||
|
* @param {string} params.assistant_id - The ID of the assistant.
|
||||||
|
*/
|
||||||
|
const deleteAssistantActions = async ({ req, assistant_id }) => {
|
||||||
|
try {
|
||||||
|
await deleteActions({ assistant_id, user: req.user.id });
|
||||||
|
await deleteAssistant({ assistant_id, user: req.user.id });
|
||||||
|
} catch (error) {
|
||||||
|
const message = 'Trouble deleting Assistant Actions for Assistant ID: ' + assistant_id;
|
||||||
|
logger.error(message, error);
|
||||||
|
throw new Error(message);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
loadActionSets,
|
deleteAssistantActions,
|
||||||
|
validateAndUpdateTool,
|
||||||
createActionTool,
|
createActionTool,
|
||||||
encryptMetadata,
|
encryptMetadata,
|
||||||
decryptMetadata,
|
decryptMetadata,
|
||||||
|
loadActionSets,
|
||||||
domainParser,
|
domainParser,
|
||||||
};
|
};
|
||||||
|
|
|
||||||
|
|
@ -72,7 +72,14 @@ const AppService = async (app) => {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (config?.endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) {
|
if (config?.endpoints?.[EModelEndpoint.azureOpenAI]?.assistants) {
|
||||||
endpointLocals[EModelEndpoint.assistants] = azureAssistantsDefaults();
|
endpointLocals[EModelEndpoint.azureAssistants] = azureAssistantsDefaults();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (config?.endpoints?.[EModelEndpoint.azureAssistants]) {
|
||||||
|
endpointLocals[EModelEndpoint.azureAssistants] = assistantsConfigSetup(
|
||||||
|
config,
|
||||||
|
endpointLocals[EModelEndpoint.azureAssistants],
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (config?.endpoints?.[EModelEndpoint.assistants]) {
|
if (config?.endpoints?.[EModelEndpoint.assistants]) {
|
||||||
|
|
|
||||||
|
|
@ -393,8 +393,9 @@ async function runAssistant({
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const { endpoint = EModelEndpoint.azureAssistants } = openai.req.body;
|
||||||
/** @type {TCustomConfig.endpoints.assistants} */
|
/** @type {TCustomConfig.endpoints.assistants} */
|
||||||
const assistantsEndpointConfig = openai.req.app.locals?.[EModelEndpoint.assistants] ?? {};
|
const assistantsEndpointConfig = openai.req.app.locals?.[endpoint] ?? {};
|
||||||
const { pollIntervalMs, timeoutMs } = assistantsEndpointConfig;
|
const { pollIntervalMs, timeoutMs } = assistantsEndpointConfig;
|
||||||
|
|
||||||
const run = await waitForRun({
|
const run = await waitForRun({
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,7 @@ const { isUserProvided, generateConfig } = require('~/server/utils');
|
||||||
|
|
||||||
const {
|
const {
|
||||||
OPENAI_API_KEY: openAIApiKey,
|
OPENAI_API_KEY: openAIApiKey,
|
||||||
|
AZURE_ASSISTANTS_API_KEY: azureAssistantsApiKey,
|
||||||
ASSISTANTS_API_KEY: assistantsApiKey,
|
ASSISTANTS_API_KEY: assistantsApiKey,
|
||||||
AZURE_API_KEY: azureOpenAIApiKey,
|
AZURE_API_KEY: azureOpenAIApiKey,
|
||||||
ANTHROPIC_API_KEY: anthropicApiKey,
|
ANTHROPIC_API_KEY: anthropicApiKey,
|
||||||
|
|
@ -13,6 +14,7 @@ const {
|
||||||
OPENAI_REVERSE_PROXY,
|
OPENAI_REVERSE_PROXY,
|
||||||
AZURE_OPENAI_BASEURL,
|
AZURE_OPENAI_BASEURL,
|
||||||
ASSISTANTS_BASE_URL,
|
ASSISTANTS_BASE_URL,
|
||||||
|
AZURE_ASSISTANTS_BASE_URL,
|
||||||
} = process.env ?? {};
|
} = process.env ?? {};
|
||||||
|
|
||||||
const useAzurePlugins = !!PLUGINS_USE_AZURE;
|
const useAzurePlugins = !!PLUGINS_USE_AZURE;
|
||||||
|
|
@ -28,11 +30,20 @@ module.exports = {
|
||||||
useAzurePlugins,
|
useAzurePlugins,
|
||||||
userProvidedOpenAI,
|
userProvidedOpenAI,
|
||||||
googleKey,
|
googleKey,
|
||||||
[EModelEndpoint.openAI]: generateConfig(openAIApiKey, OPENAI_REVERSE_PROXY),
|
|
||||||
[EModelEndpoint.assistants]: generateConfig(assistantsApiKey, ASSISTANTS_BASE_URL, true),
|
|
||||||
[EModelEndpoint.azureOpenAI]: generateConfig(azureOpenAIApiKey, AZURE_OPENAI_BASEURL),
|
|
||||||
[EModelEndpoint.chatGPTBrowser]: generateConfig(chatGPTToken),
|
|
||||||
[EModelEndpoint.anthropic]: generateConfig(anthropicApiKey),
|
|
||||||
[EModelEndpoint.bingAI]: generateConfig(bingToken),
|
[EModelEndpoint.bingAI]: generateConfig(bingToken),
|
||||||
|
[EModelEndpoint.anthropic]: generateConfig(anthropicApiKey),
|
||||||
|
[EModelEndpoint.chatGPTBrowser]: generateConfig(chatGPTToken),
|
||||||
|
[EModelEndpoint.openAI]: generateConfig(openAIApiKey, OPENAI_REVERSE_PROXY),
|
||||||
|
[EModelEndpoint.azureOpenAI]: generateConfig(azureOpenAIApiKey, AZURE_OPENAI_BASEURL),
|
||||||
|
[EModelEndpoint.assistants]: generateConfig(
|
||||||
|
assistantsApiKey,
|
||||||
|
ASSISTANTS_BASE_URL,
|
||||||
|
EModelEndpoint.assistants,
|
||||||
|
),
|
||||||
|
[EModelEndpoint.azureAssistants]: generateConfig(
|
||||||
|
azureAssistantsApiKey,
|
||||||
|
AZURE_ASSISTANTS_BASE_URL,
|
||||||
|
EModelEndpoint.azureAssistants,
|
||||||
|
),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
|
||||||
|
|
@ -53,7 +53,7 @@ async function loadConfigEndpoints(req) {
|
||||||
|
|
||||||
if (req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
|
if (req.app.locals[EModelEndpoint.azureOpenAI]?.assistants) {
|
||||||
/** @type {Omit<TConfig, 'order'>} */
|
/** @type {Omit<TConfig, 'order'>} */
|
||||||
endpointsConfig[EModelEndpoint.assistants] = {
|
endpointsConfig[EModelEndpoint.azureAssistants] = {
|
||||||
userProvide: false,
|
userProvide: false,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -30,7 +30,7 @@ async function loadConfigModels(req) {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (azureEndpoint?.assistants && azureConfig.assistantModels) {
|
if (azureEndpoint?.assistants && azureConfig.assistantModels) {
|
||||||
modelsConfig[EModelEndpoint.assistants] = azureConfig.assistantModels;
|
modelsConfig[EModelEndpoint.azureAssistants] = azureConfig.assistantModels;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!Array.isArray(endpoints[EModelEndpoint.custom])) {
|
if (!Array.isArray(endpoints[EModelEndpoint.custom])) {
|
||||||
|
|
|
||||||
|
|
@ -9,13 +9,15 @@ const { config } = require('./EndpointService');
|
||||||
*/
|
*/
|
||||||
async function loadDefaultEndpointsConfig(req) {
|
async function loadDefaultEndpointsConfig(req) {
|
||||||
const { google, gptPlugins } = await loadAsyncEndpoints(req);
|
const { google, gptPlugins } = await loadAsyncEndpoints(req);
|
||||||
const { openAI, assistants, bingAI, anthropic, azureOpenAI, chatGPTBrowser } = config;
|
const { openAI, assistants, azureAssistants, bingAI, anthropic, azureOpenAI, chatGPTBrowser } =
|
||||||
|
config;
|
||||||
|
|
||||||
const enabledEndpoints = getEnabledEndpoints();
|
const enabledEndpoints = getEnabledEndpoints();
|
||||||
|
|
||||||
const endpointConfig = {
|
const endpointConfig = {
|
||||||
[EModelEndpoint.openAI]: openAI,
|
[EModelEndpoint.openAI]: openAI,
|
||||||
[EModelEndpoint.assistants]: assistants,
|
[EModelEndpoint.assistants]: assistants,
|
||||||
|
[EModelEndpoint.azureAssistants]: azureAssistants,
|
||||||
[EModelEndpoint.azureOpenAI]: azureOpenAI,
|
[EModelEndpoint.azureOpenAI]: azureOpenAI,
|
||||||
[EModelEndpoint.google]: google,
|
[EModelEndpoint.google]: google,
|
||||||
[EModelEndpoint.bingAI]: bingAI,
|
[EModelEndpoint.bingAI]: bingAI,
|
||||||
|
|
|
||||||
|
|
@ -25,6 +25,7 @@ async function loadDefaultModels(req) {
|
||||||
plugins: true,
|
plugins: true,
|
||||||
});
|
});
|
||||||
const assistants = await getOpenAIModels({ assistants: true });
|
const assistants = await getOpenAIModels({ assistants: true });
|
||||||
|
const azureAssistants = await getOpenAIModels({ azureAssistants: true });
|
||||||
|
|
||||||
return {
|
return {
|
||||||
[EModelEndpoint.openAI]: openAI,
|
[EModelEndpoint.openAI]: openAI,
|
||||||
|
|
@ -35,6 +36,7 @@ async function loadDefaultModels(req) {
|
||||||
[EModelEndpoint.bingAI]: ['BingAI', 'Sydney'],
|
[EModelEndpoint.bingAI]: ['BingAI', 'Sydney'],
|
||||||
[EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
|
[EModelEndpoint.chatGPTBrowser]: chatGPTBrowser,
|
||||||
[EModelEndpoint.assistants]: assistants,
|
[EModelEndpoint.assistants]: assistants,
|
||||||
|
[EModelEndpoint.azureAssistants]: azureAssistants,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -2,95 +2,8 @@ const addTitle = require('./addTitle');
|
||||||
const buildOptions = require('./buildOptions');
|
const buildOptions = require('./buildOptions');
|
||||||
const initializeClient = require('./initializeClient');
|
const initializeClient = require('./initializeClient');
|
||||||
|
|
||||||
/**
|
|
||||||
* Asynchronously lists assistants based on provided query parameters.
|
|
||||||
*
|
|
||||||
* Initializes the client with the current request and response objects and lists assistants
|
|
||||||
* according to the query parameters. This function abstracts the logic for non-Azure paths.
|
|
||||||
*
|
|
||||||
* @async
|
|
||||||
* @param {object} params - The parameters object.
|
|
||||||
* @param {object} params.req - The request object, used for initializing the client.
|
|
||||||
* @param {object} params.res - The response object, used for initializing the client.
|
|
||||||
* @param {object} params.query - The query parameters to list assistants (e.g., limit, order).
|
|
||||||
* @returns {Promise<object>} A promise that resolves to the response from the `openai.beta.assistants.list` method call.
|
|
||||||
*/
|
|
||||||
const listAssistants = async ({ req, res, query }) => {
|
|
||||||
const { openai } = await initializeClient({ req, res });
|
|
||||||
return openai.beta.assistants.list(query);
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Asynchronously lists assistants for Azure configured groups.
|
|
||||||
*
|
|
||||||
* Iterates through Azure configured assistant groups, initializes the client with the current request and response objects,
|
|
||||||
* lists assistants based on the provided query parameters, and merges their data alongside the model information into a single array.
|
|
||||||
*
|
|
||||||
* @async
|
|
||||||
* @param {object} params - The parameters object.
|
|
||||||
* @param {object} params.req - The request object, used for initializing the client and manipulating the request body.
|
|
||||||
* @param {object} params.res - The response object, used for initializing the client.
|
|
||||||
* @param {TAzureConfig} params.azureConfig - The Azure configuration object containing assistantGroups and groupMap.
|
|
||||||
* @param {object} params.query - The query parameters to list assistants (e.g., limit, order).
|
|
||||||
* @returns {Promise<AssistantListResponse>} A promise that resolves to an array of assistant data merged with their respective model information.
|
|
||||||
*/
|
|
||||||
const listAssistantsForAzure = async ({ req, res, azureConfig = {}, query }) => {
|
|
||||||
/** @type {Array<[string, TAzureModelConfig]>} */
|
|
||||||
const groupModelTuples = [];
|
|
||||||
const promises = [];
|
|
||||||
/** @type {Array<TAzureGroup>} */
|
|
||||||
const groups = [];
|
|
||||||
|
|
||||||
const { groupMap, assistantGroups } = azureConfig;
|
|
||||||
|
|
||||||
for (const groupName of assistantGroups) {
|
|
||||||
const group = groupMap[groupName];
|
|
||||||
groups.push(group);
|
|
||||||
|
|
||||||
const currentModelTuples = Object.entries(group?.models);
|
|
||||||
groupModelTuples.push(currentModelTuples);
|
|
||||||
|
|
||||||
/* The specified model is only necessary to
|
|
||||||
fetch assistants for the shared instance */
|
|
||||||
req.body.model = currentModelTuples[0][0];
|
|
||||||
promises.push(listAssistants({ req, res, query }));
|
|
||||||
}
|
|
||||||
|
|
||||||
const resolvedQueries = await Promise.all(promises);
|
|
||||||
const data = resolvedQueries.flatMap((res, i) =>
|
|
||||||
res.data.map((assistant) => {
|
|
||||||
const deploymentName = assistant.model;
|
|
||||||
const currentGroup = groups[i];
|
|
||||||
const currentModelTuples = groupModelTuples[i];
|
|
||||||
const firstModel = currentModelTuples[0][0];
|
|
||||||
|
|
||||||
if (currentGroup.deploymentName === deploymentName) {
|
|
||||||
return { ...assistant, model: firstModel };
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const [model, modelConfig] of currentModelTuples) {
|
|
||||||
if (modelConfig.deploymentName === deploymentName) {
|
|
||||||
return { ...assistant, model };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return { ...assistant, model: firstModel };
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
return {
|
|
||||||
first_id: data[0]?.id,
|
|
||||||
last_id: data[data.length - 1]?.id,
|
|
||||||
object: 'list',
|
|
||||||
has_more: false,
|
|
||||||
data,
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
addTitle,
|
addTitle,
|
||||||
buildOptions,
|
buildOptions,
|
||||||
initializeClient,
|
initializeClient,
|
||||||
listAssistants,
|
|
||||||
listAssistantsForAzure,
|
|
||||||
};
|
};
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,6 @@
|
||||||
const OpenAI = require('openai');
|
const OpenAI = require('openai');
|
||||||
const { HttpsProxyAgent } = require('https-proxy-agent');
|
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||||
const {
|
const { ErrorTypes, EModelEndpoint } = require('librechat-data-provider');
|
||||||
ErrorTypes,
|
|
||||||
EModelEndpoint,
|
|
||||||
resolveHeaders,
|
|
||||||
mapModelToAzureConfig,
|
|
||||||
} = require('librechat-data-provider');
|
|
||||||
const {
|
const {
|
||||||
getUserKeyValues,
|
getUserKeyValues,
|
||||||
getUserKeyExpiry,
|
getUserKeyExpiry,
|
||||||
|
|
@ -13,9 +8,8 @@ const {
|
||||||
} = require('~/server/services/UserService');
|
} = require('~/server/services/UserService');
|
||||||
const OpenAIClient = require('~/app/clients/OpenAIClient');
|
const OpenAIClient = require('~/app/clients/OpenAIClient');
|
||||||
const { isUserProvided } = require('~/server/utils');
|
const { isUserProvided } = require('~/server/utils');
|
||||||
const { constructAzureURL } = require('~/utils');
|
|
||||||
|
|
||||||
const initializeClient = async ({ req, res, endpointOption, initAppClient = false }) => {
|
const initializeClient = async ({ req, res, endpointOption, version, initAppClient = false }) => {
|
||||||
const { PROXY, OPENAI_ORGANIZATION, ASSISTANTS_API_KEY, ASSISTANTS_BASE_URL } = process.env;
|
const { PROXY, OPENAI_ORGANIZATION, ASSISTANTS_API_KEY, ASSISTANTS_BASE_URL } = process.env;
|
||||||
|
|
||||||
const userProvidesKey = isUserProvided(ASSISTANTS_API_KEY);
|
const userProvidesKey = isUserProvided(ASSISTANTS_API_KEY);
|
||||||
|
|
@ -34,7 +28,11 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals
|
||||||
let apiKey = userProvidesKey ? userValues.apiKey : ASSISTANTS_API_KEY;
|
let apiKey = userProvidesKey ? userValues.apiKey : ASSISTANTS_API_KEY;
|
||||||
let baseURL = userProvidesURL ? userValues.baseURL : ASSISTANTS_BASE_URL;
|
let baseURL = userProvidesURL ? userValues.baseURL : ASSISTANTS_BASE_URL;
|
||||||
|
|
||||||
const opts = {};
|
const opts = {
|
||||||
|
defaultHeaders: {
|
||||||
|
'OpenAI-Beta': `assistants=${version}`,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
const clientOptions = {
|
const clientOptions = {
|
||||||
reverseProxyUrl: baseURL ?? null,
|
reverseProxyUrl: baseURL ?? null,
|
||||||
|
|
@ -44,54 +42,6 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals
|
||||||
...endpointOption,
|
...endpointOption,
|
||||||
};
|
};
|
||||||
|
|
||||||
/** @type {TAzureConfig | undefined} */
|
|
||||||
const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];
|
|
||||||
|
|
||||||
/** @type {AzureOptions | undefined} */
|
|
||||||
let azureOptions;
|
|
||||||
|
|
||||||
if (azureConfig && azureConfig.assistants) {
|
|
||||||
const { modelGroupMap, groupMap, assistantModels } = azureConfig;
|
|
||||||
const modelName = req.body.model ?? req.query.model ?? assistantModels[0];
|
|
||||||
const {
|
|
||||||
azureOptions: currentOptions,
|
|
||||||
baseURL: azureBaseURL,
|
|
||||||
headers = {},
|
|
||||||
serverless,
|
|
||||||
} = mapModelToAzureConfig({
|
|
||||||
modelName,
|
|
||||||
modelGroupMap,
|
|
||||||
groupMap,
|
|
||||||
});
|
|
||||||
|
|
||||||
azureOptions = currentOptions;
|
|
||||||
|
|
||||||
baseURL = constructAzureURL({
|
|
||||||
baseURL: azureBaseURL ?? 'https://${INSTANCE_NAME}.openai.azure.com/openai',
|
|
||||||
azureOptions,
|
|
||||||
});
|
|
||||||
|
|
||||||
apiKey = azureOptions.azureOpenAIApiKey;
|
|
||||||
opts.defaultQuery = { 'api-version': azureOptions.azureOpenAIApiVersion };
|
|
||||||
opts.defaultHeaders = resolveHeaders({ ...headers, 'api-key': apiKey });
|
|
||||||
opts.model = azureOptions.azureOpenAIApiDeploymentName;
|
|
||||||
|
|
||||||
if (initAppClient) {
|
|
||||||
clientOptions.titleConvo = azureConfig.titleConvo;
|
|
||||||
clientOptions.titleModel = azureConfig.titleModel;
|
|
||||||
clientOptions.titleMethod = azureConfig.titleMethod ?? 'completion';
|
|
||||||
|
|
||||||
const groupName = modelGroupMap[modelName].group;
|
|
||||||
clientOptions.addParams = azureConfig.groupMap[groupName].addParams;
|
|
||||||
clientOptions.dropParams = azureConfig.groupMap[groupName].dropParams;
|
|
||||||
clientOptions.forcePrompt = azureConfig.groupMap[groupName].forcePrompt;
|
|
||||||
|
|
||||||
clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl;
|
|
||||||
clientOptions.headers = opts.defaultHeaders;
|
|
||||||
clientOptions.azure = !serverless && azureOptions;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (userProvidesKey & !apiKey) {
|
if (userProvidesKey & !apiKey) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
JSON.stringify({
|
JSON.stringify({
|
||||||
|
|
@ -125,10 +75,6 @@ const initializeClient = async ({ req, res, endpointOption, initAppClient = fals
|
||||||
openai.req = req;
|
openai.req = req;
|
||||||
openai.res = res;
|
openai.res = res;
|
||||||
|
|
||||||
if (azureOptions) {
|
|
||||||
openai.locals = { ...(openai.locals ?? {}), azureOptions };
|
|
||||||
}
|
|
||||||
|
|
||||||
if (endpointOption && initAppClient) {
|
if (endpointOption && initAppClient) {
|
||||||
const client = new OpenAIClient(apiKey, clientOptions);
|
const client = new OpenAIClient(apiKey, clientOptions);
|
||||||
return {
|
return {
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
const buildOptions = (endpoint, parsedBody) => {
|
||||||
|
// eslint-disable-next-line no-unused-vars
|
||||||
|
const { promptPrefix, assistant_id, iconURL, greeting, spec, ...rest } = parsedBody;
|
||||||
|
const endpointOption = {
|
||||||
|
endpoint,
|
||||||
|
promptPrefix,
|
||||||
|
assistant_id,
|
||||||
|
iconURL,
|
||||||
|
greeting,
|
||||||
|
spec,
|
||||||
|
modelOptions: {
|
||||||
|
...rest,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
return endpointOption;
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = buildOptions;
|
||||||
7
api/server/services/Endpoints/azureAssistants/index.js
Normal file
7
api/server/services/Endpoints/azureAssistants/index.js
Normal file
|
|
@ -0,0 +1,7 @@
|
||||||
|
const buildOptions = require('./buildOptions');
|
||||||
|
const initializeClient = require('./initializeClient');
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
buildOptions,
|
||||||
|
initializeClient,
|
||||||
|
};
|
||||||
|
|
@ -0,0 +1,155 @@
|
||||||
|
const OpenAI = require('openai');
|
||||||
|
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||||
|
const {
|
||||||
|
ErrorTypes,
|
||||||
|
EModelEndpoint,
|
||||||
|
resolveHeaders,
|
||||||
|
mapModelToAzureConfig,
|
||||||
|
} = require('librechat-data-provider');
|
||||||
|
const {
|
||||||
|
getUserKeyValues,
|
||||||
|
getUserKeyExpiry,
|
||||||
|
checkUserKeyExpiry,
|
||||||
|
} = require('~/server/services/UserService');
|
||||||
|
const OpenAIClient = require('~/app/clients/OpenAIClient');
|
||||||
|
const { isUserProvided } = require('~/server/utils');
|
||||||
|
const { constructAzureURL } = require('~/utils');
|
||||||
|
|
||||||
|
const initializeClient = async ({ req, res, version, endpointOption, initAppClient = false }) => {
|
||||||
|
const { PROXY, OPENAI_ORGANIZATION, AZURE_ASSISTANTS_API_KEY, AZURE_ASSISTANTS_BASE_URL } =
|
||||||
|
process.env;
|
||||||
|
|
||||||
|
const userProvidesKey = isUserProvided(AZURE_ASSISTANTS_API_KEY);
|
||||||
|
const userProvidesURL = isUserProvided(AZURE_ASSISTANTS_BASE_URL);
|
||||||
|
|
||||||
|
let userValues = null;
|
||||||
|
if (userProvidesKey || userProvidesURL) {
|
||||||
|
const expiresAt = await getUserKeyExpiry({
|
||||||
|
userId: req.user.id,
|
||||||
|
name: EModelEndpoint.azureAssistants,
|
||||||
|
});
|
||||||
|
checkUserKeyExpiry(expiresAt, EModelEndpoint.azureAssistants);
|
||||||
|
userValues = await getUserKeyValues({
|
||||||
|
userId: req.user.id,
|
||||||
|
name: EModelEndpoint.azureAssistants,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let apiKey = userProvidesKey ? userValues.apiKey : AZURE_ASSISTANTS_API_KEY;
|
||||||
|
let baseURL = userProvidesURL ? userValues.baseURL : AZURE_ASSISTANTS_BASE_URL;
|
||||||
|
|
||||||
|
const opts = {};
|
||||||
|
|
||||||
|
const clientOptions = {
|
||||||
|
reverseProxyUrl: baseURL ?? null,
|
||||||
|
proxy: PROXY ?? null,
|
||||||
|
req,
|
||||||
|
res,
|
||||||
|
...endpointOption,
|
||||||
|
};
|
||||||
|
|
||||||
|
/** @type {TAzureConfig | undefined} */
|
||||||
|
const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];
|
||||||
|
|
||||||
|
/** @type {AzureOptions | undefined} */
|
||||||
|
let azureOptions;
|
||||||
|
|
||||||
|
if (azureConfig && azureConfig.assistants) {
|
||||||
|
const { modelGroupMap, groupMap, assistantModels } = azureConfig;
|
||||||
|
const modelName = req.body.model ?? req.query.model ?? assistantModels[0];
|
||||||
|
const {
|
||||||
|
azureOptions: currentOptions,
|
||||||
|
baseURL: azureBaseURL,
|
||||||
|
headers = {},
|
||||||
|
serverless,
|
||||||
|
} = mapModelToAzureConfig({
|
||||||
|
modelName,
|
||||||
|
modelGroupMap,
|
||||||
|
groupMap,
|
||||||
|
});
|
||||||
|
|
||||||
|
azureOptions = currentOptions;
|
||||||
|
|
||||||
|
baseURL = constructAzureURL({
|
||||||
|
baseURL: azureBaseURL ?? 'https://${INSTANCE_NAME}.openai.azure.com/openai',
|
||||||
|
azureOptions,
|
||||||
|
});
|
||||||
|
|
||||||
|
apiKey = azureOptions.azureOpenAIApiKey;
|
||||||
|
opts.defaultQuery = { 'api-version': azureOptions.azureOpenAIApiVersion };
|
||||||
|
opts.defaultHeaders = resolveHeaders({
|
||||||
|
...headers,
|
||||||
|
'api-key': apiKey,
|
||||||
|
'OpenAI-Beta': `assistants=${version}`,
|
||||||
|
});
|
||||||
|
opts.model = azureOptions.azureOpenAIApiDeploymentName;
|
||||||
|
|
||||||
|
if (initAppClient) {
|
||||||
|
clientOptions.titleConvo = azureConfig.titleConvo;
|
||||||
|
clientOptions.titleModel = azureConfig.titleModel;
|
||||||
|
clientOptions.titleMethod = azureConfig.titleMethod ?? 'completion';
|
||||||
|
|
||||||
|
const groupName = modelGroupMap[modelName].group;
|
||||||
|
clientOptions.addParams = azureConfig.groupMap[groupName].addParams;
|
||||||
|
clientOptions.dropParams = azureConfig.groupMap[groupName].dropParams;
|
||||||
|
clientOptions.forcePrompt = azureConfig.groupMap[groupName].forcePrompt;
|
||||||
|
|
||||||
|
clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl;
|
||||||
|
clientOptions.headers = opts.defaultHeaders;
|
||||||
|
clientOptions.azure = !serverless && azureOptions;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (userProvidesKey & !apiKey) {
|
||||||
|
throw new Error(
|
||||||
|
JSON.stringify({
|
||||||
|
type: ErrorTypes.NO_USER_KEY,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!apiKey) {
|
||||||
|
throw new Error('Assistants API key not provided. Please provide it again.');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (baseURL) {
|
||||||
|
opts.baseURL = baseURL;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (PROXY) {
|
||||||
|
opts.httpAgent = new HttpsProxyAgent(PROXY);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (OPENAI_ORGANIZATION) {
|
||||||
|
opts.organization = OPENAI_ORGANIZATION;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** @type {OpenAIClient} */
|
||||||
|
const openai = new OpenAI({
|
||||||
|
apiKey,
|
||||||
|
...opts,
|
||||||
|
});
|
||||||
|
|
||||||
|
openai.req = req;
|
||||||
|
openai.res = res;
|
||||||
|
|
||||||
|
if (azureOptions) {
|
||||||
|
openai.locals = { ...(openai.locals ?? {}), azureOptions };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (endpointOption && initAppClient) {
|
||||||
|
const client = new OpenAIClient(apiKey, clientOptions);
|
||||||
|
return {
|
||||||
|
client,
|
||||||
|
openai,
|
||||||
|
openAIApiKey: apiKey,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
openai,
|
||||||
|
openAIApiKey: apiKey,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports = initializeClient;
|
||||||
|
|
@ -0,0 +1,112 @@
|
||||||
|
// const OpenAI = require('openai');
|
||||||
|
const { HttpsProxyAgent } = require('https-proxy-agent');
|
||||||
|
const { ErrorTypes } = require('librechat-data-provider');
|
||||||
|
const { getUserKey, getUserKeyExpiry, getUserKeyValues } = require('~/server/services/UserService');
|
||||||
|
const initializeClient = require('./initializeClient');
|
||||||
|
// const { OpenAIClient } = require('~/app');
|
||||||
|
|
||||||
|
jest.mock('~/server/services/UserService', () => ({
|
||||||
|
getUserKey: jest.fn(),
|
||||||
|
getUserKeyExpiry: jest.fn(),
|
||||||
|
getUserKeyValues: jest.fn(),
|
||||||
|
checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry,
|
||||||
|
}));
|
||||||
|
|
||||||
|
const today = new Date();
|
||||||
|
const tenDaysFromToday = new Date(today.setDate(today.getDate() + 10));
|
||||||
|
const isoString = tenDaysFromToday.toISOString();
|
||||||
|
|
||||||
|
describe('initializeClient', () => {
|
||||||
|
// Set up environment variables
|
||||||
|
const originalEnvironment = process.env;
|
||||||
|
const app = {
|
||||||
|
locals: {},
|
||||||
|
};
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
jest.resetModules(); // Clears the cache
|
||||||
|
process.env = { ...originalEnvironment }; // Make a copy
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(() => {
|
||||||
|
process.env = originalEnvironment; // Restore original env vars
|
||||||
|
});
|
||||||
|
|
||||||
|
test('initializes OpenAI client with default API key and URL', async () => {
|
||||||
|
process.env.ASSISTANTS_API_KEY = 'default-api-key';
|
||||||
|
process.env.ASSISTANTS_BASE_URL = 'https://default.api.url';
|
||||||
|
|
||||||
|
// Assuming 'isUserProvided' to return false for this test case
|
||||||
|
jest.mock('~/server/utils', () => ({
|
||||||
|
isUserProvided: jest.fn().mockReturnValueOnce(false),
|
||||||
|
}));
|
||||||
|
|
||||||
|
const req = { user: { id: 'user123' }, app };
|
||||||
|
const res = {};
|
||||||
|
|
||||||
|
const { openai, openAIApiKey } = await initializeClient({ req, res });
|
||||||
|
expect(openai.apiKey).toBe('default-api-key');
|
||||||
|
expect(openAIApiKey).toBe('default-api-key');
|
||||||
|
expect(openai.baseURL).toBe('https://default.api.url');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('initializes OpenAI client with user-provided API key and URL', async () => {
|
||||||
|
process.env.ASSISTANTS_API_KEY = 'user_provided';
|
||||||
|
process.env.ASSISTANTS_BASE_URL = 'user_provided';
|
||||||
|
|
||||||
|
getUserKeyValues.mockResolvedValue({ apiKey: 'user-api-key', baseURL: 'https://user.api.url' });
|
||||||
|
getUserKeyExpiry.mockResolvedValue(isoString);
|
||||||
|
|
||||||
|
const req = { user: { id: 'user123' }, app };
|
||||||
|
const res = {};
|
||||||
|
|
||||||
|
const { openai, openAIApiKey } = await initializeClient({ req, res });
|
||||||
|
expect(openAIApiKey).toBe('user-api-key');
|
||||||
|
expect(openai.apiKey).toBe('user-api-key');
|
||||||
|
expect(openai.baseURL).toBe('https://user.api.url');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('throws error for invalid JSON in user-provided values', async () => {
|
||||||
|
process.env.ASSISTANTS_API_KEY = 'user_provided';
|
||||||
|
getUserKey.mockResolvedValue('invalid-json');
|
||||||
|
getUserKeyExpiry.mockResolvedValue(isoString);
|
||||||
|
getUserKeyValues.mockImplementation(() => {
|
||||||
|
let userValues = getUserKey();
|
||||||
|
try {
|
||||||
|
userValues = JSON.parse(userValues);
|
||||||
|
} catch (e) {
|
||||||
|
throw new Error(
|
||||||
|
JSON.stringify({
|
||||||
|
type: ErrorTypes.INVALID_USER_KEY,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return userValues;
|
||||||
|
});
|
||||||
|
|
||||||
|
const req = { user: { id: 'user123' } };
|
||||||
|
const res = {};
|
||||||
|
|
||||||
|
await expect(initializeClient({ req, res })).rejects.toThrow(/invalid_user_key/);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('throws error if API key is not provided', async () => {
|
||||||
|
delete process.env.ASSISTANTS_API_KEY; // Simulate missing API key
|
||||||
|
|
||||||
|
const req = { user: { id: 'user123' }, app };
|
||||||
|
const res = {};
|
||||||
|
|
||||||
|
await expect(initializeClient({ req, res })).rejects.toThrow(/Assistants API key not/);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('initializes OpenAI client with proxy configuration', async () => {
|
||||||
|
process.env.ASSISTANTS_API_KEY = 'test-key';
|
||||||
|
process.env.PROXY = 'http://proxy.server';
|
||||||
|
|
||||||
|
const req = { user: { id: 'user123' }, app };
|
||||||
|
const res = {};
|
||||||
|
|
||||||
|
const { openai } = await initializeClient({ req, res });
|
||||||
|
expect(openai.httpAgent).toBeInstanceOf(HttpsProxyAgent);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
@ -14,9 +14,11 @@ const { logger } = require('~/config');
|
||||||
* @returns {Promise<OpenAIFile>}
|
* @returns {Promise<OpenAIFile>}
|
||||||
*/
|
*/
|
||||||
async function uploadOpenAIFile({ req, file, openai }) {
|
async function uploadOpenAIFile({ req, file, openai }) {
|
||||||
|
const { height, width } = req.body;
|
||||||
|
const isImage = height && width;
|
||||||
const uploadedFile = await openai.files.create({
|
const uploadedFile = await openai.files.create({
|
||||||
file: fs.createReadStream(file.path),
|
file: fs.createReadStream(file.path),
|
||||||
purpose: FilePurpose.Assistants,
|
purpose: isImage ? FilePurpose.Vision : FilePurpose.Assistants,
|
||||||
});
|
});
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
|
|
@ -34,7 +36,7 @@ async function uploadOpenAIFile({ req, file, openai }) {
|
||||||
await sleep(sleepTime);
|
await sleep(sleepTime);
|
||||||
}
|
}
|
||||||
|
|
||||||
return uploadedFile;
|
return isImage ? { ...uploadedFile, height, width } : uploadedFile;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
||||||
|
|
@ -11,15 +11,20 @@ const {
|
||||||
mergeFileConfig,
|
mergeFileConfig,
|
||||||
hostImageIdSuffix,
|
hostImageIdSuffix,
|
||||||
hostImageNamePrefix,
|
hostImageNamePrefix,
|
||||||
|
isAssistantsEndpoint,
|
||||||
} = require('librechat-data-provider');
|
} = require('librechat-data-provider');
|
||||||
const { convertImage, resizeAndConvert } = require('~/server/services/Files/images');
|
const { convertImage, resizeAndConvert } = require('~/server/services/Files/images');
|
||||||
const { initializeClient } = require('~/server/services/Endpoints/assistants');
|
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
|
||||||
const { createFile, updateFileUsage, deleteFiles } = require('~/models/File');
|
const { createFile, updateFileUsage, deleteFiles } = require('~/models/File');
|
||||||
|
const { addResourceFileId } = require('~/server/controllers/assistants/v2');
|
||||||
const { LB_QueueAsyncCall } = require('~/server/utils/queue');
|
const { LB_QueueAsyncCall } = require('~/server/utils/queue');
|
||||||
const { getStrategyFunctions } = require('./strategies');
|
const { getStrategyFunctions } = require('./strategies');
|
||||||
const { determineFileType } = require('~/server/utils');
|
const { determineFileType } = require('~/server/utils');
|
||||||
const { logger } = require('~/config');
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
|
const checkOpenAIStorage = (source) =>
|
||||||
|
source === FileSources.openai || source === FileSources.azure;
|
||||||
|
|
||||||
const processFiles = async (files) => {
|
const processFiles = async (files) => {
|
||||||
const promises = [];
|
const promises = [];
|
||||||
for (let file of files) {
|
for (let file of files) {
|
||||||
|
|
@ -41,7 +46,7 @@ const processFiles = async (files) => {
|
||||||
* @param {OpenAI | undefined} [openai] - If an OpenAI file, the initialized OpenAI client.
|
* @param {OpenAI | undefined} [openai] - If an OpenAI file, the initialized OpenAI client.
|
||||||
*/
|
*/
|
||||||
function enqueueDeleteOperation(req, file, deleteFile, promises, openai) {
|
function enqueueDeleteOperation(req, file, deleteFile, promises, openai) {
|
||||||
if (file.source === FileSources.openai) {
|
if (checkOpenAIStorage(file.source)) {
|
||||||
// Enqueue to leaky bucket
|
// Enqueue to leaky bucket
|
||||||
promises.push(
|
promises.push(
|
||||||
new Promise((resolve, reject) => {
|
new Promise((resolve, reject) => {
|
||||||
|
|
@ -93,14 +98,14 @@ const processDeleteRequest = async ({ req, files }) => {
|
||||||
/** @type {OpenAI | undefined} */
|
/** @type {OpenAI | undefined} */
|
||||||
let openai;
|
let openai;
|
||||||
if (req.body.assistant_id) {
|
if (req.body.assistant_id) {
|
||||||
({ openai } = await initializeClient({ req }));
|
({ openai } = await getOpenAIClient({ req }));
|
||||||
}
|
}
|
||||||
|
|
||||||
for (const file of files) {
|
for (const file of files) {
|
||||||
const source = file.source ?? FileSources.local;
|
const source = file.source ?? FileSources.local;
|
||||||
|
|
||||||
if (source === FileSources.openai && !openai) {
|
if (checkOpenAIStorage(source) && !openai) {
|
||||||
({ openai } = await initializeClient({ req }));
|
({ openai } = await getOpenAIClient({ req }));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (req.body.assistant_id) {
|
if (req.body.assistant_id) {
|
||||||
|
|
@ -180,12 +185,13 @@ const processFileURL = async ({ fileStrategy, userId, URL, fileName, basePath, c
|
||||||
*
|
*
|
||||||
* @param {Object} params - The parameters object.
|
* @param {Object} params - The parameters object.
|
||||||
* @param {Express.Request} params.req - The Express request object.
|
* @param {Express.Request} params.req - The Express request object.
|
||||||
* @param {Express.Response} params.res - The Express response object.
|
* @param {Express.Response} [params.res] - The Express response object.
|
||||||
* @param {Express.Multer.File} params.file - The uploaded file.
|
* @param {Express.Multer.File} params.file - The uploaded file.
|
||||||
* @param {ImageMetadata} params.metadata - Additional metadata for the file.
|
* @param {ImageMetadata} params.metadata - Additional metadata for the file.
|
||||||
|
* @param {boolean} params.returnFile - Whether to return the file metadata or return response as normal.
|
||||||
* @returns {Promise<void>}
|
* @returns {Promise<void>}
|
||||||
*/
|
*/
|
||||||
const processImageFile = async ({ req, res, file, metadata }) => {
|
const processImageFile = async ({ req, res, file, metadata, returnFile = false }) => {
|
||||||
const source = req.app.locals.fileStrategy;
|
const source = req.app.locals.fileStrategy;
|
||||||
const { handleImageUpload } = getStrategyFunctions(source);
|
const { handleImageUpload } = getStrategyFunctions(source);
|
||||||
const { file_id, temp_file_id, endpoint } = metadata;
|
const { file_id, temp_file_id, endpoint } = metadata;
|
||||||
|
|
@ -213,6 +219,10 @@ const processImageFile = async ({ req, res, file, metadata }) => {
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
if (returnFile) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
res.status(200).json({ message: 'File uploaded and processed successfully', ...result });
|
res.status(200).json({ message: 'File uploaded and processed successfully', ...result });
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -274,28 +284,57 @@ const uploadImageBuffer = async ({ req, context, metadata = {}, resize = true })
|
||||||
* @returns {Promise<void>}
|
* @returns {Promise<void>}
|
||||||
*/
|
*/
|
||||||
const processFileUpload = async ({ req, res, file, metadata }) => {
|
const processFileUpload = async ({ req, res, file, metadata }) => {
|
||||||
const isAssistantUpload = metadata.endpoint === EModelEndpoint.assistants;
|
const isAssistantUpload = isAssistantsEndpoint(metadata.endpoint);
|
||||||
const source = isAssistantUpload ? FileSources.openai : FileSources.vectordb;
|
const assistantSource =
|
||||||
|
metadata.endpoint === EModelEndpoint.azureAssistants ? FileSources.azure : FileSources.openai;
|
||||||
|
const source = isAssistantUpload ? assistantSource : FileSources.vectordb;
|
||||||
const { handleFileUpload } = getStrategyFunctions(source);
|
const { handleFileUpload } = getStrategyFunctions(source);
|
||||||
const { file_id, temp_file_id } = metadata;
|
const { file_id, temp_file_id } = metadata;
|
||||||
|
|
||||||
/** @type {OpenAI | undefined} */
|
/** @type {OpenAI | undefined} */
|
||||||
let openai;
|
let openai;
|
||||||
if (source === FileSources.openai) {
|
if (checkOpenAIStorage(source)) {
|
||||||
({ openai } = await initializeClient({ req }));
|
({ openai } = await getOpenAIClient({ req }));
|
||||||
}
|
}
|
||||||
|
|
||||||
const { id, bytes, filename, filepath, embedded } = await handleFileUpload({
|
const {
|
||||||
|
id,
|
||||||
|
bytes,
|
||||||
|
filename,
|
||||||
|
filepath: _filepath,
|
||||||
|
embedded,
|
||||||
|
height,
|
||||||
|
width,
|
||||||
|
} = await handleFileUpload({
|
||||||
req,
|
req,
|
||||||
file,
|
file,
|
||||||
file_id,
|
file_id,
|
||||||
openai,
|
openai,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (isAssistantUpload && !metadata.message_file) {
|
if (isAssistantUpload && !metadata.message_file && !metadata.tool_resource) {
|
||||||
await openai.beta.assistants.files.create(metadata.assistant_id, {
|
await openai.beta.assistants.files.create(metadata.assistant_id, {
|
||||||
file_id: id,
|
file_id: id,
|
||||||
});
|
});
|
||||||
|
} else if (isAssistantUpload && !metadata.message_file) {
|
||||||
|
await addResourceFileId({
|
||||||
|
req,
|
||||||
|
openai,
|
||||||
|
file_id: id,
|
||||||
|
assistant_id: metadata.assistant_id,
|
||||||
|
tool_resource: metadata.tool_resource,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let filepath = isAssistantUpload ? `${openai.baseURL}/files/${id}` : _filepath;
|
||||||
|
if (isAssistantUpload && file.mimetype.startsWith('image')) {
|
||||||
|
const result = await processImageFile({
|
||||||
|
req,
|
||||||
|
file,
|
||||||
|
metadata: { file_id: v4() },
|
||||||
|
returnFile: true,
|
||||||
|
});
|
||||||
|
filepath = result.filepath;
|
||||||
}
|
}
|
||||||
|
|
||||||
const result = await createFile(
|
const result = await createFile(
|
||||||
|
|
@ -304,13 +343,15 @@ const processFileUpload = async ({ req, res, file, metadata }) => {
|
||||||
file_id: id ?? file_id,
|
file_id: id ?? file_id,
|
||||||
temp_file_id,
|
temp_file_id,
|
||||||
bytes,
|
bytes,
|
||||||
|
filepath,
|
||||||
filename: filename ?? file.originalname,
|
filename: filename ?? file.originalname,
|
||||||
filepath: isAssistantUpload ? `${openai.baseURL}/files/${id}` : filepath,
|
|
||||||
context: isAssistantUpload ? FileContext.assistants : FileContext.message_attachment,
|
context: isAssistantUpload ? FileContext.assistants : FileContext.message_attachment,
|
||||||
model: isAssistantUpload ? req.body.model : undefined,
|
model: isAssistantUpload ? req.body.model : undefined,
|
||||||
type: file.mimetype,
|
type: file.mimetype,
|
||||||
embedded,
|
embedded,
|
||||||
source,
|
source,
|
||||||
|
height,
|
||||||
|
width,
|
||||||
},
|
},
|
||||||
true,
|
true,
|
||||||
);
|
);
|
||||||
|
|
@ -500,7 +541,12 @@ async function retrieveAndProcessFile({
|
||||||
* Filters a file based on its size and the endpoint origin.
|
* Filters a file based on its size and the endpoint origin.
|
||||||
*
|
*
|
||||||
* @param {Object} params - The parameters for the function.
|
* @param {Object} params - The parameters for the function.
|
||||||
* @param {Express.Request} params.req - The request object from Express.
|
* @param {object} params.req - The request object from Express.
|
||||||
|
* @param {string} [params.req.endpoint]
|
||||||
|
* @param {string} [params.req.file_id]
|
||||||
|
* @param {number} [params.req.width]
|
||||||
|
* @param {number} [params.req.height]
|
||||||
|
* @param {number} [params.req.version]
|
||||||
* @param {Express.Multer.File} params.file - The file uploaded to the server via multer.
|
* @param {Express.Multer.File} params.file - The file uploaded to the server via multer.
|
||||||
* @param {boolean} [params.image] - Whether the file expected is an image.
|
* @param {boolean} [params.image] - Whether the file expected is an image.
|
||||||
* @returns {void}
|
* @returns {void}
|
||||||
|
|
|
||||||
|
|
@ -111,6 +111,8 @@ const getStrategyFunctions = (fileSource) => {
|
||||||
return localStrategy();
|
return localStrategy();
|
||||||
} else if (fileSource === FileSources.openai) {
|
} else if (fileSource === FileSources.openai) {
|
||||||
return openAIStrategy();
|
return openAIStrategy();
|
||||||
|
} else if (fileSource === FileSources.azure) {
|
||||||
|
return openAIStrategy();
|
||||||
} else if (fileSource === FileSources.vectordb) {
|
} else if (fileSource === FileSources.vectordb) {
|
||||||
return vectorStrategy();
|
return vectorStrategy();
|
||||||
} else {
|
} else {
|
||||||
|
|
|
||||||
|
|
@ -167,6 +167,8 @@ const getOpenAIModels = async (opts) => {
|
||||||
|
|
||||||
if (opts.assistants) {
|
if (opts.assistants) {
|
||||||
models = defaultModels[EModelEndpoint.assistants];
|
models = defaultModels[EModelEndpoint.assistants];
|
||||||
|
} else if (opts.azure) {
|
||||||
|
models = defaultModels[EModelEndpoint.azureAssistants];
|
||||||
}
|
}
|
||||||
|
|
||||||
if (opts.plugins) {
|
if (opts.plugins) {
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,6 @@ const { v4 } = require('uuid');
|
||||||
const {
|
const {
|
||||||
Constants,
|
Constants,
|
||||||
ContentTypes,
|
ContentTypes,
|
||||||
EModelEndpoint,
|
|
||||||
AnnotationTypes,
|
AnnotationTypes,
|
||||||
defaultOrderQuery,
|
defaultOrderQuery,
|
||||||
} = require('librechat-data-provider');
|
} = require('librechat-data-provider');
|
||||||
|
|
@ -50,6 +49,7 @@ async function initThread({ openai, body, thread_id: _thread_id }) {
|
||||||
* @param {string} params.assistant_id - The current assistant Id.
|
* @param {string} params.assistant_id - The current assistant Id.
|
||||||
* @param {string} params.thread_id - The thread Id.
|
* @param {string} params.thread_id - The thread Id.
|
||||||
* @param {string} params.conversationId - The message's conversationId
|
* @param {string} params.conversationId - The message's conversationId
|
||||||
|
* @param {string} params.endpoint - The conversation endpoint
|
||||||
* @param {string} [params.parentMessageId] - Optional if initial message.
|
* @param {string} [params.parentMessageId] - Optional if initial message.
|
||||||
* Defaults to Constants.NO_PARENT.
|
* Defaults to Constants.NO_PARENT.
|
||||||
* @param {string} [params.instructions] - Optional: from preset for `instructions` field.
|
* @param {string} [params.instructions] - Optional: from preset for `instructions` field.
|
||||||
|
|
@ -82,7 +82,7 @@ async function saveUserMessage(params) {
|
||||||
|
|
||||||
const userMessage = {
|
const userMessage = {
|
||||||
user: params.user,
|
user: params.user,
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint: params.endpoint,
|
||||||
messageId: params.messageId,
|
messageId: params.messageId,
|
||||||
conversationId: params.conversationId,
|
conversationId: params.conversationId,
|
||||||
parentMessageId: params.parentMessageId ?? Constants.NO_PARENT,
|
parentMessageId: params.parentMessageId ?? Constants.NO_PARENT,
|
||||||
|
|
@ -96,7 +96,7 @@ async function saveUserMessage(params) {
|
||||||
};
|
};
|
||||||
|
|
||||||
const convo = {
|
const convo = {
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint: params.endpoint,
|
||||||
conversationId: params.conversationId,
|
conversationId: params.conversationId,
|
||||||
promptPrefix: params.promptPrefix,
|
promptPrefix: params.promptPrefix,
|
||||||
instructions: params.instructions,
|
instructions: params.instructions,
|
||||||
|
|
@ -126,6 +126,7 @@ async function saveUserMessage(params) {
|
||||||
* @param {string} params.model - The model used by the assistant.
|
* @param {string} params.model - The model used by the assistant.
|
||||||
* @param {ContentPart[]} params.content - The message content parts.
|
* @param {ContentPart[]} params.content - The message content parts.
|
||||||
* @param {string} params.conversationId - The message's conversationId
|
* @param {string} params.conversationId - The message's conversationId
|
||||||
|
* @param {string} params.endpoint - The conversation endpoint
|
||||||
* @param {string} params.parentMessageId - The latest user message that triggered this response.
|
* @param {string} params.parentMessageId - The latest user message that triggered this response.
|
||||||
* @param {string} [params.instructions] - Optional: from preset for `instructions` field.
|
* @param {string} [params.instructions] - Optional: from preset for `instructions` field.
|
||||||
* Overrides the instructions of the assistant.
|
* Overrides the instructions of the assistant.
|
||||||
|
|
@ -145,7 +146,7 @@ async function saveAssistantMessage(params) {
|
||||||
|
|
||||||
const message = await recordMessage({
|
const message = await recordMessage({
|
||||||
user: params.user,
|
user: params.user,
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint: params.endpoint,
|
||||||
messageId: params.messageId,
|
messageId: params.messageId,
|
||||||
conversationId: params.conversationId,
|
conversationId: params.conversationId,
|
||||||
parentMessageId: params.parentMessageId,
|
parentMessageId: params.parentMessageId,
|
||||||
|
|
@ -160,7 +161,7 @@ async function saveAssistantMessage(params) {
|
||||||
});
|
});
|
||||||
|
|
||||||
await saveConvo(params.user, {
|
await saveConvo(params.user, {
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint: params.endpoint,
|
||||||
conversationId: params.conversationId,
|
conversationId: params.conversationId,
|
||||||
promptPrefix: params.promptPrefix,
|
promptPrefix: params.promptPrefix,
|
||||||
instructions: params.instructions,
|
instructions: params.instructions,
|
||||||
|
|
@ -205,20 +206,22 @@ async function addThreadMetadata({ openai, thread_id, messageId, messages }) {
|
||||||
*
|
*
|
||||||
* @param {Object} params - The parameters for synchronizing messages.
|
* @param {Object} params - The parameters for synchronizing messages.
|
||||||
* @param {OpenAIClient} params.openai - The OpenAI client instance.
|
* @param {OpenAIClient} params.openai - The OpenAI client instance.
|
||||||
|
* @param {string} params.endpoint - The current endpoint.
|
||||||
|
* @param {string} params.thread_id - The current thread ID.
|
||||||
* @param {TMessage[]} params.dbMessages - The LibreChat DB messages.
|
* @param {TMessage[]} params.dbMessages - The LibreChat DB messages.
|
||||||
* @param {ThreadMessage[]} params.apiMessages - The thread messages from the API.
|
* @param {ThreadMessage[]} params.apiMessages - The thread messages from the API.
|
||||||
* @param {string} params.conversationId - The current conversation ID.
|
|
||||||
* @param {string} params.thread_id - The current thread ID.
|
|
||||||
* @param {string} [params.assistant_id] - The current assistant ID.
|
* @param {string} [params.assistant_id] - The current assistant ID.
|
||||||
|
* @param {string} params.conversationId - The current conversation ID.
|
||||||
* @return {Promise<TMessage[]>} A promise that resolves to the updated messages
|
* @return {Promise<TMessage[]>} A promise that resolves to the updated messages
|
||||||
*/
|
*/
|
||||||
async function syncMessages({
|
async function syncMessages({
|
||||||
openai,
|
openai,
|
||||||
apiMessages,
|
endpoint,
|
||||||
dbMessages,
|
|
||||||
conversationId,
|
|
||||||
thread_id,
|
thread_id,
|
||||||
|
dbMessages,
|
||||||
|
apiMessages,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
|
conversationId,
|
||||||
}) {
|
}) {
|
||||||
let result = [];
|
let result = [];
|
||||||
let dbMessageMap = new Map(dbMessages.map((msg) => [msg.messageId, msg]));
|
let dbMessageMap = new Map(dbMessages.map((msg) => [msg.messageId, msg]));
|
||||||
|
|
@ -290,7 +293,7 @@ async function syncMessages({
|
||||||
thread_id,
|
thread_id,
|
||||||
conversationId,
|
conversationId,
|
||||||
messageId: v4(),
|
messageId: v4(),
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint,
|
||||||
parentMessageId: lastMessage ? lastMessage.messageId : Constants.NO_PARENT,
|
parentMessageId: lastMessage ? lastMessage.messageId : Constants.NO_PARENT,
|
||||||
role: apiMessage.role,
|
role: apiMessage.role,
|
||||||
isCreatedByUser: apiMessage.role === 'user',
|
isCreatedByUser: apiMessage.role === 'user',
|
||||||
|
|
@ -382,13 +385,21 @@ function mapMessagesToSteps(steps, messages) {
|
||||||
*
|
*
|
||||||
* @param {Object} params - The parameters for initializing a thread.
|
* @param {Object} params - The parameters for initializing a thread.
|
||||||
* @param {OpenAIClient} params.openai - The OpenAI client instance.
|
* @param {OpenAIClient} params.openai - The OpenAI client instance.
|
||||||
|
* @param {string} params.endpoint - The current endpoint.
|
||||||
* @param {string} [params.latestMessageId] - Optional: The latest message ID from LibreChat.
|
* @param {string} [params.latestMessageId] - Optional: The latest message ID from LibreChat.
|
||||||
* @param {string} params.thread_id - Response thread ID.
|
* @param {string} params.thread_id - Response thread ID.
|
||||||
* @param {string} params.run_id - Response Run ID.
|
* @param {string} params.run_id - Response Run ID.
|
||||||
* @param {string} params.conversationId - LibreChat conversation ID.
|
* @param {string} params.conversationId - LibreChat conversation ID.
|
||||||
* @return {Promise<TMessage[]>} A promise that resolves to the updated messages
|
* @return {Promise<TMessage[]>} A promise that resolves to the updated messages
|
||||||
*/
|
*/
|
||||||
async function checkMessageGaps({ openai, latestMessageId, thread_id, run_id, conversationId }) {
|
async function checkMessageGaps({
|
||||||
|
openai,
|
||||||
|
endpoint,
|
||||||
|
latestMessageId,
|
||||||
|
thread_id,
|
||||||
|
run_id,
|
||||||
|
conversationId,
|
||||||
|
}) {
|
||||||
const promises = [];
|
const promises = [];
|
||||||
promises.push(openai.beta.threads.messages.list(thread_id, defaultOrderQuery));
|
promises.push(openai.beta.threads.messages.list(thread_id, defaultOrderQuery));
|
||||||
promises.push(openai.beta.threads.runs.steps.list(thread_id, run_id));
|
promises.push(openai.beta.threads.runs.steps.list(thread_id, run_id));
|
||||||
|
|
@ -406,6 +417,7 @@ async function checkMessageGaps({ openai, latestMessageId, thread_id, run_id, co
|
||||||
role: 'assistant',
|
role: 'assistant',
|
||||||
run_id,
|
run_id,
|
||||||
thread_id,
|
thread_id,
|
||||||
|
endpoint,
|
||||||
metadata: {
|
metadata: {
|
||||||
messageId: latestMessageId,
|
messageId: latestMessageId,
|
||||||
},
|
},
|
||||||
|
|
@ -452,11 +464,12 @@ async function checkMessageGaps({ openai, latestMessageId, thread_id, run_id, co
|
||||||
|
|
||||||
const syncedMessages = await syncMessages({
|
const syncedMessages = await syncMessages({
|
||||||
openai,
|
openai,
|
||||||
|
endpoint,
|
||||||
|
thread_id,
|
||||||
dbMessages,
|
dbMessages,
|
||||||
apiMessages,
|
apiMessages,
|
||||||
thread_id,
|
|
||||||
conversationId,
|
|
||||||
assistant_id,
|
assistant_id,
|
||||||
|
conversationId,
|
||||||
});
|
});
|
||||||
|
|
||||||
return Object.values(
|
return Object.values(
|
||||||
|
|
@ -498,41 +511,62 @@ const recordUsage = async ({
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
* Creates a replaceAnnotation function with internal state for tracking the index offset.
|
||||||
|
*
|
||||||
|
* @returns {function} The replaceAnnotation function with closure for index offset.
|
||||||
|
*/
|
||||||
|
function createReplaceAnnotation() {
|
||||||
|
let indexOffset = 0;
|
||||||
|
|
||||||
|
/**
|
||||||
* Safely replaces the annotated text within the specified range denoted by start_index and end_index,
|
* Safely replaces the annotated text within the specified range denoted by start_index and end_index,
|
||||||
* after verifying that the text within that range matches the given annotation text.
|
* after verifying that the text within that range matches the given annotation text.
|
||||||
* Proceeds with the replacement even if a mismatch is found, but logs a warning.
|
* Proceeds with the replacement even if a mismatch is found, but logs a warning.
|
||||||
*
|
*
|
||||||
* @param {string} originalText The original text content.
|
* @param {object} params The original text content.
|
||||||
* @param {number} start_index The starting index where replacement should begin.
|
* @param {string} params.currentText The current text content, with/without replacements.
|
||||||
* @param {number} end_index The ending index where replacement should end.
|
* @param {number} params.start_index The starting index where replacement should begin.
|
||||||
* @param {string} expectedText The text expected to be found in the specified range.
|
* @param {number} params.end_index The ending index where replacement should end.
|
||||||
* @param {string} replacementText The text to insert in place of the existing content.
|
* @param {string} params.expectedText The text expected to be found in the specified range.
|
||||||
|
* @param {string} params.replacementText The text to insert in place of the existing content.
|
||||||
* @returns {string} The text with the replacement applied, regardless of text match.
|
* @returns {string} The text with the replacement applied, regardless of text match.
|
||||||
*/
|
*/
|
||||||
function replaceAnnotation(originalText, start_index, end_index, expectedText, replacementText) {
|
function replaceAnnotation({
|
||||||
if (start_index < 0 || end_index > originalText.length || start_index > end_index) {
|
currentText,
|
||||||
|
start_index,
|
||||||
|
end_index,
|
||||||
|
expectedText,
|
||||||
|
replacementText,
|
||||||
|
}) {
|
||||||
|
const adjustedStartIndex = start_index + indexOffset;
|
||||||
|
const adjustedEndIndex = end_index + indexOffset;
|
||||||
|
|
||||||
|
if (
|
||||||
|
adjustedStartIndex < 0 ||
|
||||||
|
adjustedEndIndex > currentText.length ||
|
||||||
|
adjustedStartIndex > adjustedEndIndex
|
||||||
|
) {
|
||||||
logger.warn(`Invalid range specified for annotation replacement.
|
logger.warn(`Invalid range specified for annotation replacement.
|
||||||
Attempting replacement with \`replace\` method instead...
|
Attempting replacement with \`replace\` method instead...
|
||||||
length: ${originalText.length}
|
length: ${currentText.length}
|
||||||
start_index: ${start_index}
|
start_index: ${adjustedStartIndex}
|
||||||
end_index: ${end_index}`);
|
end_index: ${adjustedEndIndex}`);
|
||||||
return originalText.replace(originalText, replacementText);
|
return currentText.replace(expectedText, replacementText);
|
||||||
}
|
}
|
||||||
|
|
||||||
const actualTextInRange = originalText.substring(start_index, end_index);
|
if (currentText.substring(adjustedStartIndex, adjustedEndIndex) !== expectedText) {
|
||||||
|
return currentText.replace(expectedText, replacementText);
|
||||||
if (actualTextInRange !== expectedText) {
|
|
||||||
logger.warn(`The text within the specified range does not match the expected annotation text.
|
|
||||||
Attempting replacement with \`replace\` method instead...
|
|
||||||
Expected: ${expectedText}
|
|
||||||
Actual: ${actualTextInRange}`);
|
|
||||||
|
|
||||||
return originalText.replace(originalText, replacementText);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const beforeText = originalText.substring(0, start_index);
|
indexOffset += replacementText.length - (adjustedEndIndex - adjustedStartIndex);
|
||||||
const afterText = originalText.substring(end_index);
|
return (
|
||||||
return beforeText + replacementText + afterText;
|
currentText.slice(0, adjustedStartIndex) +
|
||||||
|
replacementText +
|
||||||
|
currentText.slice(adjustedEndIndex)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return replaceAnnotation;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
@ -581,6 +615,11 @@ async function processMessages({ openai, client, messages = [] }) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const originalText = currentText;
|
||||||
|
text += originalText;
|
||||||
|
|
||||||
|
const replaceAnnotation = createReplaceAnnotation();
|
||||||
|
|
||||||
logger.debug('[processMessages] Processing annotations:', annotations);
|
logger.debug('[processMessages] Processing annotations:', annotations);
|
||||||
for (const annotation of annotations) {
|
for (const annotation of annotations) {
|
||||||
let file;
|
let file;
|
||||||
|
|
@ -589,14 +628,16 @@ async function processMessages({ openai, client, messages = [] }) {
|
||||||
const file_id = annotationType?.file_id;
|
const file_id = annotationType?.file_id;
|
||||||
const alreadyProcessed = client.processedFileIds.has(file_id);
|
const alreadyProcessed = client.processedFileIds.has(file_id);
|
||||||
|
|
||||||
const replaceCurrentAnnotation = (replacement = '') => {
|
const replaceCurrentAnnotation = (replacementText = '') => {
|
||||||
currentText = replaceAnnotation(
|
const { start_index, end_index, text: expectedText } = annotation;
|
||||||
|
currentText = replaceAnnotation({
|
||||||
|
originalText,
|
||||||
currentText,
|
currentText,
|
||||||
annotation.start_index,
|
start_index,
|
||||||
annotation.end_index,
|
end_index,
|
||||||
annotation.text,
|
expectedText,
|
||||||
replacement,
|
replacementText,
|
||||||
);
|
});
|
||||||
edited = true;
|
edited = true;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -623,7 +664,7 @@ async function processMessages({ openai, client, messages = [] }) {
|
||||||
replaceCurrentAnnotation(`^${sources.length}^`);
|
replaceCurrentAnnotation(`^${sources.length}^`);
|
||||||
}
|
}
|
||||||
|
|
||||||
text += currentText + ' ';
|
text = currentText;
|
||||||
|
|
||||||
if (!file) {
|
if (!file) {
|
||||||
continue;
|
continue;
|
||||||
|
|
|
||||||
|
|
@ -2,6 +2,7 @@ const {
|
||||||
Capabilities,
|
Capabilities,
|
||||||
EModelEndpoint,
|
EModelEndpoint,
|
||||||
assistantEndpointSchema,
|
assistantEndpointSchema,
|
||||||
|
defaultAssistantsVersion,
|
||||||
} = require('librechat-data-provider');
|
} = require('librechat-data-provider');
|
||||||
const { logger } = require('~/config');
|
const { logger } = require('~/config');
|
||||||
|
|
||||||
|
|
@ -12,6 +13,7 @@ const { logger } = require('~/config');
|
||||||
function azureAssistantsDefaults() {
|
function azureAssistantsDefaults() {
|
||||||
return {
|
return {
|
||||||
capabilities: [Capabilities.tools, Capabilities.actions, Capabilities.code_interpreter],
|
capabilities: [Capabilities.tools, Capabilities.actions, Capabilities.code_interpreter],
|
||||||
|
version: defaultAssistantsVersion.azureAssistants,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,10 @@
|
||||||
const { Capabilities, defaultRetrievalModels } = require('librechat-data-provider');
|
const {
|
||||||
|
Capabilities,
|
||||||
|
EModelEndpoint,
|
||||||
|
isAssistantsEndpoint,
|
||||||
|
defaultRetrievalModels,
|
||||||
|
defaultAssistantsVersion,
|
||||||
|
} = require('librechat-data-provider');
|
||||||
const { getCitations, citeText } = require('./citations');
|
const { getCitations, citeText } = require('./citations');
|
||||||
const partialRight = require('lodash/partialRight');
|
const partialRight = require('lodash/partialRight');
|
||||||
const { sendMessage } = require('./streamResponse');
|
const { sendMessage } = require('./streamResponse');
|
||||||
|
|
@ -154,9 +160,10 @@ const isUserProvided = (value) => value === 'user_provided';
|
||||||
* Generate the configuration for a given key and base URL.
|
* Generate the configuration for a given key and base URL.
|
||||||
* @param {string} key
|
* @param {string} key
|
||||||
* @param {string} baseURL
|
* @param {string} baseURL
|
||||||
|
* @param {string} endpoint
|
||||||
* @returns {boolean | { userProvide: boolean, userProvideURL?: boolean }}
|
* @returns {boolean | { userProvide: boolean, userProvideURL?: boolean }}
|
||||||
*/
|
*/
|
||||||
function generateConfig(key, baseURL, assistants = false) {
|
function generateConfig(key, baseURL, endpoint) {
|
||||||
if (!key) {
|
if (!key) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
@ -168,6 +175,8 @@ function generateConfig(key, baseURL, assistants = false) {
|
||||||
config.userProvideURL = isUserProvided(baseURL);
|
config.userProvideURL = isUserProvided(baseURL);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const assistants = isAssistantsEndpoint(endpoint);
|
||||||
|
|
||||||
if (assistants) {
|
if (assistants) {
|
||||||
config.retrievalModels = defaultRetrievalModels;
|
config.retrievalModels = defaultRetrievalModels;
|
||||||
config.capabilities = [
|
config.capabilities = [
|
||||||
|
|
@ -179,6 +188,12 @@ function generateConfig(key, baseURL, assistants = false) {
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (assistants && endpoint === EModelEndpoint.azureAssistants) {
|
||||||
|
config.version = defaultAssistantsVersion.azureAssistants;
|
||||||
|
} else if (assistants) {
|
||||||
|
config.version = defaultAssistantsVersion.assistants;
|
||||||
|
}
|
||||||
|
|
||||||
return config;
|
return config;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -657,6 +657,12 @@
|
||||||
* @memberof typedefs
|
* @memberof typedefs
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @exports OpenAISpecClient
|
||||||
|
* @typedef {import('./app/clients/OpenAIClient')} OpenAISpecClient
|
||||||
|
* @memberof typedefs
|
||||||
|
*/
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @exports ImportBatchBuilder
|
* @exports ImportBatchBuilder
|
||||||
* @typedef {import('./server/utils/import/importBatchBuilder.js').ImportBatchBuilder} ImportBatchBuilder
|
* @typedef {import('./server/utils/import/importBatchBuilder.js').ImportBatchBuilder} ImportBatchBuilder
|
||||||
|
|
|
||||||
|
|
@ -4,7 +4,11 @@ import type { Option, ExtendedFile } from './types';
|
||||||
|
|
||||||
export type TAssistantOption =
|
export type TAssistantOption =
|
||||||
| string
|
| string
|
||||||
| (Option & Assistant & { files?: Array<[string, ExtendedFile]> });
|
| (Option &
|
||||||
|
Assistant & {
|
||||||
|
files?: Array<[string, ExtendedFile]>;
|
||||||
|
code_files?: Array<[string, ExtendedFile]>;
|
||||||
|
});
|
||||||
|
|
||||||
export type Actions = {
|
export type Actions = {
|
||||||
[Capabilities.code_interpreter]: boolean;
|
[Capabilities.code_interpreter]: boolean;
|
||||||
|
|
|
||||||
|
|
@ -8,10 +8,12 @@ import type {
|
||||||
TPreset,
|
TPreset,
|
||||||
TPlugin,
|
TPlugin,
|
||||||
TMessage,
|
TMessage,
|
||||||
|
Assistant,
|
||||||
TLoginUser,
|
TLoginUser,
|
||||||
AuthTypeEnum,
|
AuthTypeEnum,
|
||||||
TConversation,
|
TConversation,
|
||||||
EModelEndpoint,
|
EModelEndpoint,
|
||||||
|
AssistantsEndpoint,
|
||||||
AuthorizationTypeEnum,
|
AuthorizationTypeEnum,
|
||||||
TSetOption as SetOption,
|
TSetOption as SetOption,
|
||||||
TokenExchangeMethodEnum,
|
TokenExchangeMethodEnum,
|
||||||
|
|
@ -19,6 +21,13 @@ import type {
|
||||||
import type { UseMutationResult } from '@tanstack/react-query';
|
import type { UseMutationResult } from '@tanstack/react-query';
|
||||||
import type { LucideIcon } from 'lucide-react';
|
import type { LucideIcon } from 'lucide-react';
|
||||||
|
|
||||||
|
export type AssistantListItem = {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
metadata: Assistant['metadata'];
|
||||||
|
model: string;
|
||||||
|
};
|
||||||
|
|
||||||
export type TPluginMap = Record<string, TPlugin>;
|
export type TPluginMap = Record<string, TPlugin>;
|
||||||
|
|
||||||
export type GenericSetter<T> = (value: T | ((currentValue: T) => T)) => void;
|
export type GenericSetter<T> = (value: T | ((currentValue: T) => T)) => void;
|
||||||
|
|
@ -101,6 +110,8 @@ export type AssistantPanelProps = {
|
||||||
actions?: Action[];
|
actions?: Action[];
|
||||||
assistant_id?: string;
|
assistant_id?: string;
|
||||||
activePanel?: string;
|
activePanel?: string;
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
|
version: number | string;
|
||||||
setAction: React.Dispatch<React.SetStateAction<Action | undefined>>;
|
setAction: React.Dispatch<React.SetStateAction<Action | undefined>>;
|
||||||
setCurrentAssistantId: React.Dispatch<React.SetStateAction<string | undefined>>;
|
setCurrentAssistantId: React.Dispatch<React.SetStateAction<string | undefined>>;
|
||||||
setActivePanel: React.Dispatch<React.SetStateAction<Panel>>;
|
setActivePanel: React.Dispatch<React.SetStateAction<Panel>>;
|
||||||
|
|
@ -327,7 +338,11 @@ export type Option = Record<string, unknown> & {
|
||||||
};
|
};
|
||||||
|
|
||||||
export type OptionWithIcon = Option & { icon?: React.ReactNode };
|
export type OptionWithIcon = Option & { icon?: React.ReactNode };
|
||||||
export type MentionOption = OptionWithIcon & { type: string; value: string; description?: string };
|
export type MentionOption = OptionWithIcon & {
|
||||||
|
type: string;
|
||||||
|
value: string;
|
||||||
|
description?: string;
|
||||||
|
};
|
||||||
|
|
||||||
export type TOptionSettings = {
|
export type TOptionSettings = {
|
||||||
showExamples?: boolean;
|
showExamples?: boolean;
|
||||||
|
|
|
||||||
|
|
@ -3,8 +3,8 @@ import { useForm } from 'react-hook-form';
|
||||||
import { memo, useCallback, useRef, useMemo } from 'react';
|
import { memo, useCallback, useRef, useMemo } from 'react';
|
||||||
import {
|
import {
|
||||||
supportsFiles,
|
supportsFiles,
|
||||||
EModelEndpoint,
|
|
||||||
mergeFileConfig,
|
mergeFileConfig,
|
||||||
|
isAssistantsEndpoint,
|
||||||
fileConfig as defaultFileConfig,
|
fileConfig as defaultFileConfig,
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
import { useChatContext, useAssistantsMapContext } from '~/Providers';
|
import { useChatContext, useAssistantsMapContext } from '~/Providers';
|
||||||
|
|
@ -74,8 +74,9 @@ const ChatForm = ({ index = 0 }) => {
|
||||||
const endpointFileConfig = fileConfig.endpoints[endpoint ?? ''];
|
const endpointFileConfig = fileConfig.endpoints[endpoint ?? ''];
|
||||||
const invalidAssistant = useMemo(
|
const invalidAssistant = useMemo(
|
||||||
() =>
|
() =>
|
||||||
conversation?.endpoint === EModelEndpoint.assistants &&
|
isAssistantsEndpoint(conversation?.endpoint) &&
|
||||||
(!conversation?.assistant_id || !assistantMap?.[conversation?.assistant_id ?? '']),
|
(!conversation?.assistant_id ||
|
||||||
|
!assistantMap?.[conversation?.endpoint ?? '']?.[conversation?.assistant_id ?? '']),
|
||||||
[conversation?.assistant_id, conversation?.endpoint, assistantMap],
|
[conversation?.assistant_id, conversation?.endpoint, assistantMap],
|
||||||
);
|
);
|
||||||
const disableInputs = useMemo(
|
const disableInputs = useMemo(
|
||||||
|
|
|
||||||
|
|
@ -20,7 +20,7 @@ const FilePreview = ({
|
||||||
}) => {
|
}) => {
|
||||||
const radius = 55; // Radius of the SVG circle
|
const radius = 55; // Radius of the SVG circle
|
||||||
const circumference = 2 * Math.PI * radius;
|
const circumference = 2 * Math.PI * radius;
|
||||||
const progress = useProgress(file?.['progress'] ?? 1, 0.001, file?.size ?? 1);
|
const progress = useProgress(file?.['progress'] ?? 1, 0.001, (file as ExtendedFile)?.size ?? 1);
|
||||||
console.log(progress);
|
console.log(progress);
|
||||||
|
|
||||||
// Calculate the offset based on the loading progress
|
// Calculate the offset based on the loading progress
|
||||||
|
|
|
||||||
|
|
@ -17,7 +17,9 @@ export default function Mention({
|
||||||
}) {
|
}) {
|
||||||
const localize = useLocalize();
|
const localize = useLocalize();
|
||||||
const assistantMap = useAssistantsMapContext();
|
const assistantMap = useAssistantsMapContext();
|
||||||
const { options, modelsConfig, assistants, onSelectMention } = useMentions({ assistantMap });
|
const { options, modelsConfig, assistantListMap, onSelectMention } = useMentions({
|
||||||
|
assistantMap,
|
||||||
|
});
|
||||||
|
|
||||||
const [activeIndex, setActiveIndex] = useState(0);
|
const [activeIndex, setActiveIndex] = useState(0);
|
||||||
const timeoutRef = useRef<NodeJS.Timeout | null>(null);
|
const timeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||||
|
|
@ -47,7 +49,12 @@ export default function Mention({
|
||||||
|
|
||||||
if (mention.type === 'endpoint' && mention.value === EModelEndpoint.assistants) {
|
if (mention.type === 'endpoint' && mention.value === EModelEndpoint.assistants) {
|
||||||
setSearchValue('');
|
setSearchValue('');
|
||||||
setInputOptions(assistants);
|
setInputOptions(assistantListMap[EModelEndpoint.assistants]);
|
||||||
|
setActiveIndex(0);
|
||||||
|
inputRef.current?.focus();
|
||||||
|
} else if (mention.type === 'endpoint' && mention.value === EModelEndpoint.azureAssistants) {
|
||||||
|
setSearchValue('');
|
||||||
|
setInputOptions(assistantListMap[EModelEndpoint.azureAssistants]);
|
||||||
setActiveIndex(0);
|
setActiveIndex(0);
|
||||||
inputRef.current?.focus();
|
inputRef.current?.focus();
|
||||||
} else if (mention.type === 'endpoint') {
|
} else if (mention.type === 'endpoint') {
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import { useGetEndpointsQuery, useGetStartupConfig } from 'librechat-data-provider/react-query';
|
import { useGetEndpointsQuery, useGetStartupConfig } from 'librechat-data-provider/react-query';
|
||||||
import type { ReactNode } from 'react';
|
import type { ReactNode } from 'react';
|
||||||
import { TooltipProvider, Tooltip, TooltipTrigger, TooltipContent } from '~/components/ui';
|
import { TooltipProvider, Tooltip, TooltipTrigger, TooltipContent } from '~/components/ui';
|
||||||
|
|
@ -30,7 +30,8 @@ export default function Landing({ Header }: { Header?: ReactNode }) {
|
||||||
const iconURL = conversation?.iconURL;
|
const iconURL = conversation?.iconURL;
|
||||||
endpoint = getIconEndpoint({ endpointsConfig, iconURL, endpoint });
|
endpoint = getIconEndpoint({ endpointsConfig, iconURL, endpoint });
|
||||||
|
|
||||||
const assistant = endpoint === EModelEndpoint.assistants && assistantMap?.[assistant_id ?? ''];
|
const isAssistant = isAssistantsEndpoint(endpoint);
|
||||||
|
const assistant = isAssistant && assistantMap?.[endpoint]?.[assistant_id ?? ''];
|
||||||
const assistantName = (assistant && assistant?.name) || '';
|
const assistantName = (assistant && assistant?.name) || '';
|
||||||
const assistantDesc = (assistant && assistant?.description) || '';
|
const assistantDesc = (assistant && assistant?.description) || '';
|
||||||
const avatar = (assistant && (assistant?.metadata?.avatar as string)) || '';
|
const avatar = (assistant && (assistant?.metadata?.avatar as string)) || '';
|
||||||
|
|
@ -77,7 +78,7 @@ export default function Landing({ Header }: { Header?: ReactNode }) {
|
||||||
</div>
|
</div>
|
||||||
) : (
|
) : (
|
||||||
<div className="mb-5 max-w-[75vh] px-12 text-center text-lg font-medium dark:text-white md:px-0 md:text-2xl">
|
<div className="mb-5 max-w-[75vh] px-12 text-center text-lg font-medium dark:text-white md:px-0 md:text-2xl">
|
||||||
{endpoint === EModelEndpoint.assistants
|
{isAssistant
|
||||||
? conversation?.greeting ?? localize('com_nav_welcome_assistant')
|
? conversation?.greeting ?? localize('com_nav_welcome_assistant')
|
||||||
: conversation?.greeting ?? localize('com_nav_welcome_message')}
|
: conversation?.greeting ?? localize('com_nav_welcome_message')}
|
||||||
</div>
|
</div>
|
||||||
|
|
|
||||||
|
|
@ -15,16 +15,7 @@ import {
|
||||||
import UnknownIcon from './UnknownIcon';
|
import UnknownIcon from './UnknownIcon';
|
||||||
import { cn } from '~/utils';
|
import { cn } from '~/utils';
|
||||||
|
|
||||||
export const icons = {
|
const AssistantAvatar = ({ className = '', assistantName, avatar, size }: IconMapProps) => {
|
||||||
[EModelEndpoint.azureOpenAI]: AzureMinimalIcon,
|
|
||||||
[EModelEndpoint.openAI]: GPTIcon,
|
|
||||||
[EModelEndpoint.gptPlugins]: MinimalPlugin,
|
|
||||||
[EModelEndpoint.anthropic]: AnthropicIcon,
|
|
||||||
[EModelEndpoint.chatGPTBrowser]: LightningIcon,
|
|
||||||
[EModelEndpoint.google]: GoogleMinimalIcon,
|
|
||||||
[EModelEndpoint.bingAI]: BingAIMinimalIcon,
|
|
||||||
[EModelEndpoint.custom]: CustomMinimalIcon,
|
|
||||||
[EModelEndpoint.assistants]: ({ className = '', assistantName, avatar, size }: IconMapProps) => {
|
|
||||||
if (assistantName && avatar) {
|
if (assistantName && avatar) {
|
||||||
return (
|
return (
|
||||||
<img
|
<img
|
||||||
|
|
@ -40,6 +31,18 @@ export const icons = {
|
||||||
}
|
}
|
||||||
|
|
||||||
return <Sparkles className={cn(assistantName === '' ? 'icon-2xl' : '', className)} />;
|
return <Sparkles className={cn(assistantName === '' ? 'icon-2xl' : '', className)} />;
|
||||||
},
|
};
|
||||||
|
|
||||||
|
export const icons = {
|
||||||
|
[EModelEndpoint.azureOpenAI]: AzureMinimalIcon,
|
||||||
|
[EModelEndpoint.openAI]: GPTIcon,
|
||||||
|
[EModelEndpoint.gptPlugins]: MinimalPlugin,
|
||||||
|
[EModelEndpoint.anthropic]: AnthropicIcon,
|
||||||
|
[EModelEndpoint.chatGPTBrowser]: LightningIcon,
|
||||||
|
[EModelEndpoint.google]: GoogleMinimalIcon,
|
||||||
|
[EModelEndpoint.bingAI]: BingAIMinimalIcon,
|
||||||
|
[EModelEndpoint.custom]: CustomMinimalIcon,
|
||||||
|
[EModelEndpoint.assistants]: AssistantAvatar,
|
||||||
|
[EModelEndpoint.azureAssistants]: AssistantAvatar,
|
||||||
unknown: UnknownIcon,
|
unknown: UnknownIcon,
|
||||||
};
|
};
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
import { Content, Portal, Root } from '@radix-ui/react-popover';
|
import { Content, Portal, Root } from '@radix-ui/react-popover';
|
||||||
import { alternateName, EModelEndpoint } from 'librechat-data-provider';
|
import { alternateName, isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
|
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
|
||||||
import type { FC } from 'react';
|
import type { FC } from 'react';
|
||||||
import { useChatContext, useAssistantsMapContext } from '~/Providers';
|
import { useChatContext, useAssistantsMapContext } from '~/Providers';
|
||||||
|
|
@ -16,7 +16,8 @@ const EndpointsMenu: FC = () => {
|
||||||
const { endpoint = '', assistant_id = null } = conversation ?? {};
|
const { endpoint = '', assistant_id = null } = conversation ?? {};
|
||||||
const assistantMap = useAssistantsMapContext();
|
const assistantMap = useAssistantsMapContext();
|
||||||
|
|
||||||
const assistant = endpoint === EModelEndpoint.assistants && assistantMap?.[assistant_id ?? ''];
|
const assistant =
|
||||||
|
isAssistantsEndpoint(endpoint) && assistantMap?.[endpoint ?? '']?.[assistant_id ?? ''];
|
||||||
const assistantName = (assistant && assistant?.name) || 'Assistant';
|
const assistantName = (assistant && assistant?.name) || 'Assistant';
|
||||||
|
|
||||||
if (!endpoint) {
|
if (!endpoint) {
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,7 @@
|
||||||
import { useState } from 'react';
|
import { useState } from 'react';
|
||||||
import { useRecoilValue } from 'recoil';
|
import { useRecoilValue } from 'recoil';
|
||||||
import ProgressCircle from './ProgressCircle';
|
import ProgressCircle from './ProgressCircle';
|
||||||
|
import CancelledIcon from './CancelledIcon';
|
||||||
import ProgressText from './ProgressText';
|
import ProgressText from './ProgressText';
|
||||||
import FinishedIcon from './FinishedIcon';
|
import FinishedIcon from './FinishedIcon';
|
||||||
import MarkdownLite from './MarkdownLite';
|
import MarkdownLite from './MarkdownLite';
|
||||||
|
|
@ -11,10 +12,12 @@ export default function CodeAnalyze({
|
||||||
initialProgress = 0.1,
|
initialProgress = 0.1,
|
||||||
code,
|
code,
|
||||||
outputs = [],
|
outputs = [],
|
||||||
|
isSubmitting,
|
||||||
}: {
|
}: {
|
||||||
initialProgress: number;
|
initialProgress: number;
|
||||||
code: string;
|
code: string;
|
||||||
outputs: Record<string, unknown>[];
|
outputs: Record<string, unknown>[];
|
||||||
|
isSubmitting: boolean;
|
||||||
}) {
|
}) {
|
||||||
const showCodeDefault = useRecoilValue(store.showCode);
|
const showCodeDefault = useRecoilValue(store.showCode);
|
||||||
const [showCode, setShowCode] = useState(showCodeDefault);
|
const [showCode, setShowCode] = useState(showCodeDefault);
|
||||||
|
|
@ -35,7 +38,13 @@ export default function CodeAnalyze({
|
||||||
<div className="my-2.5 flex items-center gap-2.5">
|
<div className="my-2.5 flex items-center gap-2.5">
|
||||||
<div className="relative h-5 w-5 shrink-0">
|
<div className="relative h-5 w-5 shrink-0">
|
||||||
{progress < 1 ? (
|
{progress < 1 ? (
|
||||||
<CodeInProgress offset={offset} circumference={circumference} radius={radius} />
|
<CodeInProgress
|
||||||
|
offset={offset}
|
||||||
|
radius={radius}
|
||||||
|
progress={progress}
|
||||||
|
isSubmitting={isSubmitting}
|
||||||
|
circumference={circumference}
|
||||||
|
/>
|
||||||
) : (
|
) : (
|
||||||
<FinishedIcon />
|
<FinishedIcon />
|
||||||
)}
|
)}
|
||||||
|
|
@ -74,18 +83,25 @@ const CodeInProgress = ({
|
||||||
offset,
|
offset,
|
||||||
circumference,
|
circumference,
|
||||||
radius,
|
radius,
|
||||||
|
isSubmitting,
|
||||||
|
progress,
|
||||||
}: {
|
}: {
|
||||||
|
progress: number;
|
||||||
offset: number;
|
offset: number;
|
||||||
circumference: number;
|
circumference: number;
|
||||||
radius: number;
|
radius: number;
|
||||||
|
isSubmitting: boolean;
|
||||||
}) => {
|
}) => {
|
||||||
|
if (progress < 1 && !isSubmitting) {
|
||||||
|
return <CancelledIcon />;
|
||||||
|
}
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
className="absolute left-0 top-0 flex h-full w-full items-center justify-center rounded-full bg-transparent text-white"
|
className="absolute left-0 top-0 flex h-full w-full items-center justify-center rounded-full bg-transparent text-white"
|
||||||
style={{ opacity: 1, transform: 'none' }}
|
style={{ opacity: 1, transform: 'none' }}
|
||||||
data-projection-id="77"
|
data-projection-id="77"
|
||||||
>
|
>
|
||||||
<div className='absolute right-[1.5px] bottom-[1.5px]'>
|
<div className="absolute bottom-[1.5px] right-[1.5px]">
|
||||||
<svg
|
<svg
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
xmlnsXlink="http://www.w3.org/1999/xlink"
|
xmlnsXlink="http://www.w3.org/1999/xlink"
|
||||||
|
|
|
||||||
|
|
@ -79,11 +79,13 @@ export default function Part({
|
||||||
initialProgress={toolCall.progress ?? 0.1}
|
initialProgress={toolCall.progress ?? 0.1}
|
||||||
code={code_interpreter.input}
|
code={code_interpreter.input}
|
||||||
outputs={code_interpreter.outputs ?? []}
|
outputs={code_interpreter.outputs ?? []}
|
||||||
|
isSubmitting={isSubmitting}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
} else if (
|
} else if (
|
||||||
part.type === ContentTypes.TOOL_CALL &&
|
part.type === ContentTypes.TOOL_CALL &&
|
||||||
part[ContentTypes.TOOL_CALL].type === ToolCallTypes.RETRIEVAL
|
(part[ContentTypes.TOOL_CALL].type === ToolCallTypes.RETRIEVAL ||
|
||||||
|
part[ContentTypes.TOOL_CALL].type === ToolCallTypes.FILE_SEARCH)
|
||||||
) {
|
) {
|
||||||
const toolCall = part[ContentTypes.TOOL_CALL];
|
const toolCall = part[ContentTypes.TOOL_CALL];
|
||||||
return <RetrievalCall initialProgress={toolCall.progress ?? 0.1} isSubmitting={isSubmitting} />;
|
return <RetrievalCall initialProgress={toolCall.progress ?? 0.1} isSubmitting={isSubmitting} />;
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,4 @@
|
||||||
import { useState } from 'react';
|
import { useState } from 'react';
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
|
||||||
import type { TConversation, TMessage } from 'librechat-data-provider';
|
import type { TConversation, TMessage } from 'librechat-data-provider';
|
||||||
import { Clipboard, CheckMark, EditIcon, RegenerateIcon, ContinueIcon } from '~/components/svg';
|
import { Clipboard, CheckMark, EditIcon, RegenerateIcon, ContinueIcon } from '~/components/svg';
|
||||||
import { useGenerationsByLatest, useLocalize } from '~/hooks';
|
import { useGenerationsByLatest, useLocalize } from '~/hooks';
|
||||||
|
|
@ -35,8 +34,13 @@ export default function HoverButtons({
|
||||||
const { endpoint: _endpoint, endpointType } = conversation ?? {};
|
const { endpoint: _endpoint, endpointType } = conversation ?? {};
|
||||||
const endpoint = endpointType ?? _endpoint;
|
const endpoint = endpointType ?? _endpoint;
|
||||||
const [isCopied, setIsCopied] = useState(false);
|
const [isCopied, setIsCopied] = useState(false);
|
||||||
const { hideEditButton, regenerateEnabled, continueSupported, forkingSupported } =
|
const {
|
||||||
useGenerationsByLatest({
|
hideEditButton,
|
||||||
|
regenerateEnabled,
|
||||||
|
continueSupported,
|
||||||
|
forkingSupported,
|
||||||
|
isEditableEndpoint,
|
||||||
|
} = useGenerationsByLatest({
|
||||||
isEditing,
|
isEditing,
|
||||||
isSubmitting,
|
isSubmitting,
|
||||||
message,
|
message,
|
||||||
|
|
@ -58,7 +62,7 @@ export default function HoverButtons({
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="visible mt-0 flex justify-center gap-1 self-end text-gray-400 lg:justify-start">
|
<div className="visible mt-0 flex justify-center gap-1 self-end text-gray-400 lg:justify-start">
|
||||||
{endpoint !== EModelEndpoint.assistants && (
|
{isEditableEndpoint && (
|
||||||
<button
|
<button
|
||||||
className={cn(
|
className={cn(
|
||||||
'hover-button rounded-md p-1 text-gray-400 hover:text-gray-900 dark:text-gray-400/70 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
|
'hover-button rounded-md p-1 text-gray-400 hover:text-gray-900 dark:text-gray-400/70 dark:hover:text-gray-200 disabled:dark:hover:text-gray-400 md:group-hover:visible md:group-[.final-completion]:visible',
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,13 @@
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
import { isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import type { Assistant, TConversation, TEndpointsConfig, TPreset } from 'librechat-data-provider';
|
import type {
|
||||||
|
TAssistantsMap,
|
||||||
|
TConversation,
|
||||||
|
TEndpointsConfig,
|
||||||
|
TPreset,
|
||||||
|
} from 'librechat-data-provider';
|
||||||
|
import { getEndpointField, getIconKey, getIconEndpoint } from '~/utils';
|
||||||
import { icons } from '~/components/Chat/Menus/Endpoints/Icons';
|
import { icons } from '~/components/Chat/Menus/Endpoints/Icons';
|
||||||
import ConvoIconURL from '~/components/Endpoints/ConvoIconURL';
|
import ConvoIconURL from '~/components/Endpoints/ConvoIconURL';
|
||||||
import { getEndpointField, getIconKey, getIconEndpoint } from '~/utils';
|
|
||||||
|
|
||||||
export default function ConvoIcon({
|
export default function ConvoIcon({
|
||||||
conversation,
|
conversation,
|
||||||
|
|
@ -15,7 +20,7 @@ export default function ConvoIcon({
|
||||||
}: {
|
}: {
|
||||||
conversation: TConversation | TPreset | null;
|
conversation: TConversation | TPreset | null;
|
||||||
endpointsConfig: TEndpointsConfig;
|
endpointsConfig: TEndpointsConfig;
|
||||||
assistantMap: Record<string, Assistant>;
|
assistantMap: TAssistantsMap;
|
||||||
containerClassName?: string;
|
containerClassName?: string;
|
||||||
context?: 'message' | 'nav' | 'landing' | 'menu-item';
|
context?: 'message' | 'nav' | 'landing' | 'menu-item';
|
||||||
className?: string;
|
className?: string;
|
||||||
|
|
@ -25,7 +30,7 @@ export default function ConvoIcon({
|
||||||
let endpoint = conversation?.endpoint;
|
let endpoint = conversation?.endpoint;
|
||||||
endpoint = getIconEndpoint({ endpointsConfig, iconURL, endpoint });
|
endpoint = getIconEndpoint({ endpointsConfig, iconURL, endpoint });
|
||||||
const assistant =
|
const assistant =
|
||||||
endpoint === EModelEndpoint.assistants && assistantMap?.[conversation?.assistant_id ?? ''];
|
isAssistantsEndpoint(endpoint) && assistantMap?.[endpoint]?.[conversation?.assistant_id ?? ''];
|
||||||
const assistantName = (assistant && assistant?.name) || '';
|
const assistantName = (assistant && assistant?.name) || '';
|
||||||
|
|
||||||
const avatar = (assistant && (assistant?.metadata?.avatar as string)) || '';
|
const avatar = (assistant && (assistant?.metadata?.avatar as string)) || '';
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,10 @@
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
import { isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import type { Assistant, TConversation, TEndpointsConfig, TPreset } from 'librechat-data-provider';
|
import type {
|
||||||
|
TConversation,
|
||||||
|
TEndpointsConfig,
|
||||||
|
TPreset,
|
||||||
|
TAssistantsMap,
|
||||||
|
} from 'librechat-data-provider';
|
||||||
import ConvoIconURL from '~/components/Endpoints/ConvoIconURL';
|
import ConvoIconURL from '~/components/Endpoints/ConvoIconURL';
|
||||||
import MinimalIcon from '~/components/Endpoints/MinimalIcon';
|
import MinimalIcon from '~/components/Endpoints/MinimalIcon';
|
||||||
import { getEndpointField, getIconEndpoint } from '~/utils';
|
import { getEndpointField, getIconEndpoint } from '~/utils';
|
||||||
|
|
@ -15,7 +20,7 @@ export default function EndpointIcon({
|
||||||
endpointsConfig: TEndpointsConfig;
|
endpointsConfig: TEndpointsConfig;
|
||||||
containerClassName?: string;
|
containerClassName?: string;
|
||||||
context?: 'message' | 'nav' | 'landing' | 'menu-item';
|
context?: 'message' | 'nav' | 'landing' | 'menu-item';
|
||||||
assistantMap?: Record<string, Assistant>;
|
assistantMap?: TAssistantsMap;
|
||||||
className?: string;
|
className?: string;
|
||||||
size?: number;
|
size?: number;
|
||||||
}) {
|
}) {
|
||||||
|
|
@ -27,7 +32,7 @@ export default function EndpointIcon({
|
||||||
const endpointIconURL = getEndpointField(endpointsConfig, endpoint, 'iconURL');
|
const endpointIconURL = getEndpointField(endpointsConfig, endpoint, 'iconURL');
|
||||||
|
|
||||||
const assistant =
|
const assistant =
|
||||||
endpoint === EModelEndpoint.assistants && assistantMap?.[conversation?.assistant_id ?? ''];
|
isAssistantsEndpoint(endpoint) && assistantMap?.[endpoint]?.[conversation?.assistant_id ?? ''];
|
||||||
const assistantAvatar = (assistant && (assistant?.metadata?.avatar as string)) || '';
|
const assistantAvatar = (assistant && (assistant?.metadata?.avatar as string)) || '';
|
||||||
const assistantName = (assistant && assistant?.name) || '';
|
const assistantName = (assistant && assistant?.name) || '';
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -42,6 +42,7 @@ const MinimalIcon: React.FC<IconProps> = (props) => {
|
||||||
[EModelEndpoint.bingAI]: { icon: <BingAIMinimalIcon />, name: 'BingAI' },
|
[EModelEndpoint.bingAI]: { icon: <BingAIMinimalIcon />, name: 'BingAI' },
|
||||||
[EModelEndpoint.chatGPTBrowser]: { icon: <LightningIcon />, name: 'ChatGPT' },
|
[EModelEndpoint.chatGPTBrowser]: { icon: <LightningIcon />, name: 'ChatGPT' },
|
||||||
[EModelEndpoint.assistants]: { icon: <Sparkles className="icon-sm" />, name: 'Assistant' },
|
[EModelEndpoint.assistants]: { icon: <Sparkles className="icon-sm" />, name: 'Assistant' },
|
||||||
|
[EModelEndpoint.azureAssistants]: { icon: <Sparkles className="icon-sm" />, name: 'Assistant' },
|
||||||
default: {
|
default: {
|
||||||
icon: (
|
icon: (
|
||||||
<UnknownIcon
|
<UnknownIcon
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,7 @@
|
||||||
import TextareaAutosize from 'react-textarea-autosize';
|
import TextareaAutosize from 'react-textarea-autosize';
|
||||||
import { ImageDetail, imageDetailNumeric, imageDetailValue } from 'librechat-data-provider';
|
import { ImageDetail, imageDetailNumeric, imageDetailValue } from 'librechat-data-provider';
|
||||||
|
import type { ValueType } from '@rc-component/mini-decimal';
|
||||||
|
import type { TModelSelectProps } from '~/common';
|
||||||
import {
|
import {
|
||||||
Input,
|
Input,
|
||||||
Label,
|
Label,
|
||||||
|
|
@ -11,7 +13,6 @@ import {
|
||||||
} from '~/components/ui';
|
} from '~/components/ui';
|
||||||
import { cn, defaultTextProps, optionText, removeFocusOutlines } from '~/utils/';
|
import { cn, defaultTextProps, optionText, removeFocusOutlines } from '~/utils/';
|
||||||
import { useLocalize, useDebouncedInput } from '~/hooks';
|
import { useLocalize, useDebouncedInput } from '~/hooks';
|
||||||
import type { TModelSelectProps } from '~/common';
|
|
||||||
import OptionHover from './OptionHover';
|
import OptionHover from './OptionHover';
|
||||||
import { ESide } from '~/common';
|
import { ESide } from '~/common';
|
||||||
|
|
||||||
|
|
@ -127,7 +128,7 @@ export default function Settings({
|
||||||
id="temp-int"
|
id="temp-int"
|
||||||
disabled={readonly}
|
disabled={readonly}
|
||||||
value={temperatureValue as number}
|
value={temperatureValue as number}
|
||||||
onChange={setTemperature}
|
onChange={setTemperature as (value: ValueType | null) => void}
|
||||||
max={2}
|
max={2}
|
||||||
min={0}
|
min={0}
|
||||||
step={0.01}
|
step={0.01}
|
||||||
|
|
|
||||||
|
|
@ -1,12 +1,10 @@
|
||||||
import { useState, useMemo, useEffect } from 'react';
|
import { useState, useMemo, useEffect } from 'react';
|
||||||
import TextareaAutosize from 'react-textarea-autosize';
|
import TextareaAutosize from 'react-textarea-autosize';
|
||||||
import { defaultOrderQuery } from 'librechat-data-provider';
|
|
||||||
import type { TPreset } from 'librechat-data-provider';
|
import type { TPreset } from 'librechat-data-provider';
|
||||||
import type { TModelSelectProps, Option } from '~/common';
|
import type { TModelSelectProps, Option } from '~/common';
|
||||||
import { Label, HoverCard, SelectDropDown, HoverCardTrigger } from '~/components/ui';
|
import { Label, HoverCard, SelectDropDown, HoverCardTrigger } from '~/components/ui';
|
||||||
import { cn, defaultTextProps, removeFocusOutlines, mapAssistants } from '~/utils';
|
import { cn, defaultTextProps, removeFocusOutlines, mapAssistants } from '~/utils';
|
||||||
import { useLocalize, useDebouncedInput } from '~/hooks';
|
import { useLocalize, useDebouncedInput, useAssistantListMap } from '~/hooks';
|
||||||
import { useListAssistantsQuery } from '~/data-provider';
|
|
||||||
import OptionHover from './OptionHover';
|
import OptionHover from './OptionHover';
|
||||||
import { ESide } from '~/common';
|
import { ESide } from '~/common';
|
||||||
|
|
||||||
|
|
@ -17,23 +15,21 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||||
[localize],
|
[localize],
|
||||||
);
|
);
|
||||||
|
|
||||||
const { data: assistants = [] } = useListAssistantsQuery(defaultOrderQuery, {
|
const assistantListMap = useAssistantListMap((res) => mapAssistants(res.data));
|
||||||
select: (res) =>
|
|
||||||
[
|
|
||||||
defaultOption,
|
|
||||||
...res.data.map(({ id, name }) => ({
|
|
||||||
label: name,
|
|
||||||
value: id,
|
|
||||||
})),
|
|
||||||
].filter(Boolean),
|
|
||||||
});
|
|
||||||
|
|
||||||
const { data: assistantMap = {} } = useListAssistantsQuery(defaultOrderQuery, {
|
|
||||||
select: (res) => mapAssistants(res.data),
|
|
||||||
});
|
|
||||||
|
|
||||||
const { model, endpoint, assistant_id, endpointType, promptPrefix, instructions } =
|
const { model, endpoint, assistant_id, endpointType, promptPrefix, instructions } =
|
||||||
conversation ?? {};
|
conversation ?? {};
|
||||||
|
|
||||||
|
const assistants = useMemo(() => {
|
||||||
|
return [
|
||||||
|
defaultOption,
|
||||||
|
...(assistantListMap[endpoint ?? ''] ?? []).map(({ id, name }) => ({
|
||||||
|
label: name,
|
||||||
|
value: id,
|
||||||
|
})),
|
||||||
|
].filter(Boolean);
|
||||||
|
}, [assistantListMap, endpoint, defaultOption]);
|
||||||
|
|
||||||
const [onPromptPrefixChange, promptPrefixValue] = useDebouncedInput({
|
const [onPromptPrefixChange, promptPrefixValue] = useDebouncedInput({
|
||||||
setOption,
|
setOption,
|
||||||
optionKey: 'promptPrefix',
|
optionKey: 'promptPrefix',
|
||||||
|
|
@ -47,11 +43,11 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||||
|
|
||||||
const activeAssistant = useMemo(() => {
|
const activeAssistant = useMemo(() => {
|
||||||
if (assistant_id) {
|
if (assistant_id) {
|
||||||
return assistantMap[assistant_id];
|
return assistantListMap[endpoint ?? '']?.[assistant_id];
|
||||||
}
|
}
|
||||||
|
|
||||||
return null;
|
return null;
|
||||||
}, [assistant_id, assistantMap]);
|
}, [assistant_id, assistantListMap, endpoint]);
|
||||||
|
|
||||||
const modelOptions = useMemo(() => {
|
const modelOptions = useMemo(() => {
|
||||||
return models.map((model) => ({
|
return models.map((model) => ({
|
||||||
|
|
@ -89,7 +85,7 @@ export default function Settings({ conversation, setOption, models, readonly }:
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const assistant = assistantMap[value];
|
const assistant = assistantListMap[endpoint ?? '']?.[value];
|
||||||
if (!assistant) {
|
if (!assistant) {
|
||||||
setAssistantValue(defaultOption);
|
setAssistantValue(defaultOption);
|
||||||
return;
|
return;
|
||||||
|
|
|
||||||
|
|
@ -9,6 +9,7 @@ import OpenAISettings from './OpenAI';
|
||||||
|
|
||||||
const settings: { [key: string]: FC<TModelSelectProps> } = {
|
const settings: { [key: string]: FC<TModelSelectProps> } = {
|
||||||
[EModelEndpoint.assistants]: AssistantsSettings,
|
[EModelEndpoint.assistants]: AssistantsSettings,
|
||||||
|
[EModelEndpoint.azureAssistants]: AssistantsSettings,
|
||||||
[EModelEndpoint.openAI]: OpenAISettings,
|
[EModelEndpoint.openAI]: OpenAISettings,
|
||||||
[EModelEndpoint.custom]: OpenAISettings,
|
[EModelEndpoint.custom]: OpenAISettings,
|
||||||
[EModelEndpoint.azureOpenAI]: OpenAISettings,
|
[EModelEndpoint.azureOpenAI]: OpenAISettings,
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
import React, { useState } from 'react';
|
import React, { useState } from 'react';
|
||||||
import { useForm, FormProvider } from 'react-hook-form';
|
import { useForm, FormProvider } from 'react-hook-form';
|
||||||
import { EModelEndpoint, alternateName } from 'librechat-data-provider';
|
import { EModelEndpoint, alternateName, isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
|
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
|
||||||
import type { TDialogProps } from '~/common';
|
import type { TDialogProps } from '~/common';
|
||||||
import DialogTemplate from '~/components/ui/DialogTemplate';
|
import DialogTemplate from '~/components/ui/DialogTemplate';
|
||||||
|
|
@ -21,6 +21,7 @@ const endpointComponents = {
|
||||||
[EModelEndpoint.azureOpenAI]: OpenAIConfig,
|
[EModelEndpoint.azureOpenAI]: OpenAIConfig,
|
||||||
[EModelEndpoint.gptPlugins]: OpenAIConfig,
|
[EModelEndpoint.gptPlugins]: OpenAIConfig,
|
||||||
[EModelEndpoint.assistants]: OpenAIConfig,
|
[EModelEndpoint.assistants]: OpenAIConfig,
|
||||||
|
[EModelEndpoint.azureAssistants]: OpenAIConfig,
|
||||||
default: OtherConfig,
|
default: OtherConfig,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -30,6 +31,7 @@ const formSet: Set<string> = new Set([
|
||||||
EModelEndpoint.azureOpenAI,
|
EModelEndpoint.azureOpenAI,
|
||||||
EModelEndpoint.gptPlugins,
|
EModelEndpoint.gptPlugins,
|
||||||
EModelEndpoint.assistants,
|
EModelEndpoint.assistants,
|
||||||
|
EModelEndpoint.azureAssistants,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const EXPIRY = {
|
const EXPIRY = {
|
||||||
|
|
@ -97,7 +99,7 @@ const SetKeyDialog = ({
|
||||||
isAzure ||
|
isAzure ||
|
||||||
endpoint === EModelEndpoint.openAI ||
|
endpoint === EModelEndpoint.openAI ||
|
||||||
endpoint === EModelEndpoint.gptPlugins ||
|
endpoint === EModelEndpoint.gptPlugins ||
|
||||||
endpoint === EModelEndpoint.assistants;
|
isAssistantsEndpoint(endpoint);
|
||||||
if (isAzure) {
|
if (isAzure) {
|
||||||
data.apiKey = 'n/a';
|
data.apiKey = 'n/a';
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,10 @@
|
||||||
import { useEffect, useMemo } from 'react';
|
import { useEffect, useMemo } from 'react';
|
||||||
import { Combobox } from '~/components/ui';
|
import { Combobox } from '~/components/ui';
|
||||||
import { EModelEndpoint, defaultOrderQuery, LocalStorageKeys } from 'librechat-data-provider';
|
import { isAssistantsEndpoint, LocalStorageKeys } from 'librechat-data-provider';
|
||||||
import type { SwitcherProps } from '~/common';
|
import type { AssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import { useSetIndexOptions, useSelectAssistant, useLocalize } from '~/hooks';
|
import type { SwitcherProps, AssistantListItem } from '~/common';
|
||||||
|
import { useSetIndexOptions, useSelectAssistant, useLocalize, useAssistantListMap } from '~/hooks';
|
||||||
import { useChatContext, useAssistantsMapContext } from '~/Providers';
|
import { useChatContext, useAssistantsMapContext } from '~/Providers';
|
||||||
import { useListAssistantsQuery } from '~/data-provider';
|
|
||||||
import Icon from '~/components/Endpoints/Icon';
|
import Icon from '~/components/Endpoints/Icon';
|
||||||
|
|
||||||
export default function AssistantSwitcher({ isCollapsed }: SwitcherProps) {
|
export default function AssistantSwitcher({ isCollapsed }: SwitcherProps) {
|
||||||
|
|
@ -15,26 +15,29 @@ export default function AssistantSwitcher({ isCollapsed }: SwitcherProps) {
|
||||||
/* `selectedAssistant` must be defined with `null` to cause re-render on update */
|
/* `selectedAssistant` must be defined with `null` to cause re-render on update */
|
||||||
const { assistant_id: selectedAssistant = null, endpoint } = conversation ?? {};
|
const { assistant_id: selectedAssistant = null, endpoint } = conversation ?? {};
|
||||||
|
|
||||||
const { data: assistants = [] } = useListAssistantsQuery(defaultOrderQuery, {
|
const assistantListMap = useAssistantListMap((res) =>
|
||||||
select: (res) => res.data.map(({ id, name, metadata }) => ({ id, name, metadata })),
|
res.data.map(({ id, name, metadata }) => ({ id, name, metadata })),
|
||||||
});
|
);
|
||||||
|
const assistants: Omit<AssistantListItem, 'model'>[] = useMemo(
|
||||||
|
() => assistantListMap[endpoint ?? ''] ?? [],
|
||||||
|
[endpoint, assistantListMap],
|
||||||
|
);
|
||||||
const assistantMap = useAssistantsMapContext();
|
const assistantMap = useAssistantsMapContext();
|
||||||
const { onSelect } = useSelectAssistant();
|
const { onSelect } = useSelectAssistant(endpoint as AssistantsEndpoint);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!selectedAssistant && assistants && assistants.length && assistantMap) {
|
if (!selectedAssistant && assistants && assistants.length && assistantMap) {
|
||||||
const assistant_id =
|
const assistant_id =
|
||||||
localStorage.getItem(`${LocalStorageKeys.ASST_ID_PREFIX}${index}`) ??
|
localStorage.getItem(`${LocalStorageKeys.ASST_ID_PREFIX}${index}${endpoint}`) ??
|
||||||
assistants[0]?.id ??
|
assistants[0]?.id ??
|
||||||
'';
|
'';
|
||||||
const assistant = assistantMap?.[assistant_id];
|
const assistant = assistantMap?.[endpoint ?? '']?.[assistant_id];
|
||||||
|
|
||||||
if (!assistant) {
|
if (!assistant) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (endpoint !== EModelEndpoint.assistants) {
|
if (!isAssistantsEndpoint(endpoint)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -43,7 +46,7 @@ export default function AssistantSwitcher({ isCollapsed }: SwitcherProps) {
|
||||||
}
|
}
|
||||||
}, [index, assistants, selectedAssistant, assistantMap, endpoint, setOption]);
|
}, [index, assistants, selectedAssistant, assistantMap, endpoint, setOption]);
|
||||||
|
|
||||||
const currentAssistant = assistantMap?.[selectedAssistant ?? ''];
|
const currentAssistant = assistantMap?.[endpoint ?? '']?.[selectedAssistant ?? ''];
|
||||||
|
|
||||||
const assistantOptions = useMemo(() => {
|
const assistantOptions = useMemo(() => {
|
||||||
return assistants.map((assistant) => {
|
return assistants.map((assistant) => {
|
||||||
|
|
@ -53,14 +56,14 @@ export default function AssistantSwitcher({ isCollapsed }: SwitcherProps) {
|
||||||
icon: (
|
icon: (
|
||||||
<Icon
|
<Icon
|
||||||
isCreatedByUser={false}
|
isCreatedByUser={false}
|
||||||
endpoint={EModelEndpoint.assistants}
|
endpoint={endpoint}
|
||||||
assistantName={assistant.name ?? ''}
|
assistantName={assistant.name ?? ''}
|
||||||
iconURL={(assistant.metadata?.avatar as string) ?? ''}
|
iconURL={(assistant.metadata?.avatar as string) ?? ''}
|
||||||
/>
|
/>
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
}, [assistants]);
|
}, [assistants, endpoint]);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Combobox
|
<Combobox
|
||||||
|
|
@ -78,7 +81,7 @@ export default function AssistantSwitcher({ isCollapsed }: SwitcherProps) {
|
||||||
SelectIcon={
|
SelectIcon={
|
||||||
<Icon
|
<Icon
|
||||||
isCreatedByUser={false}
|
isCreatedByUser={false}
|
||||||
endpoint={EModelEndpoint.assistants}
|
endpoint={endpoint}
|
||||||
assistantName={currentAssistant?.name ?? ''}
|
assistantName={currentAssistant?.name ?? ''}
|
||||||
iconURL={(currentAssistant?.metadata?.avatar as string) ?? ''}
|
iconURL={(currentAssistant?.metadata?.avatar as string) ?? ''}
|
||||||
/>
|
/>
|
||||||
|
|
|
||||||
|
|
@ -7,10 +7,11 @@ import {
|
||||||
AuthTypeEnum,
|
AuthTypeEnum,
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
import type {
|
import type {
|
||||||
ValidationResult,
|
|
||||||
Action,
|
Action,
|
||||||
FunctionTool,
|
FunctionTool,
|
||||||
ActionMetadata,
|
ActionMetadata,
|
||||||
|
ValidationResult,
|
||||||
|
AssistantsEndpoint,
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
import type { ActionAuthForm } from '~/common';
|
import type { ActionAuthForm } from '~/common';
|
||||||
import type { Spec } from './ActionsTable';
|
import type { Spec } from './ActionsTable';
|
||||||
|
|
@ -32,10 +33,14 @@ const debouncedValidation = debounce(
|
||||||
export default function ActionsInput({
|
export default function ActionsInput({
|
||||||
action,
|
action,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
|
endpoint,
|
||||||
|
version,
|
||||||
setAction,
|
setAction,
|
||||||
}: {
|
}: {
|
||||||
action?: Action;
|
action?: Action;
|
||||||
assistant_id?: string;
|
assistant_id?: string;
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
|
version: number | string;
|
||||||
setAction: React.Dispatch<React.SetStateAction<Action | undefined>>;
|
setAction: React.Dispatch<React.SetStateAction<Action | undefined>>;
|
||||||
}) {
|
}) {
|
||||||
const handleResult = (result: ValidationResult) => {
|
const handleResult = (result: ValidationResult) => {
|
||||||
|
|
@ -173,7 +178,9 @@ export default function ActionsInput({
|
||||||
metadata,
|
metadata,
|
||||||
functions,
|
functions,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
model: assistantMap[assistant_id].model,
|
endpoint,
|
||||||
|
version,
|
||||||
|
model: assistantMap[endpoint][assistant_id].model,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -18,9 +18,11 @@ import { Panel } from '~/common';
|
||||||
export default function ActionsPanel({
|
export default function ActionsPanel({
|
||||||
// activePanel,
|
// activePanel,
|
||||||
action,
|
action,
|
||||||
|
endpoint,
|
||||||
|
version,
|
||||||
setAction,
|
setAction,
|
||||||
setActivePanel,
|
|
||||||
assistant_id,
|
assistant_id,
|
||||||
|
setActivePanel,
|
||||||
}: AssistantPanelProps) {
|
}: AssistantPanelProps) {
|
||||||
const localize = useLocalize();
|
const localize = useLocalize();
|
||||||
const { showToast } = useToastContext();
|
const { showToast } = useToastContext();
|
||||||
|
|
@ -130,9 +132,10 @@ export default function ActionsPanel({
|
||||||
const confirmed = confirm('Are you sure you want to delete this action?');
|
const confirmed = confirm('Are you sure you want to delete this action?');
|
||||||
if (confirmed) {
|
if (confirmed) {
|
||||||
deleteAction.mutate({
|
deleteAction.mutate({
|
||||||
model: assistantMap[assistant_id].model,
|
model: assistantMap[endpoint][assistant_id].model,
|
||||||
action_id: action.action_id,
|
action_id: action.action_id,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
|
endpoint,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
|
|
@ -185,7 +188,13 @@ export default function ActionsPanel({
|
||||||
</DialogTrigger>
|
</DialogTrigger>
|
||||||
<ActionsAuth setOpenAuthDialog={setOpenAuthDialog} />
|
<ActionsAuth setOpenAuthDialog={setOpenAuthDialog} />
|
||||||
</Dialog>
|
</Dialog>
|
||||||
<ActionsInput action={action} assistant_id={assistant_id} setAction={setAction} />
|
<ActionsInput
|
||||||
|
action={action}
|
||||||
|
assistant_id={assistant_id}
|
||||||
|
setAction={setAction}
|
||||||
|
endpoint={endpoint}
|
||||||
|
version={version}
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
</FormProvider>
|
</FormProvider>
|
||||||
|
|
|
||||||
|
|
@ -10,9 +10,10 @@ import {
|
||||||
import type { UseMutationResult } from '@tanstack/react-query';
|
import type { UseMutationResult } from '@tanstack/react-query';
|
||||||
import type {
|
import type {
|
||||||
Metadata,
|
Metadata,
|
||||||
AssistantListResponse,
|
|
||||||
Assistant,
|
Assistant,
|
||||||
|
AssistantsEndpoint,
|
||||||
AssistantCreateParams,
|
AssistantCreateParams,
|
||||||
|
AssistantListResponse,
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
import { useUploadAssistantAvatarMutation, useGetFileConfig } from '~/data-provider';
|
import { useUploadAssistantAvatarMutation, useGetFileConfig } from '~/data-provider';
|
||||||
import { AssistantAvatar, NoImage, AvatarMenu } from './Images';
|
import { AssistantAvatar, NoImage, AvatarMenu } from './Images';
|
||||||
|
|
@ -22,10 +23,14 @@ import { useLocalize } from '~/hooks';
|
||||||
// import { cn } from '~/utils/';
|
// import { cn } from '~/utils/';
|
||||||
|
|
||||||
function Avatar({
|
function Avatar({
|
||||||
|
endpoint,
|
||||||
|
version,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
metadata,
|
metadata,
|
||||||
createMutation,
|
createMutation,
|
||||||
}: {
|
}: {
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
|
version: number | string;
|
||||||
assistant_id: string | null;
|
assistant_id: string | null;
|
||||||
metadata: null | Metadata;
|
metadata: null | Metadata;
|
||||||
createMutation: UseMutationResult<Assistant, Error, AssistantCreateParams>;
|
createMutation: UseMutationResult<Assistant, Error, AssistantCreateParams>;
|
||||||
|
|
@ -46,8 +51,8 @@ function Avatar({
|
||||||
const { showToast } = useToastContext();
|
const { showToast } = useToastContext();
|
||||||
|
|
||||||
const activeModel = useMemo(() => {
|
const activeModel = useMemo(() => {
|
||||||
return assistantsMap[assistant_id ?? '']?.model ?? '';
|
return assistantsMap[endpoint][assistant_id ?? '']?.model ?? '';
|
||||||
}, [assistant_id, assistantsMap]);
|
}, [assistantsMap, endpoint, assistant_id]);
|
||||||
|
|
||||||
const { mutate: uploadAvatar } = useUploadAssistantAvatarMutation({
|
const { mutate: uploadAvatar } = useUploadAssistantAvatarMutation({
|
||||||
onMutate: () => {
|
onMutate: () => {
|
||||||
|
|
@ -65,6 +70,7 @@ function Avatar({
|
||||||
|
|
||||||
const res = queryClient.getQueryData<AssistantListResponse>([
|
const res = queryClient.getQueryData<AssistantListResponse>([
|
||||||
QueryKeys.assistants,
|
QueryKeys.assistants,
|
||||||
|
endpoint,
|
||||||
defaultOrderQuery,
|
defaultOrderQuery,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
|
@ -83,10 +89,13 @@ function Avatar({
|
||||||
return assistant;
|
return assistant;
|
||||||
}) ?? [];
|
}) ?? [];
|
||||||
|
|
||||||
queryClient.setQueryData<AssistantListResponse>([QueryKeys.assistants, defaultOrderQuery], {
|
queryClient.setQueryData<AssistantListResponse>(
|
||||||
|
[QueryKeys.assistants, endpoint, defaultOrderQuery],
|
||||||
|
{
|
||||||
...res,
|
...res,
|
||||||
data: assistants,
|
data: assistants,
|
||||||
});
|
},
|
||||||
|
);
|
||||||
|
|
||||||
setProgress(1);
|
setProgress(1);
|
||||||
},
|
},
|
||||||
|
|
@ -149,9 +158,20 @@ function Avatar({
|
||||||
model: activeModel,
|
model: activeModel,
|
||||||
postCreation: true,
|
postCreation: true,
|
||||||
formData,
|
formData,
|
||||||
|
endpoint,
|
||||||
|
version,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}, [createMutation.data, createMutation.isSuccess, input, previewUrl, uploadAvatar, activeModel]);
|
}, [
|
||||||
|
createMutation.data,
|
||||||
|
createMutation.isSuccess,
|
||||||
|
input,
|
||||||
|
previewUrl,
|
||||||
|
uploadAvatar,
|
||||||
|
activeModel,
|
||||||
|
endpoint,
|
||||||
|
version,
|
||||||
|
]);
|
||||||
|
|
||||||
const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>): void => {
|
const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>): void => {
|
||||||
const file = event.target.files?.[0];
|
const file = event.target.files?.[0];
|
||||||
|
|
@ -183,6 +203,8 @@ function Avatar({
|
||||||
assistant_id,
|
assistant_id,
|
||||||
model: activeModel,
|
model: activeModel,
|
||||||
formData,
|
formData,
|
||||||
|
endpoint,
|
||||||
|
version,
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
showToast({
|
showToast({
|
||||||
|
|
|
||||||
|
|
@ -1,23 +1,23 @@
|
||||||
import { useState, useMemo, useEffect } from 'react';
|
import { useState, useMemo } from 'react';
|
||||||
import { useQueryClient } from '@tanstack/react-query';
|
import { useQueryClient } from '@tanstack/react-query';
|
||||||
import { useForm, FormProvider, Controller, useWatch } from 'react-hook-form';
|
import { useForm, FormProvider, Controller, useWatch } from 'react-hook-form';
|
||||||
import { useGetModelsQuery, useGetEndpointsQuery } from 'librechat-data-provider/react-query';
|
import { useGetModelsQuery } from 'librechat-data-provider/react-query';
|
||||||
import {
|
import {
|
||||||
Tools,
|
Tools,
|
||||||
QueryKeys,
|
QueryKeys,
|
||||||
Capabilities,
|
Capabilities,
|
||||||
EModelEndpoint,
|
|
||||||
actionDelimiter,
|
actionDelimiter,
|
||||||
ImageVisionTool,
|
ImageVisionTool,
|
||||||
defaultAssistantFormValues,
|
defaultAssistantFormValues,
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
|
import type { FunctionTool, TConfig, TPlugin } from 'librechat-data-provider';
|
||||||
import type { AssistantForm, AssistantPanelProps } from '~/common';
|
import type { AssistantForm, AssistantPanelProps } from '~/common';
|
||||||
import type { FunctionTool, TPlugin, TEndpointsConfig } from 'librechat-data-provider';
|
|
||||||
import { useCreateAssistantMutation, useUpdateAssistantMutation } from '~/data-provider';
|
import { useCreateAssistantMutation, useUpdateAssistantMutation } from '~/data-provider';
|
||||||
import { SelectDropDown, Checkbox, QuestionMark } from '~/components/ui';
|
|
||||||
import { useAssistantsMapContext, useToastContext } from '~/Providers';
|
import { useAssistantsMapContext, useToastContext } from '~/Providers';
|
||||||
import { useSelectAssistant, useLocalize } from '~/hooks';
|
import { useSelectAssistant, useLocalize } from '~/hooks';
|
||||||
import { ToolSelectDialog } from '~/components/Tools';
|
import { ToolSelectDialog } from '~/components/Tools';
|
||||||
|
import CapabilitiesForm from './CapabilitiesForm';
|
||||||
|
import { SelectDropDown } from '~/components/ui';
|
||||||
import AssistantAvatar from './AssistantAvatar';
|
import AssistantAvatar from './AssistantAvatar';
|
||||||
import AssistantSelect from './AssistantSelect';
|
import AssistantSelect from './AssistantSelect';
|
||||||
import AssistantAction from './AssistantAction';
|
import AssistantAction from './AssistantAction';
|
||||||
|
|
@ -35,17 +35,20 @@ const inputClass =
|
||||||
export default function AssistantPanel({
|
export default function AssistantPanel({
|
||||||
// index = 0,
|
// index = 0,
|
||||||
setAction,
|
setAction,
|
||||||
|
endpoint,
|
||||||
actions = [],
|
actions = [],
|
||||||
setActivePanel,
|
setActivePanel,
|
||||||
assistant_id: current_assistant_id,
|
assistant_id: current_assistant_id,
|
||||||
setCurrentAssistantId,
|
setCurrentAssistantId,
|
||||||
}: AssistantPanelProps) {
|
assistantsConfig,
|
||||||
|
version,
|
||||||
|
}: AssistantPanelProps & { assistantsConfig?: TConfig | null }) {
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
const modelsQuery = useGetModelsQuery();
|
const modelsQuery = useGetModelsQuery();
|
||||||
const assistantMap = useAssistantsMapContext();
|
const assistantMap = useAssistantsMapContext();
|
||||||
const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
|
|
||||||
const allTools = queryClient.getQueryData<TPlugin[]>([QueryKeys.tools]) ?? [];
|
const allTools = queryClient.getQueryData<TPlugin[]>([QueryKeys.tools]) ?? [];
|
||||||
const { onSelect: onSelectAssistant } = useSelectAssistant();
|
const { onSelect: onSelectAssistant } = useSelectAssistant(endpoint);
|
||||||
const { showToast } = useToastContext();
|
const { showToast } = useToastContext();
|
||||||
const localize = useLocalize();
|
const localize = useLocalize();
|
||||||
|
|
||||||
|
|
@ -55,44 +58,31 @@ export default function AssistantPanel({
|
||||||
|
|
||||||
const [showToolDialog, setShowToolDialog] = useState(false);
|
const [showToolDialog, setShowToolDialog] = useState(false);
|
||||||
|
|
||||||
const { control, handleSubmit, reset, setValue, getValues } = methods;
|
const { control, handleSubmit, reset } = methods;
|
||||||
const assistant = useWatch({ control, name: 'assistant' });
|
const assistant = useWatch({ control, name: 'assistant' });
|
||||||
const functions = useWatch({ control, name: 'functions' });
|
const functions = useWatch({ control, name: 'functions' });
|
||||||
const assistant_id = useWatch({ control, name: 'id' });
|
const assistant_id = useWatch({ control, name: 'id' });
|
||||||
const model = useWatch({ control, name: 'model' });
|
|
||||||
|
|
||||||
const activeModel = useMemo(() => {
|
const activeModel = useMemo(() => {
|
||||||
return assistantMap?.[assistant_id]?.model;
|
return assistantMap?.[endpoint]?.[assistant_id]?.model;
|
||||||
}, [assistantMap, assistant_id]);
|
}, [assistantMap, endpoint, assistant_id]);
|
||||||
|
|
||||||
const assistants = useMemo(() => endpointsConfig?.[EModelEndpoint.assistants], [endpointsConfig]);
|
|
||||||
const retrievalModels = useMemo(() => new Set(assistants?.retrievalModels ?? []), [assistants]);
|
|
||||||
const toolsEnabled = useMemo(
|
const toolsEnabled = useMemo(
|
||||||
() => assistants?.capabilities?.includes(Capabilities.tools),
|
() => assistantsConfig?.capabilities?.includes(Capabilities.tools),
|
||||||
[assistants],
|
[assistantsConfig],
|
||||||
);
|
);
|
||||||
const actionsEnabled = useMemo(
|
const actionsEnabled = useMemo(
|
||||||
() => assistants?.capabilities?.includes(Capabilities.actions),
|
() => assistantsConfig?.capabilities?.includes(Capabilities.actions),
|
||||||
[assistants],
|
[assistantsConfig],
|
||||||
);
|
);
|
||||||
const retrievalEnabled = useMemo(
|
const retrievalEnabled = useMemo(
|
||||||
() => assistants?.capabilities?.includes(Capabilities.retrieval),
|
() => assistantsConfig?.capabilities?.includes(Capabilities.retrieval),
|
||||||
[assistants],
|
[assistantsConfig],
|
||||||
);
|
);
|
||||||
const codeEnabled = useMemo(
|
const codeEnabled = useMemo(
|
||||||
() => assistants?.capabilities?.includes(Capabilities.code_interpreter),
|
() => assistantsConfig?.capabilities?.includes(Capabilities.code_interpreter),
|
||||||
[assistants],
|
[assistantsConfig],
|
||||||
);
|
);
|
||||||
const imageVisionEnabled = useMemo(
|
|
||||||
() => assistants?.capabilities?.includes(Capabilities.image_vision),
|
|
||||||
[assistants],
|
|
||||||
);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
if (model && !retrievalModels.has(model)) {
|
|
||||||
setValue(Capabilities.retrieval, false);
|
|
||||||
}
|
|
||||||
}, [model, setValue, retrievalModels]);
|
|
||||||
|
|
||||||
/* Mutations */
|
/* Mutations */
|
||||||
const update = useUpdateAssistantMutation({
|
const update = useUpdateAssistantMutation({
|
||||||
|
|
@ -145,7 +135,7 @@ export default function AssistantPanel({
|
||||||
if (!functionName.includes(actionDelimiter)) {
|
if (!functionName.includes(actionDelimiter)) {
|
||||||
return functionName;
|
return functionName;
|
||||||
} else {
|
} else {
|
||||||
const assistant = assistantMap?.[assistant_id];
|
const assistant = assistantMap?.[endpoint]?.[assistant_id];
|
||||||
const tool = assistant?.tools?.find((tool) => tool.function?.name === functionName);
|
const tool = assistant?.tools?.find((tool) => tool.function?.name === functionName);
|
||||||
if (assistant && tool) {
|
if (assistant && tool) {
|
||||||
return tool;
|
return tool;
|
||||||
|
|
@ -160,7 +150,7 @@ export default function AssistantPanel({
|
||||||
tools.push({ type: Tools.code_interpreter });
|
tools.push({ type: Tools.code_interpreter });
|
||||||
}
|
}
|
||||||
if (data.retrieval) {
|
if (data.retrieval) {
|
||||||
tools.push({ type: Tools.retrieval });
|
tools.push({ type: version == 2 ? Tools.file_search : Tools.retrieval });
|
||||||
}
|
}
|
||||||
if (data.image_vision) {
|
if (data.image_vision) {
|
||||||
tools.push(ImageVisionTool);
|
tools.push(ImageVisionTool);
|
||||||
|
|
@ -183,6 +173,7 @@ export default function AssistantPanel({
|
||||||
instructions,
|
instructions,
|
||||||
model,
|
model,
|
||||||
tools,
|
tools,
|
||||||
|
endpoint,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
return;
|
return;
|
||||||
|
|
@ -194,6 +185,8 @@ export default function AssistantPanel({
|
||||||
instructions,
|
instructions,
|
||||||
model,
|
model,
|
||||||
tools,
|
tools,
|
||||||
|
endpoint,
|
||||||
|
version,
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
@ -211,6 +204,7 @@ export default function AssistantPanel({
|
||||||
<AssistantSelect
|
<AssistantSelect
|
||||||
reset={reset}
|
reset={reset}
|
||||||
value={field.value}
|
value={field.value}
|
||||||
|
endpoint={endpoint}
|
||||||
setCurrentAssistantId={setCurrentAssistantId}
|
setCurrentAssistantId={setCurrentAssistantId}
|
||||||
selectedAssistant={current_assistant_id ?? null}
|
selectedAssistant={current_assistant_id ?? null}
|
||||||
createMutation={create}
|
createMutation={create}
|
||||||
|
|
@ -239,6 +233,8 @@ export default function AssistantPanel({
|
||||||
createMutation={create}
|
createMutation={create}
|
||||||
assistant_id={assistant_id ?? null}
|
assistant_id={assistant_id ?? null}
|
||||||
metadata={assistant?.['metadata'] ?? null}
|
metadata={assistant?.['metadata'] ?? null}
|
||||||
|
endpoint={endpoint}
|
||||||
|
version={version}
|
||||||
/>
|
/>
|
||||||
<label className={labelClass} htmlFor="name">
|
<label className={labelClass} htmlFor="name">
|
||||||
{localize('com_ui_name')}
|
{localize('com_ui_name')}
|
||||||
|
|
@ -324,7 +320,7 @@ export default function AssistantPanel({
|
||||||
emptyTitle={true}
|
emptyTitle={true}
|
||||||
value={field.value}
|
value={field.value}
|
||||||
setValue={field.onChange}
|
setValue={field.onChange}
|
||||||
availableValues={modelsQuery.data?.[EModelEndpoint.assistants] ?? []}
|
availableValues={modelsQuery.data?.[endpoint] ?? []}
|
||||||
showAbove={false}
|
showAbove={false}
|
||||||
showLabel={false}
|
showLabel={false}
|
||||||
className={cn(
|
className={cn(
|
||||||
|
|
@ -343,120 +339,17 @@ export default function AssistantPanel({
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
{/* Knowledge */}
|
{/* Knowledge */}
|
||||||
{(codeEnabled || retrievalEnabled) && (
|
{(codeEnabled || retrievalEnabled) && version == 1 && (
|
||||||
<Knowledge assistant_id={assistant_id} files={files} />
|
<Knowledge assistant_id={assistant_id} files={files} endpoint={endpoint} />
|
||||||
)}
|
)}
|
||||||
{/* Capabilities */}
|
{/* Capabilities */}
|
||||||
<div className="mb-6">
|
<CapabilitiesForm
|
||||||
<div className="mb-1.5 flex items-center">
|
version={version}
|
||||||
<span>
|
endpoint={endpoint}
|
||||||
<label className="text-token-text-primary block font-medium">
|
codeEnabled={codeEnabled}
|
||||||
{localize('com_assistants_capabilities')}
|
assistantsConfig={assistantsConfig}
|
||||||
</label>
|
retrievalEnabled={retrievalEnabled}
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
<div className="flex flex-col items-start gap-2">
|
|
||||||
{codeEnabled && (
|
|
||||||
<div className="flex items-center">
|
|
||||||
<Controller
|
|
||||||
name={Capabilities.code_interpreter}
|
|
||||||
control={control}
|
|
||||||
render={({ field }) => (
|
|
||||||
<Checkbox
|
|
||||||
{...field}
|
|
||||||
checked={field.value}
|
|
||||||
onCheckedChange={field.onChange}
|
|
||||||
className="relative float-left mr-2 inline-flex h-4 w-4 cursor-pointer"
|
|
||||||
value={field?.value?.toString()}
|
|
||||||
/>
|
/>
|
||||||
)}
|
|
||||||
/>
|
|
||||||
<label
|
|
||||||
className="form-check-label text-token-text-primary w-full cursor-pointer"
|
|
||||||
htmlFor={Capabilities.code_interpreter}
|
|
||||||
onClick={() =>
|
|
||||||
setValue(
|
|
||||||
Capabilities.code_interpreter,
|
|
||||||
!getValues(Capabilities.code_interpreter),
|
|
||||||
{
|
|
||||||
shouldDirty: true,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
>
|
|
||||||
<div className="flex items-center">
|
|
||||||
{localize('com_assistants_code_interpreter')}
|
|
||||||
<QuestionMark />
|
|
||||||
</div>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{imageVisionEnabled && (
|
|
||||||
<div className="flex items-center">
|
|
||||||
<Controller
|
|
||||||
name={Capabilities.image_vision}
|
|
||||||
control={control}
|
|
||||||
render={({ field }) => (
|
|
||||||
<Checkbox
|
|
||||||
{...field}
|
|
||||||
checked={field.value}
|
|
||||||
onCheckedChange={field.onChange}
|
|
||||||
className="relative float-left mr-2 inline-flex h-4 w-4 cursor-pointer"
|
|
||||||
value={field?.value?.toString()}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
/>
|
|
||||||
<label
|
|
||||||
className="form-check-label text-token-text-primary w-full cursor-pointer"
|
|
||||||
htmlFor={Capabilities.image_vision}
|
|
||||||
onClick={() =>
|
|
||||||
setValue(Capabilities.image_vision, !getValues(Capabilities.image_vision), {
|
|
||||||
shouldDirty: true,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
>
|
|
||||||
<div className="flex items-center">
|
|
||||||
{localize('com_assistants_image_vision')}
|
|
||||||
<QuestionMark />
|
|
||||||
</div>
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
{retrievalEnabled && (
|
|
||||||
<div className="flex items-center">
|
|
||||||
<Controller
|
|
||||||
name={Capabilities.retrieval}
|
|
||||||
control={control}
|
|
||||||
render={({ field }) => (
|
|
||||||
<Checkbox
|
|
||||||
{...field}
|
|
||||||
checked={field.value}
|
|
||||||
disabled={!retrievalModels.has(model)}
|
|
||||||
onCheckedChange={field.onChange}
|
|
||||||
className="relative float-left mr-2 inline-flex h-4 w-4 cursor-pointer"
|
|
||||||
value={field?.value?.toString()}
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
/>
|
|
||||||
<label
|
|
||||||
className={cn(
|
|
||||||
'form-check-label text-token-text-primary w-full',
|
|
||||||
!retrievalModels.has(model) ? 'cursor-no-drop opacity-50' : 'cursor-pointer',
|
|
||||||
)}
|
|
||||||
htmlFor={Capabilities.retrieval}
|
|
||||||
onClick={() =>
|
|
||||||
retrievalModels.has(model) &&
|
|
||||||
setValue(Capabilities.retrieval, !getValues(Capabilities.retrieval), {
|
|
||||||
shouldDirty: true,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
>
|
|
||||||
{localize('com_assistants_retrieval')}
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{/* Tools */}
|
{/* Tools */}
|
||||||
<div className="mb-6">
|
<div className="mb-6">
|
||||||
<label className={labelClass}>
|
<label className={labelClass}>
|
||||||
|
|
@ -520,6 +413,7 @@ export default function AssistantPanel({
|
||||||
activeModel={activeModel}
|
activeModel={activeModel}
|
||||||
setCurrentAssistantId={setCurrentAssistantId}
|
setCurrentAssistantId={setCurrentAssistantId}
|
||||||
createMutation={create}
|
createMutation={create}
|
||||||
|
endpoint={endpoint}
|
||||||
/>
|
/>
|
||||||
{/* Secondary Select Button */}
|
{/* Secondary Select Button */}
|
||||||
{assistant_id && (
|
{assistant_id && (
|
||||||
|
|
@ -554,6 +448,7 @@ export default function AssistantPanel({
|
||||||
isOpen={showToolDialog}
|
isOpen={showToolDialog}
|
||||||
setIsOpen={setShowToolDialog}
|
setIsOpen={setShowToolDialog}
|
||||||
assistant_id={assistant_id}
|
assistant_id={assistant_id}
|
||||||
|
endpoint={endpoint}
|
||||||
/>
|
/>
|
||||||
</form>
|
</form>
|
||||||
</FormProvider>
|
</FormProvider>
|
||||||
|
|
|
||||||
|
|
@ -1,21 +1,21 @@
|
||||||
import { Plus } from 'lucide-react';
|
import { Plus } from 'lucide-react';
|
||||||
import { useCallback, useEffect, useRef } from 'react';
|
import { useCallback, useEffect, useRef } from 'react';
|
||||||
import {
|
import {
|
||||||
defaultAssistantFormValues,
|
Tools,
|
||||||
defaultOrderQuery,
|
|
||||||
isImageVisionTool,
|
|
||||||
EModelEndpoint,
|
|
||||||
Capabilities,
|
|
||||||
FileSources,
|
FileSources,
|
||||||
|
Capabilities,
|
||||||
|
LocalStorageKeys,
|
||||||
|
isImageVisionTool,
|
||||||
|
defaultAssistantFormValues,
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
import type { UseFormReset } from 'react-hook-form';
|
import type { UseFormReset } from 'react-hook-form';
|
||||||
import type { UseMutationResult } from '@tanstack/react-query';
|
import type { UseMutationResult } from '@tanstack/react-query';
|
||||||
import type { Assistant, AssistantCreateParams } from 'librechat-data-provider';
|
import type { Assistant, AssistantCreateParams, AssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import type {
|
import type {
|
||||||
AssistantForm,
|
|
||||||
Actions,
|
Actions,
|
||||||
TAssistantOption,
|
|
||||||
ExtendedFile,
|
ExtendedFile,
|
||||||
|
AssistantForm,
|
||||||
|
TAssistantOption,
|
||||||
LastSelectedModels,
|
LastSelectedModels,
|
||||||
} from '~/common';
|
} from '~/common';
|
||||||
import SelectDropDown from '~/components/ui/SelectDropDown';
|
import SelectDropDown from '~/components/ui/SelectDropDown';
|
||||||
|
|
@ -29,12 +29,14 @@ const keys = new Set(['name', 'id', 'description', 'instructions', 'model']);
|
||||||
export default function AssistantSelect({
|
export default function AssistantSelect({
|
||||||
reset,
|
reset,
|
||||||
value,
|
value,
|
||||||
|
endpoint,
|
||||||
selectedAssistant,
|
selectedAssistant,
|
||||||
setCurrentAssistantId,
|
setCurrentAssistantId,
|
||||||
createMutation,
|
createMutation,
|
||||||
}: {
|
}: {
|
||||||
reset: UseFormReset<AssistantForm>;
|
reset: UseFormReset<AssistantForm>;
|
||||||
value: TAssistantOption;
|
value: TAssistantOption;
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
selectedAssistant: string | null;
|
selectedAssistant: string | null;
|
||||||
setCurrentAssistantId: React.Dispatch<React.SetStateAction<string | undefined>>;
|
setCurrentAssistantId: React.Dispatch<React.SetStateAction<string | undefined>>;
|
||||||
createMutation: UseMutationResult<Assistant, Error, AssistantCreateParams>;
|
createMutation: UseMutationResult<Assistant, Error, AssistantCreateParams>;
|
||||||
|
|
@ -43,11 +45,11 @@ export default function AssistantSelect({
|
||||||
const fileMap = useFileMapContext();
|
const fileMap = useFileMapContext();
|
||||||
const lastSelectedAssistant = useRef<string | null>(null);
|
const lastSelectedAssistant = useRef<string | null>(null);
|
||||||
const [lastSelectedModels] = useLocalStorage<LastSelectedModels>(
|
const [lastSelectedModels] = useLocalStorage<LastSelectedModels>(
|
||||||
'lastSelectedModel',
|
LocalStorageKeys.LAST_MODEL,
|
||||||
{} as LastSelectedModels,
|
{} as LastSelectedModels,
|
||||||
);
|
);
|
||||||
|
|
||||||
const assistants = useListAssistantsQuery(defaultOrderQuery, {
|
const assistants = useListAssistantsQuery(endpoint, undefined, {
|
||||||
select: (res) =>
|
select: (res) =>
|
||||||
res.data.map((_assistant) => {
|
res.data.map((_assistant) => {
|
||||||
const assistant = {
|
const assistant = {
|
||||||
|
|
@ -55,13 +57,15 @@ export default function AssistantSelect({
|
||||||
label: _assistant?.name ?? '',
|
label: _assistant?.name ?? '',
|
||||||
value: _assistant.id,
|
value: _assistant.id,
|
||||||
files: _assistant?.file_ids ? ([] as Array<[string, ExtendedFile]>) : undefined,
|
files: _assistant?.file_ids ? ([] as Array<[string, ExtendedFile]>) : undefined,
|
||||||
|
code_files: _assistant?.tool_resources?.code_interpreter?.file_ids
|
||||||
|
? ([] as Array<[string, ExtendedFile]>)
|
||||||
|
: undefined,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (assistant.files && _assistant.file_ids) {
|
const handleFile = (file_id: string, list?: Array<[string, ExtendedFile]>) => {
|
||||||
_assistant.file_ids.forEach((file_id) => {
|
|
||||||
const file = fileMap?.[file_id];
|
const file = fileMap?.[file_id];
|
||||||
if (file) {
|
if (file) {
|
||||||
assistant.files?.push([
|
list?.push([
|
||||||
file_id,
|
file_id,
|
||||||
{
|
{
|
||||||
file_id: file.file_id,
|
file_id: file.file_id,
|
||||||
|
|
@ -76,9 +80,32 @@ export default function AssistantSelect({
|
||||||
source: FileSources.openai,
|
source: FileSources.openai,
|
||||||
},
|
},
|
||||||
]);
|
]);
|
||||||
|
} else {
|
||||||
|
list?.push([
|
||||||
|
file_id,
|
||||||
|
{
|
||||||
|
file_id,
|
||||||
|
type: '',
|
||||||
|
filename: '',
|
||||||
|
size: 1,
|
||||||
|
progress: 1,
|
||||||
|
filepath: endpoint,
|
||||||
|
source: FileSources.openai,
|
||||||
|
},
|
||||||
|
]);
|
||||||
}
|
}
|
||||||
});
|
};
|
||||||
|
|
||||||
|
if (assistant.files && _assistant.file_ids) {
|
||||||
|
_assistant.file_ids.forEach((file_id) => handleFile(file_id, assistant.files));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (assistant.code_files && _assistant.tool_resources?.code_interpreter?.file_ids) {
|
||||||
|
_assistant.tool_resources?.code_interpreter?.file_ids?.forEach((file_id) =>
|
||||||
|
handleFile(file_id, assistant.code_files),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
return assistant;
|
return assistant;
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
@ -92,7 +119,7 @@ export default function AssistantSelect({
|
||||||
setCurrentAssistantId(undefined);
|
setCurrentAssistantId(undefined);
|
||||||
return reset({
|
return reset({
|
||||||
...defaultAssistantFormValues,
|
...defaultAssistantFormValues,
|
||||||
model: lastSelectedModels?.[EModelEndpoint.assistants] ?? '',
|
model: lastSelectedModels?.[endpoint] ?? '',
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -112,6 +139,9 @@ export default function AssistantSelect({
|
||||||
?.filter((tool) => tool.type !== 'function' || isImageVisionTool(tool))
|
?.filter((tool) => tool.type !== 'function' || isImageVisionTool(tool))
|
||||||
?.map((tool) => tool?.function?.name || tool.type)
|
?.map((tool) => tool?.function?.name || tool.type)
|
||||||
.forEach((tool) => {
|
.forEach((tool) => {
|
||||||
|
if (tool === Tools.file_search) {
|
||||||
|
actions[Capabilities.retrieval] = true;
|
||||||
|
}
|
||||||
actions[tool] = true;
|
actions[tool] = true;
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
@ -141,7 +171,7 @@ export default function AssistantSelect({
|
||||||
reset(formValues);
|
reset(formValues);
|
||||||
setCurrentAssistantId(assistant?.id);
|
setCurrentAssistantId(assistant?.id);
|
||||||
},
|
},
|
||||||
[assistants.data, reset, setCurrentAssistantId, createMutation, lastSelectedModels],
|
[assistants.data, reset, setCurrentAssistantId, createMutation, endpoint, lastSelectedModels],
|
||||||
);
|
);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
|
|
|
||||||
51
client/src/components/SidePanel/Builder/CapabilitiesForm.tsx
Normal file
51
client/src/components/SidePanel/Builder/CapabilitiesForm.tsx
Normal file
|
|
@ -0,0 +1,51 @@
|
||||||
|
import { useMemo } from 'react';
|
||||||
|
import { Capabilities } from 'librechat-data-provider';
|
||||||
|
import type { TConfig, AssistantsEndpoint } from 'librechat-data-provider';
|
||||||
|
import ImageVision from './ImageVision';
|
||||||
|
import { useLocalize } from '~/hooks';
|
||||||
|
import Retrieval from './Retrieval';
|
||||||
|
import Code from './Code';
|
||||||
|
|
||||||
|
export default function CapabilitiesForm({
|
||||||
|
version,
|
||||||
|
endpoint,
|
||||||
|
codeEnabled,
|
||||||
|
retrievalEnabled,
|
||||||
|
assistantsConfig,
|
||||||
|
}: {
|
||||||
|
version: number | string;
|
||||||
|
codeEnabled?: boolean;
|
||||||
|
retrievalEnabled?: boolean;
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
|
assistantsConfig?: TConfig | null;
|
||||||
|
}) {
|
||||||
|
const localize = useLocalize();
|
||||||
|
|
||||||
|
const retrievalModels = useMemo(
|
||||||
|
() => new Set(assistantsConfig?.retrievalModels ?? []),
|
||||||
|
[assistantsConfig],
|
||||||
|
);
|
||||||
|
const imageVisionEnabled = useMemo(
|
||||||
|
() => assistantsConfig?.capabilities?.includes(Capabilities.image_vision),
|
||||||
|
[assistantsConfig],
|
||||||
|
);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="mb-6">
|
||||||
|
<div className="mb-1.5 flex items-center">
|
||||||
|
<span>
|
||||||
|
<label className="text-token-text-primary block font-medium">
|
||||||
|
{localize('com_assistants_capabilities')}
|
||||||
|
</label>
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex flex-col items-start gap-2">
|
||||||
|
{codeEnabled && <Code endpoint={endpoint} version={version} />}
|
||||||
|
{imageVisionEnabled && version == 1 && <ImageVision />}
|
||||||
|
{retrievalEnabled && (
|
||||||
|
<Retrieval endpoint={endpoint} version={version} retrievalModels={retrievalModels} />
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
71
client/src/components/SidePanel/Builder/Code.tsx
Normal file
71
client/src/components/SidePanel/Builder/Code.tsx
Normal file
|
|
@ -0,0 +1,71 @@
|
||||||
|
import { useMemo } from 'react';
|
||||||
|
import { Capabilities } from 'librechat-data-provider';
|
||||||
|
import { useFormContext, Controller, useWatch } from 'react-hook-form';
|
||||||
|
import type { AssistantsEndpoint } from 'librechat-data-provider';
|
||||||
|
import type { AssistantForm } from '~/common';
|
||||||
|
import { Checkbox, QuestionMark } from '~/components/ui';
|
||||||
|
import { useLocalize } from '~/hooks';
|
||||||
|
import CodeFiles from './CodeFiles';
|
||||||
|
|
||||||
|
export default function Code({
|
||||||
|
version,
|
||||||
|
endpoint,
|
||||||
|
}: {
|
||||||
|
version: number | string;
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
|
}) {
|
||||||
|
const localize = useLocalize();
|
||||||
|
const methods = useFormContext<AssistantForm>();
|
||||||
|
const { control, setValue, getValues } = methods;
|
||||||
|
const assistant = useWatch({ control, name: 'assistant' });
|
||||||
|
const assistant_id = useWatch({ control, name: 'id' });
|
||||||
|
const files = useMemo(() => {
|
||||||
|
if (typeof assistant === 'string') {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
return assistant.code_files;
|
||||||
|
}, [assistant]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Controller
|
||||||
|
name={Capabilities.code_interpreter}
|
||||||
|
control={control}
|
||||||
|
render={({ field }) => (
|
||||||
|
<Checkbox
|
||||||
|
{...field}
|
||||||
|
checked={field.value}
|
||||||
|
onCheckedChange={field.onChange}
|
||||||
|
className="relative float-left mr-2 inline-flex h-4 w-4 cursor-pointer"
|
||||||
|
value={field?.value?.toString()}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
/>
|
||||||
|
<label
|
||||||
|
className="form-check-label text-token-text-primary w-full cursor-pointer"
|
||||||
|
htmlFor={Capabilities.code_interpreter}
|
||||||
|
onClick={() =>
|
||||||
|
setValue(Capabilities.code_interpreter, !getValues(Capabilities.code_interpreter), {
|
||||||
|
shouldDirty: true,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<div className="flex select-none items-center">
|
||||||
|
{localize('com_assistants_code_interpreter')}
|
||||||
|
<QuestionMark />
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
{version == 2 && (
|
||||||
|
<CodeFiles
|
||||||
|
assistant_id={assistant_id}
|
||||||
|
version={version}
|
||||||
|
endpoint={endpoint}
|
||||||
|
files={files}
|
||||||
|
tool_resource={Capabilities.code_interpreter}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
93
client/src/components/SidePanel/Builder/CodeFiles.tsx
Normal file
93
client/src/components/SidePanel/Builder/CodeFiles.tsx
Normal file
|
|
@ -0,0 +1,93 @@
|
||||||
|
import { useState, useRef, useEffect } from 'react';
|
||||||
|
import { mergeFileConfig, fileConfig as defaultFileConfig } from 'librechat-data-provider';
|
||||||
|
import type { AssistantsEndpoint } from 'librechat-data-provider';
|
||||||
|
import type { ExtendedFile } from '~/common';
|
||||||
|
import FileRow from '~/components/Chat/Input/Files/FileRow';
|
||||||
|
import { useGetFileConfig } from '~/data-provider';
|
||||||
|
import { useFileHandling } from '~/hooks/Files';
|
||||||
|
import useLocalize from '~/hooks/useLocalize';
|
||||||
|
import { useChatContext } from '~/Providers';
|
||||||
|
|
||||||
|
export default function Knowledge({
|
||||||
|
endpoint,
|
||||||
|
assistant_id,
|
||||||
|
files: _files,
|
||||||
|
tool_resource,
|
||||||
|
}: {
|
||||||
|
version: number | string;
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
|
assistant_id: string;
|
||||||
|
files?: [string, ExtendedFile][];
|
||||||
|
tool_resource?: string;
|
||||||
|
}) {
|
||||||
|
const localize = useLocalize();
|
||||||
|
const { setFilesLoading } = useChatContext();
|
||||||
|
const fileInputRef = useRef<HTMLInputElement>(null);
|
||||||
|
const [files, setFiles] = useState<Map<string, ExtendedFile>>(new Map());
|
||||||
|
const { data: fileConfig = defaultFileConfig } = useGetFileConfig({
|
||||||
|
select: (data) => mergeFileConfig(data),
|
||||||
|
});
|
||||||
|
const { handleFileChange } = useFileHandling({
|
||||||
|
overrideEndpoint: endpoint,
|
||||||
|
additionalMetadata: { assistant_id, tool_resource },
|
||||||
|
fileSetter: setFiles,
|
||||||
|
});
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (_files) {
|
||||||
|
setFiles(new Map(_files));
|
||||||
|
}
|
||||||
|
}, [_files]);
|
||||||
|
|
||||||
|
const endpointFileConfig = fileConfig.endpoints[endpoint];
|
||||||
|
|
||||||
|
if (endpointFileConfig?.disabled) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleButtonClick = () => {
|
||||||
|
// necessary to reset the input
|
||||||
|
if (fileInputRef.current) {
|
||||||
|
fileInputRef.current.value = '';
|
||||||
|
}
|
||||||
|
fileInputRef.current?.click();
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className={'mb-2'}>
|
||||||
|
<div className="flex flex-col gap-4">
|
||||||
|
<div className="text-token-text-tertiary rounded-lg text-xs">
|
||||||
|
{assistant_id ? localize('com_assistants_knowledge_info') : ''}
|
||||||
|
</div>
|
||||||
|
<FileRow
|
||||||
|
files={files}
|
||||||
|
setFiles={setFiles}
|
||||||
|
setFilesLoading={setFilesLoading}
|
||||||
|
assistant_id={assistant_id}
|
||||||
|
Wrapper={({ children }) => <div className="flex flex-wrap gap-2">{children}</div>}
|
||||||
|
/>
|
||||||
|
<div>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
disabled={!assistant_id}
|
||||||
|
className="btn btn-neutral border-token-border-light relative h-8 rounded-lg font-medium"
|
||||||
|
onClick={handleButtonClick}
|
||||||
|
>
|
||||||
|
<div className="flex w-full items-center justify-center gap-2">
|
||||||
|
<input
|
||||||
|
multiple={true}
|
||||||
|
type="file"
|
||||||
|
style={{ display: 'none' }}
|
||||||
|
tabIndex={-1}
|
||||||
|
ref={fileInputRef}
|
||||||
|
disabled={!assistant_id}
|
||||||
|
onChange={handleFileChange}
|
||||||
|
/>
|
||||||
|
{localize('com_ui_upload_files')}
|
||||||
|
</div>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
@ -1,26 +1,29 @@
|
||||||
import * as Popover from '@radix-ui/react-popover';
|
import * as Popover from '@radix-ui/react-popover';
|
||||||
import type { Assistant, AssistantCreateParams } from 'librechat-data-provider';
|
import type { Assistant, AssistantCreateParams, AssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import type { UseMutationResult } from '@tanstack/react-query';
|
import type { UseMutationResult } from '@tanstack/react-query';
|
||||||
import { Dialog, DialogTrigger, Label } from '~/components/ui';
|
import { Dialog, DialogTrigger, Label } from '~/components/ui';
|
||||||
import DialogTemplate from '~/components/ui/DialogTemplate';
|
import { useChatContext, useToastContext } from '~/Providers';
|
||||||
import { useDeleteAssistantMutation } from '~/data-provider';
|
import { useDeleteAssistantMutation } from '~/data-provider';
|
||||||
|
import DialogTemplate from '~/components/ui/DialogTemplate';
|
||||||
import { useLocalize, useSetIndexOptions } from '~/hooks';
|
import { useLocalize, useSetIndexOptions } from '~/hooks';
|
||||||
import { cn, removeFocusOutlines } from '~/utils/';
|
import { cn, removeFocusOutlines } from '~/utils/';
|
||||||
import { NewTrashIcon } from '~/components/svg';
|
import { NewTrashIcon } from '~/components/svg';
|
||||||
import { useChatContext } from '~/Providers';
|
|
||||||
|
|
||||||
export default function ContextButton({
|
export default function ContextButton({
|
||||||
activeModel,
|
activeModel,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
setCurrentAssistantId,
|
setCurrentAssistantId,
|
||||||
createMutation,
|
createMutation,
|
||||||
|
endpoint,
|
||||||
}: {
|
}: {
|
||||||
activeModel: string;
|
activeModel: string;
|
||||||
assistant_id: string;
|
assistant_id: string;
|
||||||
setCurrentAssistantId: React.Dispatch<React.SetStateAction<string | undefined>>;
|
setCurrentAssistantId: React.Dispatch<React.SetStateAction<string | undefined>>;
|
||||||
createMutation: UseMutationResult<Assistant, Error, AssistantCreateParams>;
|
createMutation: UseMutationResult<Assistant, Error, AssistantCreateParams>;
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
}) {
|
}) {
|
||||||
const localize = useLocalize();
|
const localize = useLocalize();
|
||||||
|
const { showToast } = useToastContext();
|
||||||
const { conversation } = useChatContext();
|
const { conversation } = useChatContext();
|
||||||
const { setOption } = useSetIndexOptions();
|
const { setOption } = useSetIndexOptions();
|
||||||
|
|
||||||
|
|
@ -31,6 +34,11 @@ export default function ContextButton({
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
showToast({
|
||||||
|
message: localize('com_ui_assistant_deleted'),
|
||||||
|
status: 'success',
|
||||||
|
});
|
||||||
|
|
||||||
if (createMutation.data?.id) {
|
if (createMutation.data?.id) {
|
||||||
console.log('[deleteAssistant] resetting createMutation');
|
console.log('[deleteAssistant] resetting createMutation');
|
||||||
createMutation.reset();
|
createMutation.reset();
|
||||||
|
|
@ -55,6 +63,13 @@ export default function ContextButton({
|
||||||
|
|
||||||
setCurrentAssistantId(firstAssistant.id);
|
setCurrentAssistantId(firstAssistant.id);
|
||||||
},
|
},
|
||||||
|
onError: (error) => {
|
||||||
|
console.error(error);
|
||||||
|
showToast({
|
||||||
|
message: localize('com_ui_assistant_delete_error'),
|
||||||
|
status: 'error',
|
||||||
|
});
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!assistant_id) {
|
if (!assistant_id) {
|
||||||
|
|
@ -138,7 +153,8 @@ export default function ContextButton({
|
||||||
</>
|
</>
|
||||||
}
|
}
|
||||||
selection={{
|
selection={{
|
||||||
selectHandler: () => deleteAssistant.mutate({ assistant_id, model: activeModel }),
|
selectHandler: () =>
|
||||||
|
deleteAssistant.mutate({ assistant_id, model: activeModel, endpoint }),
|
||||||
selectClasses: 'bg-red-600 hover:bg-red-700 dark:hover:bg-red-800 text-white',
|
selectClasses: 'bg-red-600 hover:bg-red-700 dark:hover:bg-red-800 text-white',
|
||||||
selectText: localize('com_ui_delete'),
|
selectText: localize('com_ui_delete'),
|
||||||
}}
|
}}
|
||||||
|
|
|
||||||
43
client/src/components/SidePanel/Builder/ImageVision.tsx
Normal file
43
client/src/components/SidePanel/Builder/ImageVision.tsx
Normal file
|
|
@ -0,0 +1,43 @@
|
||||||
|
import { useFormContext, Controller } from 'react-hook-form';
|
||||||
|
import { Capabilities } from 'librechat-data-provider';
|
||||||
|
import type { AssistantForm } from '~/common';
|
||||||
|
import { Checkbox, QuestionMark } from '~/components/ui';
|
||||||
|
import { useLocalize } from '~/hooks';
|
||||||
|
|
||||||
|
export default function ImageVision() {
|
||||||
|
const localize = useLocalize();
|
||||||
|
const methods = useFormContext<AssistantForm>();
|
||||||
|
const { control, setValue, getValues } = methods;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Controller
|
||||||
|
name={Capabilities.image_vision}
|
||||||
|
control={control}
|
||||||
|
render={({ field }) => (
|
||||||
|
<Checkbox
|
||||||
|
{...field}
|
||||||
|
checked={field.value}
|
||||||
|
onCheckedChange={field.onChange}
|
||||||
|
className="relative float-left mr-2 inline-flex h-4 w-4 cursor-pointer"
|
||||||
|
value={field?.value?.toString()}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
/>
|
||||||
|
<label
|
||||||
|
className="form-check-label text-token-text-primary w-full cursor-pointer"
|
||||||
|
htmlFor={Capabilities.image_vision}
|
||||||
|
onClick={() =>
|
||||||
|
setValue(Capabilities.image_vision, !getValues(Capabilities.image_vision), {
|
||||||
|
shouldDirty: true,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<div className="flex items-center">
|
||||||
|
{localize('com_assistants_image_vision')}
|
||||||
|
<QuestionMark />
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
@ -1,10 +1,10 @@
|
||||||
import { useState, useRef, useEffect } from 'react';
|
import { useState, useRef, useEffect } from 'react';
|
||||||
import {
|
import {
|
||||||
EModelEndpoint,
|
mergeFileConfig,
|
||||||
retrievalMimeTypes,
|
retrievalMimeTypes,
|
||||||
fileConfig as defaultFileConfig,
|
fileConfig as defaultFileConfig,
|
||||||
mergeFileConfig,
|
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
|
import type { AssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import type { ExtendedFile } from '~/common';
|
import type { ExtendedFile } from '~/common';
|
||||||
import FileRow from '~/components/Chat/Input/Files/FileRow';
|
import FileRow from '~/components/Chat/Input/Files/FileRow';
|
||||||
import { useGetFileConfig } from '~/data-provider';
|
import { useGetFileConfig } from '~/data-provider';
|
||||||
|
|
@ -26,9 +26,11 @@ const CodeInterpreterFiles = ({ children }: { children: React.ReactNode }) => {
|
||||||
};
|
};
|
||||||
|
|
||||||
export default function Knowledge({
|
export default function Knowledge({
|
||||||
|
endpoint,
|
||||||
assistant_id,
|
assistant_id,
|
||||||
files: _files,
|
files: _files,
|
||||||
}: {
|
}: {
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
assistant_id: string;
|
assistant_id: string;
|
||||||
files?: [string, ExtendedFile][];
|
files?: [string, ExtendedFile][];
|
||||||
}) {
|
}) {
|
||||||
|
|
@ -40,7 +42,7 @@ export default function Knowledge({
|
||||||
select: (data) => mergeFileConfig(data),
|
select: (data) => mergeFileConfig(data),
|
||||||
});
|
});
|
||||||
const { handleFileChange } = useFileHandling({
|
const { handleFileChange } = useFileHandling({
|
||||||
overrideEndpoint: EModelEndpoint.assistants,
|
overrideEndpoint: endpoint,
|
||||||
additionalMetadata: { assistant_id },
|
additionalMetadata: { assistant_id },
|
||||||
fileSetter: setFiles,
|
fileSetter: setFiles,
|
||||||
});
|
});
|
||||||
|
|
@ -51,7 +53,7 @@ export default function Knowledge({
|
||||||
}
|
}
|
||||||
}, [_files]);
|
}, [_files]);
|
||||||
|
|
||||||
const endpointFileConfig = fileConfig.endpoints[EModelEndpoint.assistants];
|
const endpointFileConfig = fileConfig.endpoints[endpoint];
|
||||||
|
|
||||||
if (endpointFileConfig?.disabled) {
|
if (endpointFileConfig?.disabled) {
|
||||||
return null;
|
return null;
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,7 @@
|
||||||
import { useState, useEffect } from 'react';
|
import { useState, useEffect, useMemo } from 'react';
|
||||||
import type { Action } from 'librechat-data-provider';
|
import { defaultAssistantsVersion } from 'librechat-data-provider';
|
||||||
|
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
|
||||||
|
import type { Action, AssistantsEndpoint, TEndpointsConfig } from 'librechat-data-provider';
|
||||||
import { useGetActionsQuery } from '~/data-provider';
|
import { useGetActionsQuery } from '~/data-provider';
|
||||||
import AssistantPanel from './AssistantPanel';
|
import AssistantPanel from './AssistantPanel';
|
||||||
import { useChatContext } from '~/Providers';
|
import { useChatContext } from '~/Providers';
|
||||||
|
|
@ -9,11 +11,18 @@ import { Panel } from '~/common';
|
||||||
export default function PanelSwitch() {
|
export default function PanelSwitch() {
|
||||||
const { conversation, index } = useChatContext();
|
const { conversation, index } = useChatContext();
|
||||||
const [activePanel, setActivePanel] = useState(Panel.builder);
|
const [activePanel, setActivePanel] = useState(Panel.builder);
|
||||||
|
const [action, setAction] = useState<Action | undefined>(undefined);
|
||||||
const [currentAssistantId, setCurrentAssistantId] = useState<string | undefined>(
|
const [currentAssistantId, setCurrentAssistantId] = useState<string | undefined>(
|
||||||
conversation?.assistant_id,
|
conversation?.assistant_id,
|
||||||
);
|
);
|
||||||
const [action, setAction] = useState<Action | undefined>(undefined);
|
|
||||||
const { data: actions = [] } = useGetActionsQuery();
|
const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
|
||||||
|
const { data: actions = [] } = useGetActionsQuery(conversation?.endpoint as AssistantsEndpoint);
|
||||||
|
|
||||||
|
const assistantsConfig = useMemo(
|
||||||
|
() => endpointsConfig?.[conversation?.endpoint ?? ''],
|
||||||
|
[conversation?.endpoint, endpointsConfig],
|
||||||
|
);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (conversation?.assistant_id) {
|
if (conversation?.assistant_id) {
|
||||||
|
|
@ -21,6 +30,12 @@ export default function PanelSwitch() {
|
||||||
}
|
}
|
||||||
}, [conversation?.assistant_id]);
|
}, [conversation?.assistant_id]);
|
||||||
|
|
||||||
|
if (!conversation?.endpoint) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const version = assistantsConfig?.version ?? defaultAssistantsVersion[conversation.endpoint];
|
||||||
|
|
||||||
if (activePanel === Panel.actions || action) {
|
if (activePanel === Panel.actions || action) {
|
||||||
return (
|
return (
|
||||||
<ActionsPanel
|
<ActionsPanel
|
||||||
|
|
@ -32,6 +47,8 @@ export default function PanelSwitch() {
|
||||||
setActivePanel={setActivePanel}
|
setActivePanel={setActivePanel}
|
||||||
assistant_id={currentAssistantId}
|
assistant_id={currentAssistantId}
|
||||||
setCurrentAssistantId={setCurrentAssistantId}
|
setCurrentAssistantId={setCurrentAssistantId}
|
||||||
|
endpoint={conversation.endpoint as AssistantsEndpoint}
|
||||||
|
version={version}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
} else if (activePanel === Panel.builder) {
|
} else if (activePanel === Panel.builder) {
|
||||||
|
|
@ -45,6 +62,9 @@ export default function PanelSwitch() {
|
||||||
setActivePanel={setActivePanel}
|
setActivePanel={setActivePanel}
|
||||||
assistant_id={currentAssistantId}
|
assistant_id={currentAssistantId}
|
||||||
setCurrentAssistantId={setCurrentAssistantId}
|
setCurrentAssistantId={setCurrentAssistantId}
|
||||||
|
endpoint={conversation.endpoint as AssistantsEndpoint}
|
||||||
|
assistantsConfig={assistantsConfig}
|
||||||
|
version={version}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
70
client/src/components/SidePanel/Builder/Retrieval.tsx
Normal file
70
client/src/components/SidePanel/Builder/Retrieval.tsx
Normal file
|
|
@ -0,0 +1,70 @@
|
||||||
|
import { useEffect, useMemo } from 'react';
|
||||||
|
import { useFormContext, Controller, useWatch } from 'react-hook-form';
|
||||||
|
import { Capabilities } from 'librechat-data-provider';
|
||||||
|
import type { AssistantsEndpoint } from 'librechat-data-provider';
|
||||||
|
import type { AssistantForm } from '~/common';
|
||||||
|
import { Checkbox } from '~/components/ui';
|
||||||
|
import { useLocalize } from '~/hooks';
|
||||||
|
import { cn } from '~/utils/';
|
||||||
|
|
||||||
|
export default function Retrieval({
|
||||||
|
version,
|
||||||
|
retrievalModels,
|
||||||
|
}: {
|
||||||
|
version: number | string;
|
||||||
|
retrievalModels: Set<string>;
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
|
}) {
|
||||||
|
const localize = useLocalize();
|
||||||
|
const methods = useFormContext<AssistantForm>();
|
||||||
|
const { control, setValue, getValues } = methods;
|
||||||
|
const model = useWatch({ control, name: 'model' });
|
||||||
|
const assistant = useWatch({ control, name: 'assistant' });
|
||||||
|
|
||||||
|
const files = useMemo(() => {
|
||||||
|
if (typeof assistant === 'string') {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
return assistant.tool_resources?.file_search;
|
||||||
|
}, [assistant]);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (model && !retrievalModels.has(model)) {
|
||||||
|
setValue(Capabilities.retrieval, false);
|
||||||
|
}
|
||||||
|
}, [model, setValue, retrievalModels]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex items-center">
|
||||||
|
<Controller
|
||||||
|
name={Capabilities.retrieval}
|
||||||
|
control={control}
|
||||||
|
render={({ field }) => (
|
||||||
|
<Checkbox
|
||||||
|
{...field}
|
||||||
|
checked={field.value}
|
||||||
|
disabled={!retrievalModels.has(model)}
|
||||||
|
onCheckedChange={field.onChange}
|
||||||
|
className="relative float-left mr-2 inline-flex h-4 w-4 cursor-pointer"
|
||||||
|
value={field?.value?.toString()}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
/>
|
||||||
|
<label
|
||||||
|
className={cn(
|
||||||
|
'form-check-label text-token-text-primary w-full',
|
||||||
|
!retrievalModels.has(model) ? 'cursor-no-drop opacity-50' : 'cursor-pointer',
|
||||||
|
)}
|
||||||
|
htmlFor={Capabilities.retrieval}
|
||||||
|
onClick={() =>
|
||||||
|
retrievalModels.has(model) &&
|
||||||
|
setValue(Capabilities.retrieval, !getValues(Capabilities.retrieval), {
|
||||||
|
shouldDirty: true,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
>
|
||||||
|
{localize('com_assistants_retrieval')}
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
import throttle from 'lodash/throttle';
|
import throttle from 'lodash/throttle';
|
||||||
import { EModelEndpoint, getConfigDefaults } from 'librechat-data-provider';
|
import { getConfigDefaults } from 'librechat-data-provider';
|
||||||
import { useState, useRef, useCallback, useEffect, useMemo, memo } from 'react';
|
import { useState, useRef, useCallback, useEffect, useMemo, memo } from 'react';
|
||||||
import {
|
import {
|
||||||
useGetEndpointsQuery,
|
useGetEndpointsQuery,
|
||||||
|
|
@ -61,7 +61,7 @@ const SidePanel = ({
|
||||||
return activePanel ? activePanel : undefined;
|
return activePanel ? activePanel : undefined;
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const assistants = useMemo(() => endpointsConfig?.[EModelEndpoint.assistants], [endpointsConfig]);
|
const assistants = useMemo(() => endpointsConfig?.[endpoint ?? ''], [endpoint, endpointsConfig]);
|
||||||
const userProvidesKey = useMemo(
|
const userProvidesKey = useMemo(
|
||||||
() => !!endpointsConfig?.[endpoint ?? '']?.userProvide,
|
() => !!endpointsConfig?.[endpoint ?? '']?.userProvide,
|
||||||
[endpointsConfig, endpoint],
|
[endpointsConfig, endpoint],
|
||||||
|
|
|
||||||
|
|
@ -1,18 +1,18 @@
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
import { isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import type { SwitcherProps } from '~/common';
|
import type { SwitcherProps } from '~/common';
|
||||||
import { Separator } from '~/components/ui/Separator';
|
import { Separator } from '~/components/ui/Separator';
|
||||||
import AssistantSwitcher from './AssistantSwitcher';
|
import AssistantSwitcher from './AssistantSwitcher';
|
||||||
import ModelSwitcher from './ModelSwitcher';
|
import ModelSwitcher from './ModelSwitcher';
|
||||||
|
|
||||||
export default function Switcher(props: SwitcherProps) {
|
export default function Switcher(props: SwitcherProps) {
|
||||||
if (props.endpoint === EModelEndpoint.assistants && props.endpointKeyProvided) {
|
if (isAssistantsEndpoint(props.endpoint) && props.endpointKeyProvided) {
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<AssistantSwitcher {...props} />
|
<AssistantSwitcher {...props} />
|
||||||
<Separator className="bg-gray-100/50 dark:bg-gray-600" />
|
<Separator className="bg-gray-100/50 dark:bg-gray-600" />
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
} else if (props.endpoint === EModelEndpoint.assistants) {
|
} else if (isAssistantsEndpoint(props.endpoint)) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ import { Search, X } from 'lucide-react';
|
||||||
import { Dialog } from '@headlessui/react';
|
import { Dialog } from '@headlessui/react';
|
||||||
import { useFormContext } from 'react-hook-form';
|
import { useFormContext } from 'react-hook-form';
|
||||||
import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query';
|
import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query';
|
||||||
import type { TError, TPluginAction } from 'librechat-data-provider';
|
import type { AssistantsEndpoint, TError, TPluginAction } from 'librechat-data-provider';
|
||||||
import type { TPluginStoreDialogProps } from '~/common/types';
|
import type { TPluginStoreDialogProps } from '~/common/types';
|
||||||
import { PluginPagination, PluginAuthForm } from '~/components/Plugins/Store';
|
import { PluginPagination, PluginAuthForm } from '~/components/Plugins/Store';
|
||||||
import { useLocalize, usePluginDialogHelpers } from '~/hooks';
|
import { useLocalize, usePluginDialogHelpers } from '~/hooks';
|
||||||
|
|
@ -13,10 +13,11 @@ import ToolItem from './ToolItem';
|
||||||
function ToolSelectDialog({
|
function ToolSelectDialog({
|
||||||
isOpen,
|
isOpen,
|
||||||
setIsOpen,
|
setIsOpen,
|
||||||
}: TPluginStoreDialogProps & { assistant_id?: string }) {
|
endpoint,
|
||||||
|
}: TPluginStoreDialogProps & { assistant_id?: string; endpoint: AssistantsEndpoint }) {
|
||||||
const localize = useLocalize();
|
const localize = useLocalize();
|
||||||
const { getValues, setValue } = useFormContext();
|
const { getValues, setValue } = useFormContext();
|
||||||
const { data: tools = [] } = useAvailableToolsQuery();
|
const { data: tools = [] } = useAvailableToolsQuery(endpoint);
|
||||||
|
|
||||||
const {
|
const {
|
||||||
maxPage,
|
maxPage,
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
import { LocalStorageKeys } from 'librechat-data-provider';
|
import { Capabilities, LocalStorageKeys, defaultAssistantsVersion } from 'librechat-data-provider';
|
||||||
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
||||||
import type { UseMutationResult } from '@tanstack/react-query';
|
import type { UseMutationResult } from '@tanstack/react-query';
|
||||||
import type t from 'librechat-data-provider';
|
import type t from 'librechat-data-provider';
|
||||||
|
|
@ -376,9 +376,10 @@ export const useUploadFileMutation = (
|
||||||
const { onSuccess, ...options } = _options || {};
|
const { onSuccess, ...options } = _options || {};
|
||||||
return useMutation([MutationKeys.fileUpload], {
|
return useMutation([MutationKeys.fileUpload], {
|
||||||
mutationFn: (body: FormData) => {
|
mutationFn: (body: FormData) => {
|
||||||
const height = body.get('height');
|
|
||||||
const width = body.get('width');
|
const width = body.get('width');
|
||||||
if (height && width) {
|
const height = body.get('height');
|
||||||
|
const version = body.get('version') as number | string;
|
||||||
|
if (height && width && (!version || version != 2)) {
|
||||||
return dataService.uploadImage(body);
|
return dataService.uploadImage(body);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -391,8 +392,10 @@ export const useUploadFileMutation = (
|
||||||
...(_files ?? []),
|
...(_files ?? []),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
const endpoint = formData.get('endpoint');
|
||||||
const assistant_id = formData.get('assistant_id');
|
const assistant_id = formData.get('assistant_id');
|
||||||
const message_file = formData.get('message_file');
|
const message_file = formData.get('message_file');
|
||||||
|
const tool_resource = formData.get('tool_resource');
|
||||||
|
|
||||||
if (!assistant_id || message_file === 'true') {
|
if (!assistant_id || message_file === 'true') {
|
||||||
onSuccess?.(data, formData, context);
|
onSuccess?.(data, formData, context);
|
||||||
|
|
@ -400,7 +403,7 @@ export const useUploadFileMutation = (
|
||||||
}
|
}
|
||||||
|
|
||||||
queryClient.setQueryData<t.AssistantListResponse>(
|
queryClient.setQueryData<t.AssistantListResponse>(
|
||||||
[QueryKeys.assistants, defaultOrderQuery],
|
[QueryKeys.assistants, endpoint, defaultOrderQuery],
|
||||||
(prev) => {
|
(prev) => {
|
||||||
if (!prev) {
|
if (!prev) {
|
||||||
return prev;
|
return prev;
|
||||||
|
|
@ -409,13 +412,29 @@ export const useUploadFileMutation = (
|
||||||
return {
|
return {
|
||||||
...prev,
|
...prev,
|
||||||
data: prev?.data.map((assistant) => {
|
data: prev?.data.map((assistant) => {
|
||||||
if (assistant.id === assistant_id) {
|
if (assistant.id !== assistant_id) {
|
||||||
return {
|
return assistant;
|
||||||
...assistant,
|
}
|
||||||
file_ids: [...assistant.file_ids, data.file_id],
|
|
||||||
|
const update = {};
|
||||||
|
if (!tool_resource) {
|
||||||
|
update['file_ids'] = [...assistant.file_ids, data.file_id];
|
||||||
|
}
|
||||||
|
if (tool_resource === Capabilities.code_interpreter) {
|
||||||
|
const prevResources = assistant.tool_resources ?? {};
|
||||||
|
const prevResource = assistant.tool_resources?.[tool_resource as string] ?? {
|
||||||
|
file_ids: [],
|
||||||
|
};
|
||||||
|
prevResource.file_ids.push(data.file_id);
|
||||||
|
update['tool_resources'] = {
|
||||||
|
...prevResources,
|
||||||
|
[tool_resource as string]: prevResource,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
return assistant;
|
return {
|
||||||
|
...assistant,
|
||||||
|
...update,
|
||||||
|
};
|
||||||
}),
|
}),
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
|
|
@ -542,6 +561,7 @@ export const useCreateAssistantMutation = (
|
||||||
onSuccess: (newAssistant, variables, context) => {
|
onSuccess: (newAssistant, variables, context) => {
|
||||||
const listRes = queryClient.getQueryData<t.AssistantListResponse>([
|
const listRes = queryClient.getQueryData<t.AssistantListResponse>([
|
||||||
QueryKeys.assistants,
|
QueryKeys.assistants,
|
||||||
|
variables.endpoint,
|
||||||
defaultOrderQuery,
|
defaultOrderQuery,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
|
@ -552,7 +572,7 @@ export const useCreateAssistantMutation = (
|
||||||
const currentAssistants = [newAssistant, ...JSON.parse(JSON.stringify(listRes.data))];
|
const currentAssistants = [newAssistant, ...JSON.parse(JSON.stringify(listRes.data))];
|
||||||
|
|
||||||
queryClient.setQueryData<t.AssistantListResponse>(
|
queryClient.setQueryData<t.AssistantListResponse>(
|
||||||
[QueryKeys.assistants, defaultOrderQuery],
|
[QueryKeys.assistants, variables.endpoint, defaultOrderQuery],
|
||||||
{
|
{
|
||||||
...listRes,
|
...listRes,
|
||||||
data: currentAssistants,
|
data: currentAssistants,
|
||||||
|
|
@ -576,14 +596,23 @@ export const useUpdateAssistantMutation = (
|
||||||
> => {
|
> => {
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
return useMutation(
|
return useMutation(
|
||||||
({ assistant_id, data }: { assistant_id: string; data: t.AssistantUpdateParams }) =>
|
({ assistant_id, data }: { assistant_id: string; data: t.AssistantUpdateParams }) => {
|
||||||
dataService.updateAssistant(assistant_id, data),
|
const { endpoint } = data;
|
||||||
|
const endpointsConfig = queryClient.getQueryData<t.TEndpointsConfig>([QueryKeys.endpoints]);
|
||||||
|
const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
|
||||||
|
return dataService.updateAssistant({
|
||||||
|
data,
|
||||||
|
version,
|
||||||
|
assistant_id,
|
||||||
|
});
|
||||||
|
},
|
||||||
{
|
{
|
||||||
onMutate: (variables) => options?.onMutate?.(variables),
|
onMutate: (variables) => options?.onMutate?.(variables),
|
||||||
onError: (error, variables, context) => options?.onError?.(error, variables, context),
|
onError: (error, variables, context) => options?.onError?.(error, variables, context),
|
||||||
onSuccess: (updatedAssistant, variables, context) => {
|
onSuccess: (updatedAssistant, variables, context) => {
|
||||||
const listRes = queryClient.getQueryData<t.AssistantListResponse>([
|
const listRes = queryClient.getQueryData<t.AssistantListResponse>([
|
||||||
QueryKeys.assistants,
|
QueryKeys.assistants,
|
||||||
|
variables.data.endpoint,
|
||||||
defaultOrderQuery,
|
defaultOrderQuery,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
|
@ -592,7 +621,7 @@ export const useUpdateAssistantMutation = (
|
||||||
}
|
}
|
||||||
|
|
||||||
queryClient.setQueryData<t.AssistantListResponse>(
|
queryClient.setQueryData<t.AssistantListResponse>(
|
||||||
[QueryKeys.assistants, defaultOrderQuery],
|
[QueryKeys.assistants, variables.data.endpoint, defaultOrderQuery],
|
||||||
{
|
{
|
||||||
...listRes,
|
...listRes,
|
||||||
data: listRes.data.map((assistant) => {
|
data: listRes.data.map((assistant) => {
|
||||||
|
|
@ -617,14 +646,18 @@ export const useDeleteAssistantMutation = (
|
||||||
): UseMutationResult<void, Error, t.DeleteAssistantBody> => {
|
): UseMutationResult<void, Error, t.DeleteAssistantBody> => {
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
return useMutation(
|
return useMutation(
|
||||||
({ assistant_id, model }: t.DeleteAssistantBody) =>
|
({ assistant_id, model, endpoint }: t.DeleteAssistantBody) => {
|
||||||
dataService.deleteAssistant(assistant_id, model),
|
const endpointsConfig = queryClient.getQueryData<t.TEndpointsConfig>([QueryKeys.endpoints]);
|
||||||
|
const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
|
||||||
|
return dataService.deleteAssistant({ assistant_id, model, version, endpoint });
|
||||||
|
},
|
||||||
{
|
{
|
||||||
onMutate: (variables) => options?.onMutate?.(variables),
|
onMutate: (variables) => options?.onMutate?.(variables),
|
||||||
onError: (error, variables, context) => options?.onError?.(error, variables, context),
|
onError: (error, variables, context) => options?.onError?.(error, variables, context),
|
||||||
onSuccess: (_data, variables, context) => {
|
onSuccess: (_data, variables, context) => {
|
||||||
const listRes = queryClient.getQueryData<t.AssistantListResponse>([
|
const listRes = queryClient.getQueryData<t.AssistantListResponse>([
|
||||||
QueryKeys.assistants,
|
QueryKeys.assistants,
|
||||||
|
variables.endpoint,
|
||||||
defaultOrderQuery,
|
defaultOrderQuery,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
|
@ -635,7 +668,7 @@ export const useDeleteAssistantMutation = (
|
||||||
const data = listRes.data.filter((assistant) => assistant.id !== variables.assistant_id);
|
const data = listRes.data.filter((assistant) => assistant.id !== variables.assistant_id);
|
||||||
|
|
||||||
queryClient.setQueryData<t.AssistantListResponse>(
|
queryClient.setQueryData<t.AssistantListResponse>(
|
||||||
[QueryKeys.assistants, defaultOrderQuery],
|
[QueryKeys.assistants, variables.endpoint, defaultOrderQuery],
|
||||||
{
|
{
|
||||||
...listRes,
|
...listRes,
|
||||||
data,
|
data,
|
||||||
|
|
@ -687,6 +720,7 @@ export const useUpdateAction = (
|
||||||
onSuccess: (updateActionResponse, variables, context) => {
|
onSuccess: (updateActionResponse, variables, context) => {
|
||||||
const listRes = queryClient.getQueryData<t.AssistantListResponse>([
|
const listRes = queryClient.getQueryData<t.AssistantListResponse>([
|
||||||
QueryKeys.assistants,
|
QueryKeys.assistants,
|
||||||
|
variables.endpoint,
|
||||||
defaultOrderQuery,
|
defaultOrderQuery,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
|
@ -696,7 +730,9 @@ export const useUpdateAction = (
|
||||||
|
|
||||||
const updatedAssistant = updateActionResponse[1];
|
const updatedAssistant = updateActionResponse[1];
|
||||||
|
|
||||||
queryClient.setQueryData<t.AssistantListResponse>([QueryKeys.assistants, defaultOrderQuery], {
|
queryClient.setQueryData<t.AssistantListResponse>(
|
||||||
|
[QueryKeys.assistants, variables.endpoint, defaultOrderQuery],
|
||||||
|
{
|
||||||
...listRes,
|
...listRes,
|
||||||
data: listRes.data.map((assistant) => {
|
data: listRes.data.map((assistant) => {
|
||||||
if (assistant.id === variables.assistant_id) {
|
if (assistant.id === variables.assistant_id) {
|
||||||
|
|
@ -704,7 +740,8 @@ export const useUpdateAction = (
|
||||||
}
|
}
|
||||||
return assistant;
|
return assistant;
|
||||||
}),
|
}),
|
||||||
});
|
},
|
||||||
|
);
|
||||||
|
|
||||||
queryClient.setQueryData<t.Action[]>([QueryKeys.actions], (prev) => {
|
queryClient.setQueryData<t.Action[]>([QueryKeys.actions], (prev) => {
|
||||||
return prev
|
return prev
|
||||||
|
|
@ -735,8 +772,15 @@ export const useDeleteAction = (
|
||||||
> => {
|
> => {
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
return useMutation([MutationKeys.deleteAction], {
|
return useMutation([MutationKeys.deleteAction], {
|
||||||
mutationFn: (variables: t.DeleteActionVariables) =>
|
mutationFn: (variables: t.DeleteActionVariables) => {
|
||||||
dataService.deleteAction(variables.assistant_id, variables.action_id, variables.model),
|
const { endpoint } = variables;
|
||||||
|
const endpointsConfig = queryClient.getQueryData<t.TEndpointsConfig>([QueryKeys.endpoints]);
|
||||||
|
const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
|
||||||
|
return dataService.deleteAction({
|
||||||
|
...variables,
|
||||||
|
version,
|
||||||
|
});
|
||||||
|
},
|
||||||
|
|
||||||
onMutate: (variables) => options?.onMutate?.(variables),
|
onMutate: (variables) => options?.onMutate?.(variables),
|
||||||
onError: (error, variables, context) => options?.onError?.(error, variables, context),
|
onError: (error, variables, context) => options?.onError?.(error, variables, context),
|
||||||
|
|
@ -750,7 +794,7 @@ export const useDeleteAction = (
|
||||||
});
|
});
|
||||||
|
|
||||||
queryClient.setQueryData<t.AssistantListResponse>(
|
queryClient.setQueryData<t.AssistantListResponse>(
|
||||||
[QueryKeys.assistants, defaultOrderQuery],
|
[QueryKeys.assistants, variables.endpoint, defaultOrderQuery],
|
||||||
(prev) => {
|
(prev) => {
|
||||||
if (!prev) {
|
if (!prev) {
|
||||||
return prev;
|
return prev;
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,9 @@
|
||||||
import { EModelEndpoint, QueryKeys, dataService, defaultOrderQuery } from 'librechat-data-provider';
|
import {
|
||||||
|
QueryKeys,
|
||||||
|
dataService,
|
||||||
|
defaultOrderQuery,
|
||||||
|
defaultAssistantsVersion,
|
||||||
|
} from 'librechat-data-provider';
|
||||||
import { useQuery, useInfiniteQuery, useQueryClient } from '@tanstack/react-query';
|
import { useQuery, useInfiniteQuery, useQueryClient } from '@tanstack/react-query';
|
||||||
import type {
|
import type {
|
||||||
UseInfiniteQueryOptions,
|
UseInfiniteQueryOptions,
|
||||||
|
|
@ -194,43 +199,46 @@ export const useSharedLinksInfiniteQuery = (
|
||||||
/**
|
/**
|
||||||
* Hook for getting all available tools for Assistants
|
* Hook for getting all available tools for Assistants
|
||||||
*/
|
*/
|
||||||
export const useAvailableToolsQuery = (): QueryObserverResult<TPlugin[]> => {
|
export const useAvailableToolsQuery = (
|
||||||
|
endpoint: t.AssistantsEndpoint,
|
||||||
|
): QueryObserverResult<TPlugin[]> => {
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
||||||
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([
|
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([QueryKeys.name, endpoint]);
|
||||||
QueryKeys.name,
|
const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
|
||||||
EModelEndpoint.assistants,
|
|
||||||
]);
|
|
||||||
const userProvidesKey = !!endpointsConfig?.[EModelEndpoint.assistants]?.userProvide;
|
|
||||||
const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
|
const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
|
||||||
const enabled = !!endpointsConfig?.[EModelEndpoint.assistants] && keyProvided;
|
const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
|
||||||
return useQuery<TPlugin[]>([QueryKeys.tools], () => dataService.getAvailableTools(), {
|
const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
|
||||||
|
return useQuery<TPlugin[]>(
|
||||||
|
[QueryKeys.tools],
|
||||||
|
() => dataService.getAvailableTools(version, endpoint),
|
||||||
|
{
|
||||||
refetchOnWindowFocus: false,
|
refetchOnWindowFocus: false,
|
||||||
refetchOnReconnect: false,
|
refetchOnReconnect: false,
|
||||||
refetchOnMount: false,
|
refetchOnMount: false,
|
||||||
enabled,
|
enabled,
|
||||||
});
|
},
|
||||||
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Hook for listing all assistants, with optional parameters provided for pagination and sorting
|
* Hook for listing all assistants, with optional parameters provided for pagination and sorting
|
||||||
*/
|
*/
|
||||||
export const useListAssistantsQuery = <TData = AssistantListResponse>(
|
export const useListAssistantsQuery = <TData = AssistantListResponse>(
|
||||||
params: AssistantListParams = defaultOrderQuery,
|
endpoint: t.AssistantsEndpoint,
|
||||||
|
params: Omit<AssistantListParams, 'endpoint'> = defaultOrderQuery,
|
||||||
config?: UseQueryOptions<AssistantListResponse, unknown, TData>,
|
config?: UseQueryOptions<AssistantListResponse, unknown, TData>,
|
||||||
): QueryObserverResult<TData> => {
|
): QueryObserverResult<TData> => {
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
||||||
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([
|
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([QueryKeys.name, endpoint]);
|
||||||
QueryKeys.name,
|
const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
|
||||||
EModelEndpoint.assistants,
|
|
||||||
]);
|
|
||||||
const userProvidesKey = !!endpointsConfig?.[EModelEndpoint.assistants]?.userProvide;
|
|
||||||
const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
|
const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
|
||||||
const enabled = !!endpointsConfig?.[EModelEndpoint.assistants] && keyProvided;
|
const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
|
||||||
|
const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
|
||||||
return useQuery<AssistantListResponse, unknown, TData>(
|
return useQuery<AssistantListResponse, unknown, TData>(
|
||||||
[QueryKeys.assistants, params],
|
[QueryKeys.assistants, endpoint, params],
|
||||||
() => dataService.listAssistants(params),
|
() => dataService.listAssistants({ ...params, endpoint }, version),
|
||||||
{
|
{
|
||||||
// Example selector to sort them by created_at
|
// Example selector to sort them by created_at
|
||||||
// select: (res) => {
|
// select: (res) => {
|
||||||
|
|
@ -246,6 +254,7 @@ export const useListAssistantsQuery = <TData = AssistantListResponse>(
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/*
|
||||||
export const useListAssistantsInfiniteQuery = (
|
export const useListAssistantsInfiniteQuery = (
|
||||||
params?: AssistantListParams,
|
params?: AssistantListParams,
|
||||||
config?: UseInfiniteQueryOptions<AssistantListResponse, Error>,
|
config?: UseInfiniteQueryOptions<AssistantListResponse, Error>,
|
||||||
|
|
@ -275,26 +284,31 @@ export const useListAssistantsInfiniteQuery = (
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
*/
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Hook for retrieving details about a single assistant
|
* Hook for retrieving details about a single assistant
|
||||||
*/
|
*/
|
||||||
export const useGetAssistantByIdQuery = (
|
export const useGetAssistantByIdQuery = (
|
||||||
|
endpoint: t.AssistantsEndpoint,
|
||||||
assistant_id: string,
|
assistant_id: string,
|
||||||
config?: UseQueryOptions<Assistant>,
|
config?: UseQueryOptions<Assistant>,
|
||||||
): QueryObserverResult<Assistant> => {
|
): QueryObserverResult<Assistant> => {
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
||||||
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([
|
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([QueryKeys.name, endpoint]);
|
||||||
QueryKeys.name,
|
const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
|
||||||
EModelEndpoint.assistants,
|
|
||||||
]);
|
|
||||||
const userProvidesKey = !!endpointsConfig?.[EModelEndpoint.assistants]?.userProvide;
|
|
||||||
const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
|
const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
|
||||||
const enabled = !!endpointsConfig?.[EModelEndpoint.assistants] && keyProvided;
|
const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
|
||||||
|
const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
|
||||||
return useQuery<Assistant>(
|
return useQuery<Assistant>(
|
||||||
[QueryKeys.assistant, assistant_id],
|
[QueryKeys.assistant, assistant_id],
|
||||||
() => dataService.getAssistantById(assistant_id),
|
() =>
|
||||||
|
dataService.getAssistantById({
|
||||||
|
endpoint,
|
||||||
|
assistant_id,
|
||||||
|
version,
|
||||||
|
}),
|
||||||
{
|
{
|
||||||
refetchOnWindowFocus: false,
|
refetchOnWindowFocus: false,
|
||||||
refetchOnReconnect: false,
|
refetchOnReconnect: false,
|
||||||
|
|
@ -311,43 +325,53 @@ export const useGetAssistantByIdQuery = (
|
||||||
* Hook for retrieving user's saved Assistant Actions
|
* Hook for retrieving user's saved Assistant Actions
|
||||||
*/
|
*/
|
||||||
export const useGetActionsQuery = <TData = Action[]>(
|
export const useGetActionsQuery = <TData = Action[]>(
|
||||||
|
endpoint: t.AssistantsEndpoint,
|
||||||
config?: UseQueryOptions<Action[], unknown, TData>,
|
config?: UseQueryOptions<Action[], unknown, TData>,
|
||||||
): QueryObserverResult<TData> => {
|
): QueryObserverResult<TData> => {
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
||||||
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([
|
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([QueryKeys.name, endpoint]);
|
||||||
QueryKeys.name,
|
const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
|
||||||
EModelEndpoint.assistants,
|
|
||||||
]);
|
|
||||||
const userProvidesKey = !!endpointsConfig?.[EModelEndpoint.assistants]?.userProvide;
|
|
||||||
const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
|
const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
|
||||||
const enabled = !!endpointsConfig?.[EModelEndpoint.assistants] && keyProvided;
|
const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
|
||||||
return useQuery<Action[], unknown, TData>([QueryKeys.actions], () => dataService.getActions(), {
|
const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
|
||||||
|
return useQuery<Action[], unknown, TData>(
|
||||||
|
[QueryKeys.actions],
|
||||||
|
() =>
|
||||||
|
dataService.getActions({
|
||||||
|
endpoint,
|
||||||
|
version,
|
||||||
|
}),
|
||||||
|
{
|
||||||
refetchOnWindowFocus: false,
|
refetchOnWindowFocus: false,
|
||||||
refetchOnReconnect: false,
|
refetchOnReconnect: false,
|
||||||
refetchOnMount: false,
|
refetchOnMount: false,
|
||||||
...config,
|
...config,
|
||||||
enabled: config?.enabled !== undefined ? config?.enabled && enabled : enabled,
|
enabled: config?.enabled !== undefined ? config?.enabled && enabled : enabled,
|
||||||
});
|
},
|
||||||
|
);
|
||||||
};
|
};
|
||||||
/**
|
/**
|
||||||
* Hook for retrieving user's saved Assistant Documents (metadata saved to Database)
|
* Hook for retrieving user's saved Assistant Documents (metadata saved to Database)
|
||||||
*/
|
*/
|
||||||
export const useGetAssistantDocsQuery = (
|
export const useGetAssistantDocsQuery = (
|
||||||
|
endpoint: t.AssistantsEndpoint,
|
||||||
config?: UseQueryOptions<AssistantDocument[]>,
|
config?: UseQueryOptions<AssistantDocument[]>,
|
||||||
): QueryObserverResult<AssistantDocument[], unknown> => {
|
): QueryObserverResult<AssistantDocument[], unknown> => {
|
||||||
const queryClient = useQueryClient();
|
const queryClient = useQueryClient();
|
||||||
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
||||||
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([
|
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([QueryKeys.name, endpoint]);
|
||||||
QueryKeys.name,
|
const userProvidesKey = !!endpointsConfig?.[endpoint]?.userProvide;
|
||||||
EModelEndpoint.assistants,
|
|
||||||
]);
|
|
||||||
const userProvidesKey = !!endpointsConfig?.[EModelEndpoint.assistants]?.userProvide;
|
|
||||||
const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
|
const keyProvided = userProvidesKey ? !!keyExpiry?.expiresAt : true;
|
||||||
const enabled = !!endpointsConfig?.[EModelEndpoint.assistants] && keyProvided;
|
const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
|
||||||
|
const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
|
||||||
return useQuery<AssistantDocument[]>(
|
return useQuery<AssistantDocument[]>(
|
||||||
[QueryKeys.assistantDocs],
|
[QueryKeys.assistantDocs],
|
||||||
() => dataService.getAssistantDocs(),
|
() =>
|
||||||
|
dataService.getAssistantDocs({
|
||||||
|
endpoint,
|
||||||
|
version,
|
||||||
|
}),
|
||||||
{
|
{
|
||||||
refetchOnWindowFocus: false,
|
refetchOnWindowFocus: false,
|
||||||
refetchOnReconnect: false,
|
refetchOnReconnect: false,
|
||||||
|
|
|
||||||
|
|
@ -1,2 +1,3 @@
|
||||||
export { default as useAssistantsMap } from './useAssistantsMap';
|
export { default as useAssistantsMap } from './useAssistantsMap';
|
||||||
export { default as useSelectAssistant } from './useSelectAssistant';
|
export { default as useSelectAssistant } from './useSelectAssistant';
|
||||||
|
export { default as useAssistantListMap } from './useAssistantListMap';
|
||||||
|
|
|
||||||
44
client/src/hooks/Assistants/useAssistantListMap.ts
Normal file
44
client/src/hooks/Assistants/useAssistantListMap.ts
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
import { useMemo } from 'react';
|
||||||
|
import { EModelEndpoint } from 'librechat-data-provider';
|
||||||
|
import type { AssistantListResponse, AssistantsEndpoint } from 'librechat-data-provider';
|
||||||
|
import type { AssistantListItem } from '~/common';
|
||||||
|
import { useListAssistantsQuery } from '~/data-provider';
|
||||||
|
|
||||||
|
const selectAssistantsResponse = (res: AssistantListResponse): AssistantListItem[] =>
|
||||||
|
res.data.map(({ id, name, metadata, model }) => ({
|
||||||
|
id,
|
||||||
|
name: name ?? '',
|
||||||
|
metadata,
|
||||||
|
model,
|
||||||
|
}));
|
||||||
|
|
||||||
|
export default function useAssistantListMap<T = AssistantListItem[] | null>(
|
||||||
|
selector: (res: AssistantListResponse) => T = selectAssistantsResponse as (
|
||||||
|
res: AssistantListResponse,
|
||||||
|
) => T,
|
||||||
|
): Record<AssistantsEndpoint, T> {
|
||||||
|
const { data: assistantsList = null } = useListAssistantsQuery(
|
||||||
|
EModelEndpoint.assistants,
|
||||||
|
undefined,
|
||||||
|
{
|
||||||
|
select: selector,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
const { data: azureAssistants = null } = useListAssistantsQuery(
|
||||||
|
EModelEndpoint.azureAssistants,
|
||||||
|
undefined,
|
||||||
|
{
|
||||||
|
select: selector,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
const assistantListMap = useMemo(() => {
|
||||||
|
return {
|
||||||
|
[EModelEndpoint.assistants]: assistantsList as T,
|
||||||
|
[EModelEndpoint.azureAssistants]: azureAssistants as T,
|
||||||
|
};
|
||||||
|
}, [assistantsList, azureAssistants]);
|
||||||
|
|
||||||
|
return assistantListMap;
|
||||||
|
}
|
||||||
|
|
@ -1,12 +1,28 @@
|
||||||
import { defaultOrderQuery } from 'librechat-data-provider';
|
import { EModelEndpoint } from 'librechat-data-provider';
|
||||||
|
import type { TAssistantsMap } from 'librechat-data-provider';
|
||||||
import { useListAssistantsQuery } from '~/data-provider';
|
import { useListAssistantsQuery } from '~/data-provider';
|
||||||
import { mapAssistants } from '~/utils';
|
import { mapAssistants } from '~/utils';
|
||||||
|
|
||||||
export default function useAssistantsMap({ isAuthenticated }: { isAuthenticated: boolean }) {
|
export default function useAssistantsMap({
|
||||||
const { data: assistantMap = {} } = useListAssistantsQuery(defaultOrderQuery, {
|
isAuthenticated,
|
||||||
|
}: {
|
||||||
|
isAuthenticated: boolean;
|
||||||
|
}): TAssistantsMap {
|
||||||
|
const { data: assistants = {} } = useListAssistantsQuery(EModelEndpoint.assistants, undefined, {
|
||||||
select: (res) => mapAssistants(res.data),
|
select: (res) => mapAssistants(res.data),
|
||||||
enabled: isAuthenticated,
|
enabled: isAuthenticated,
|
||||||
});
|
});
|
||||||
|
const { data: azureAssistants = {} } = useListAssistantsQuery(
|
||||||
|
EModelEndpoint.azureAssistants,
|
||||||
|
undefined,
|
||||||
|
{
|
||||||
|
select: (res) => mapAssistants(res.data),
|
||||||
|
enabled: isAuthenticated,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
return assistantMap;
|
return {
|
||||||
|
[EModelEndpoint.assistants]: assistants,
|
||||||
|
[EModelEndpoint.azureAssistants]: azureAssistants,
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,32 +1,30 @@
|
||||||
import { useCallback } from 'react';
|
import { useCallback } from 'react';
|
||||||
import { EModelEndpoint, defaultOrderQuery } from 'librechat-data-provider';
|
import { isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import type { TConversation, TPreset } from 'librechat-data-provider';
|
import type { AssistantsEndpoint, TConversation, TPreset } from 'librechat-data-provider';
|
||||||
import useDefaultConvo from '~/hooks/Conversations/useDefaultConvo';
|
import useDefaultConvo from '~/hooks/Conversations/useDefaultConvo';
|
||||||
import { useListAssistantsQuery } from '~/data-provider';
|
|
||||||
import { useChatContext } from '~/Providers/ChatContext';
|
import { useChatContext } from '~/Providers/ChatContext';
|
||||||
|
import useAssistantListMap from './useAssistantListMap';
|
||||||
import { mapAssistants } from '~/utils';
|
import { mapAssistants } from '~/utils';
|
||||||
|
|
||||||
export default function useSelectAssistant() {
|
export default function useSelectAssistant(endpoint: AssistantsEndpoint) {
|
||||||
const getDefaultConversation = useDefaultConvo();
|
const getDefaultConversation = useDefaultConvo();
|
||||||
const { conversation, newConversation } = useChatContext();
|
const { conversation, newConversation } = useChatContext();
|
||||||
const { data: assistantMap = {} } = useListAssistantsQuery(defaultOrderQuery, {
|
const assistantMap = useAssistantListMap((res) => mapAssistants(res.data));
|
||||||
select: (res) => mapAssistants(res.data),
|
|
||||||
});
|
|
||||||
|
|
||||||
const onSelect = useCallback(
|
const onSelect = useCallback(
|
||||||
(value: string) => {
|
(value: string) => {
|
||||||
const assistant = assistantMap?.[value];
|
const assistant = assistantMap?.[endpoint]?.[value];
|
||||||
if (!assistant) {
|
if (!assistant) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const template: Partial<TPreset | TConversation> = {
|
const template: Partial<TPreset | TConversation> = {
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint,
|
||||||
assistant_id: assistant.id,
|
assistant_id: assistant.id,
|
||||||
model: assistant.model,
|
model: assistant.model,
|
||||||
conversationId: 'new',
|
conversationId: 'new',
|
||||||
};
|
};
|
||||||
|
|
||||||
if (conversation?.endpoint === EModelEndpoint.assistants) {
|
if (isAssistantsEndpoint(conversation?.endpoint)) {
|
||||||
const currentConvo = getDefaultConversation({
|
const currentConvo = getDefaultConversation({
|
||||||
conversation: { ...(conversation ?? {}) },
|
conversation: { ...(conversation ?? {}) },
|
||||||
preset: template,
|
preset: template,
|
||||||
|
|
@ -44,7 +42,7 @@ export default function useSelectAssistant() {
|
||||||
preset: template as Partial<TPreset>,
|
preset: template as Partial<TPreset>,
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
[assistantMap, conversation, getDefaultConversation, newConversation],
|
[endpoint, assistantMap, conversation, getDefaultConversation, newConversation],
|
||||||
);
|
);
|
||||||
|
|
||||||
return { onSelect };
|
return { onSelect };
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ import exportFromJSON from 'export-from-json';
|
||||||
import { useCallback, useEffect, useRef } from 'react';
|
import { useCallback, useEffect, useRef } from 'react';
|
||||||
import { useQueryClient } from '@tanstack/react-query';
|
import { useQueryClient } from '@tanstack/react-query';
|
||||||
import { useRecoilState, useSetRecoilState, useRecoilValue } from 'recoil';
|
import { useRecoilState, useSetRecoilState, useRecoilValue } from 'recoil';
|
||||||
import { QueryKeys, modularEndpoints, EModelEndpoint } from 'librechat-data-provider';
|
import { QueryKeys, modularEndpoints, isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import { useCreatePresetMutation, useGetModelsQuery } from 'librechat-data-provider/react-query';
|
import { useCreatePresetMutation, useGetModelsQuery } from 'librechat-data-provider/react-query';
|
||||||
import type { TPreset, TEndpointsConfig } from 'librechat-data-provider';
|
import type { TPreset, TEndpointsConfig } from 'librechat-data-provider';
|
||||||
import {
|
import {
|
||||||
|
|
@ -174,8 +174,8 @@ export default function usePresets() {
|
||||||
const currentEndpointType = getEndpointField(endpointsConfig, endpoint, 'type');
|
const currentEndpointType = getEndpointField(endpointsConfig, endpoint, 'type');
|
||||||
const endpointType = getEndpointField(endpointsConfig, newPreset.endpoint, 'type');
|
const endpointType = getEndpointField(endpointsConfig, newPreset.endpoint, 'type');
|
||||||
const isAssistantSwitch =
|
const isAssistantSwitch =
|
||||||
newPreset.endpoint === EModelEndpoint.assistants &&
|
isAssistantsEndpoint(newPreset.endpoint) &&
|
||||||
conversation?.endpoint === EModelEndpoint.assistants &&
|
isAssistantsEndpoint(conversation?.endpoint) &&
|
||||||
conversation?.endpoint === newPreset.endpoint;
|
conversation?.endpoint === newPreset.endpoint;
|
||||||
|
|
||||||
if (
|
if (
|
||||||
|
|
|
||||||
|
|
@ -1,13 +1,18 @@
|
||||||
import { v4 } from 'uuid';
|
import { v4 } from 'uuid';
|
||||||
import debounce from 'lodash/debounce';
|
import debounce from 'lodash/debounce';
|
||||||
|
import { useQueryClient } from '@tanstack/react-query';
|
||||||
import { useState, useEffect, useCallback } from 'react';
|
import { useState, useEffect, useCallback } from 'react';
|
||||||
import {
|
import {
|
||||||
megabyte,
|
megabyte,
|
||||||
|
QueryKeys,
|
||||||
EModelEndpoint,
|
EModelEndpoint,
|
||||||
codeTypeMapping,
|
codeTypeMapping,
|
||||||
mergeFileConfig,
|
mergeFileConfig,
|
||||||
|
isAssistantsEndpoint,
|
||||||
|
defaultAssistantsVersion,
|
||||||
fileConfig as defaultFileConfig,
|
fileConfig as defaultFileConfig,
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
|
import type { TEndpointsConfig } from 'librechat-data-provider';
|
||||||
import type { ExtendedFile, FileSetter } from '~/common';
|
import type { ExtendedFile, FileSetter } from '~/common';
|
||||||
import { useUploadFileMutation, useGetFileConfig } from '~/data-provider';
|
import { useUploadFileMutation, useGetFileConfig } from '~/data-provider';
|
||||||
import { useDelayedUploadToast } from './useDelayedUploadToast';
|
import { useDelayedUploadToast } from './useDelayedUploadToast';
|
||||||
|
|
@ -20,10 +25,12 @@ const { checkType } = defaultFileConfig;
|
||||||
type UseFileHandling = {
|
type UseFileHandling = {
|
||||||
overrideEndpoint?: EModelEndpoint;
|
overrideEndpoint?: EModelEndpoint;
|
||||||
fileSetter?: FileSetter;
|
fileSetter?: FileSetter;
|
||||||
additionalMetadata?: Record<string, string>;
|
fileFilter?: (file: File) => boolean;
|
||||||
|
additionalMetadata?: Record<string, string | undefined>;
|
||||||
};
|
};
|
||||||
|
|
||||||
const useFileHandling = (params?: UseFileHandling) => {
|
const useFileHandling = (params?: UseFileHandling) => {
|
||||||
|
const queryClient = useQueryClient();
|
||||||
const { showToast } = useToastContext();
|
const { showToast } = useToastContext();
|
||||||
const [errors, setErrors] = useState<string[]>([]);
|
const [errors, setErrors] = useState<string[]>([]);
|
||||||
const { startUploadTimer, clearUploadTimer } = useDelayedUploadToast();
|
const { startUploadTimer, clearUploadTimer } = useDelayedUploadToast();
|
||||||
|
|
@ -141,15 +148,20 @@ const useFileHandling = (params?: UseFileHandling) => {
|
||||||
|
|
||||||
if (params?.additionalMetadata) {
|
if (params?.additionalMetadata) {
|
||||||
for (const [key, value] of Object.entries(params.additionalMetadata)) {
|
for (const [key, value] of Object.entries(params.additionalMetadata)) {
|
||||||
|
if (value) {
|
||||||
formData.append(key, value);
|
formData.append(key, value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if (
|
if (
|
||||||
endpoint === EModelEndpoint.assistants &&
|
isAssistantsEndpoint(endpoint) &&
|
||||||
!formData.get('assistant_id') &&
|
!formData.get('assistant_id') &&
|
||||||
conversation?.assistant_id
|
conversation?.assistant_id
|
||||||
) {
|
) {
|
||||||
|
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
|
||||||
|
const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
|
||||||
|
formData.append('version', version);
|
||||||
formData.append('assistant_id', conversation.assistant_id);
|
formData.append('assistant_id', conversation.assistant_id);
|
||||||
formData.append('model', conversation?.model ?? '');
|
formData.append('model', conversation?.model ?? '');
|
||||||
formData.append('message_file', 'true');
|
formData.append('message_file', 'true');
|
||||||
|
|
|
||||||
|
|
@ -5,15 +5,42 @@ import {
|
||||||
useGetEndpointsQuery,
|
useGetEndpointsQuery,
|
||||||
} from 'librechat-data-provider/react-query';
|
} from 'librechat-data-provider/react-query';
|
||||||
import { getConfigDefaults, EModelEndpoint, alternateName } from 'librechat-data-provider';
|
import { getConfigDefaults, EModelEndpoint, alternateName } from 'librechat-data-provider';
|
||||||
import type { Assistant } from 'librechat-data-provider';
|
import type { AssistantsEndpoint, TAssistantsMap, TEndpointsConfig } from 'librechat-data-provider';
|
||||||
import { useGetPresetsQuery, useListAssistantsQuery } from '~/data-provider';
|
import type { MentionOption } from '~/common';
|
||||||
|
import useAssistantListMap from '~/hooks/Assistants/useAssistantListMap';
|
||||||
import { mapEndpoints, getPresetTitle } from '~/utils';
|
import { mapEndpoints, getPresetTitle } from '~/utils';
|
||||||
import { EndpointIcon } from '~/components/Endpoints';
|
import { EndpointIcon } from '~/components/Endpoints';
|
||||||
|
import { useGetPresetsQuery } from '~/data-provider';
|
||||||
import useSelectMention from './useSelectMention';
|
import useSelectMention from './useSelectMention';
|
||||||
|
|
||||||
const defaultInterface = getConfigDefaults().interface;
|
const defaultInterface = getConfigDefaults().interface;
|
||||||
|
|
||||||
export default function useMentions({ assistantMap }: { assistantMap: Record<string, Assistant> }) {
|
const assistantMapFn =
|
||||||
|
({
|
||||||
|
endpoint,
|
||||||
|
assistantMap,
|
||||||
|
endpointsConfig,
|
||||||
|
}: {
|
||||||
|
endpoint: AssistantsEndpoint;
|
||||||
|
assistantMap: TAssistantsMap;
|
||||||
|
endpointsConfig: TEndpointsConfig;
|
||||||
|
}) =>
|
||||||
|
({ id, name, description }) => ({
|
||||||
|
type: endpoint,
|
||||||
|
label: name ?? '',
|
||||||
|
value: id,
|
||||||
|
description: description ?? '',
|
||||||
|
icon: EndpointIcon({
|
||||||
|
conversation: { assistant_id: id, endpoint },
|
||||||
|
containerClassName: 'shadow-stroke overflow-hidden rounded-full',
|
||||||
|
endpointsConfig: endpointsConfig,
|
||||||
|
context: 'menu-item',
|
||||||
|
assistantMap,
|
||||||
|
size: 20,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
export default function useMentions({ assistantMap }: { assistantMap: TAssistantsMap }) {
|
||||||
const { data: presets } = useGetPresetsQuery();
|
const { data: presets } = useGetPresetsQuery();
|
||||||
const { data: modelsConfig } = useGetModelsQuery();
|
const { data: modelsConfig } = useGetModelsQuery();
|
||||||
const { data: startupConfig } = useGetStartupConfig();
|
const { data: startupConfig } = useGetStartupConfig();
|
||||||
|
|
@ -21,30 +48,43 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<str
|
||||||
const { data: endpoints = [] } = useGetEndpointsQuery({
|
const { data: endpoints = [] } = useGetEndpointsQuery({
|
||||||
select: mapEndpoints,
|
select: mapEndpoints,
|
||||||
});
|
});
|
||||||
const { data: assistants = [] } = useListAssistantsQuery(undefined, {
|
const listMap = useAssistantListMap((res) =>
|
||||||
select: (res) =>
|
res.data.map(({ id, name, description }) => ({
|
||||||
res.data
|
id,
|
||||||
.map(({ id, name, description }) => ({
|
name,
|
||||||
type: 'assistant',
|
description,
|
||||||
label: name ?? '',
|
})),
|
||||||
value: id,
|
);
|
||||||
description: description ?? '',
|
const assistantListMap = useMemo(
|
||||||
icon: EndpointIcon({
|
() => ({
|
||||||
conversation: { assistant_id: id, endpoint: EModelEndpoint.assistants },
|
[EModelEndpoint.assistants]: listMap[EModelEndpoint.assistants]
|
||||||
containerClassName: 'shadow-stroke overflow-hidden rounded-full',
|
.map(
|
||||||
endpointsConfig: endpointsConfig,
|
assistantMapFn({
|
||||||
context: 'menu-item',
|
endpoint: EModelEndpoint.assistants,
|
||||||
assistantMap,
|
assistantMap,
|
||||||
size: 20,
|
endpointsConfig,
|
||||||
}),
|
}),
|
||||||
}))
|
)
|
||||||
.filter(Boolean),
|
.filter(Boolean),
|
||||||
});
|
[EModelEndpoint.azureAssistants]: listMap[EModelEndpoint.azureAssistants]
|
||||||
|
.map(
|
||||||
|
assistantMapFn({
|
||||||
|
endpoint: EModelEndpoint.azureAssistants,
|
||||||
|
assistantMap,
|
||||||
|
endpointsConfig,
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
.filter(Boolean),
|
||||||
|
}),
|
||||||
|
[listMap, assistantMap, endpointsConfig],
|
||||||
|
);
|
||||||
|
|
||||||
const modelSpecs = useMemo(() => startupConfig?.modelSpecs?.list ?? [], [startupConfig]);
|
const modelSpecs = useMemo(() => startupConfig?.modelSpecs?.list ?? [], [startupConfig]);
|
||||||
const interfaceConfig = useMemo(
|
const interfaceConfig = useMemo(
|
||||||
() => startupConfig?.interface ?? defaultInterface,
|
() => startupConfig?.interface ?? defaultInterface,
|
||||||
[startupConfig],
|
[startupConfig],
|
||||||
);
|
);
|
||||||
|
|
||||||
const { onSelectMention } = useSelectMention({
|
const { onSelectMention } = useSelectMention({
|
||||||
modelSpecs,
|
modelSpecs,
|
||||||
endpointsConfig,
|
endpointsConfig,
|
||||||
|
|
@ -52,7 +92,7 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<str
|
||||||
assistantMap,
|
assistantMap,
|
||||||
});
|
});
|
||||||
|
|
||||||
const options = useMemo(() => {
|
const options: MentionOption[] = useMemo(() => {
|
||||||
const mentions = [
|
const mentions = [
|
||||||
...(modelSpecs?.length > 0 ? modelSpecs : []).map((modelSpec) => ({
|
...(modelSpecs?.length > 0 ? modelSpecs : []).map((modelSpec) => ({
|
||||||
value: modelSpec.name,
|
value: modelSpec.name,
|
||||||
|
|
@ -67,12 +107,12 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<str
|
||||||
context: 'menu-item',
|
context: 'menu-item',
|
||||||
size: 20,
|
size: 20,
|
||||||
}),
|
}),
|
||||||
type: 'modelSpec',
|
type: 'modelSpec' as const,
|
||||||
})),
|
})),
|
||||||
...(interfaceConfig.endpointsMenu ? endpoints : []).map((endpoint) => ({
|
...(interfaceConfig.endpointsMenu ? endpoints : []).map((endpoint) => ({
|
||||||
value: endpoint,
|
value: endpoint,
|
||||||
label: alternateName[endpoint] ?? endpoint ?? '',
|
label: alternateName[endpoint] ?? endpoint ?? '',
|
||||||
type: 'endpoint',
|
type: 'endpoint' as const,
|
||||||
icon: EndpointIcon({
|
icon: EndpointIcon({
|
||||||
conversation: { endpoint },
|
conversation: { endpoint },
|
||||||
endpointsConfig,
|
endpointsConfig,
|
||||||
|
|
@ -80,7 +120,12 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<str
|
||||||
size: 20,
|
size: 20,
|
||||||
}),
|
}),
|
||||||
})),
|
})),
|
||||||
...(endpointsConfig?.[EModelEndpoint.assistants] ? assistants : []),
|
...(endpointsConfig?.[EModelEndpoint.assistants]
|
||||||
|
? assistantListMap[EModelEndpoint.assistants]
|
||||||
|
: []),
|
||||||
|
...(endpointsConfig?.[EModelEndpoint.azureAssistants]
|
||||||
|
? assistantListMap[EModelEndpoint.azureAssistants]
|
||||||
|
: []),
|
||||||
...((interfaceConfig.presets ? presets : [])?.map((preset, index) => ({
|
...((interfaceConfig.presets ? presets : [])?.map((preset, index) => ({
|
||||||
value: preset.presetId ?? `preset-${index}`,
|
value: preset.presetId ?? `preset-${index}`,
|
||||||
label: preset.title ?? preset.modelLabel ?? preset.chatGptLabel ?? '',
|
label: preset.title ?? preset.modelLabel ?? preset.chatGptLabel ?? '',
|
||||||
|
|
@ -93,7 +138,7 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<str
|
||||||
assistantMap,
|
assistantMap,
|
||||||
size: 20,
|
size: 20,
|
||||||
}),
|
}),
|
||||||
type: 'preset',
|
type: 'preset' as const,
|
||||||
})) ?? []),
|
})) ?? []),
|
||||||
];
|
];
|
||||||
|
|
||||||
|
|
@ -102,17 +147,17 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<str
|
||||||
presets,
|
presets,
|
||||||
endpoints,
|
endpoints,
|
||||||
modelSpecs,
|
modelSpecs,
|
||||||
assistants,
|
|
||||||
assistantMap,
|
assistantMap,
|
||||||
endpointsConfig,
|
endpointsConfig,
|
||||||
|
assistantListMap,
|
||||||
interfaceConfig.presets,
|
interfaceConfig.presets,
|
||||||
interfaceConfig.endpointsMenu,
|
interfaceConfig.endpointsMenu,
|
||||||
]);
|
]);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
options,
|
options,
|
||||||
assistants,
|
|
||||||
modelsConfig,
|
modelsConfig,
|
||||||
onSelectMention,
|
onSelectMention,
|
||||||
|
assistantListMap,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,12 +1,12 @@
|
||||||
import { useCallback } from 'react';
|
import { useCallback } from 'react';
|
||||||
import { useRecoilValue } from 'recoil';
|
import { useRecoilValue } from 'recoil';
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import type {
|
import type {
|
||||||
|
TPreset,
|
||||||
TModelSpec,
|
TModelSpec,
|
||||||
TConversation,
|
TConversation,
|
||||||
|
TAssistantsMap,
|
||||||
TEndpointsConfig,
|
TEndpointsConfig,
|
||||||
TPreset,
|
|
||||||
Assistant,
|
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
import type { MentionOption } from '~/common';
|
import type { MentionOption } from '~/common';
|
||||||
import { getConvoSwitchLogic, getModelSpecIconURL, removeUnavailableTools } from '~/utils';
|
import { getConvoSwitchLogic, getModelSpecIconURL, removeUnavailableTools } from '~/utils';
|
||||||
|
|
@ -23,7 +23,7 @@ export default function useSelectMention({
|
||||||
presets?: TPreset[];
|
presets?: TPreset[];
|
||||||
modelSpecs: TModelSpec[];
|
modelSpecs: TModelSpec[];
|
||||||
endpointsConfig: TEndpointsConfig;
|
endpointsConfig: TEndpointsConfig;
|
||||||
assistantMap: Record<string, Assistant>;
|
assistantMap: TAssistantsMap;
|
||||||
}) {
|
}) {
|
||||||
const { conversation } = useChatContext();
|
const { conversation } = useChatContext();
|
||||||
const { newConversation } = useNewConvo();
|
const { newConversation } = useNewConvo();
|
||||||
|
|
@ -194,10 +194,10 @@ export default function useSelectMention({
|
||||||
onSelectEndpoint(key, { model: option.label });
|
onSelectEndpoint(key, { model: option.label });
|
||||||
} else if (option.type === 'endpoint') {
|
} else if (option.type === 'endpoint') {
|
||||||
onSelectEndpoint(key);
|
onSelectEndpoint(key);
|
||||||
} else if (option.type === 'assistant') {
|
} else if (isAssistantsEndpoint(option.type)) {
|
||||||
onSelectEndpoint(EModelEndpoint.assistants, {
|
onSelectEndpoint(option.type, {
|
||||||
assistant_id: key,
|
assistant_id: key,
|
||||||
model: assistantMap?.[key]?.model ?? '',
|
model: assistantMap?.[option.type]?.[key]?.model ?? '',
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
import debounce from 'lodash/debounce';
|
import debounce from 'lodash/debounce';
|
||||||
import { useEffect, useRef, useCallback } from 'react';
|
import { useEffect, useRef, useCallback } from 'react';
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
import { isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import { useRecoilValue, useSetRecoilState } from 'recoil';
|
import { useRecoilValue, useSetRecoilState } from 'recoil';
|
||||||
import type { TEndpointOption } from 'librechat-data-provider';
|
import type { TEndpointOption } from 'librechat-data-provider';
|
||||||
import type { KeyboardEvent } from 'react';
|
import type { KeyboardEvent } from 'react';
|
||||||
|
|
@ -45,10 +45,11 @@ export default function useTextarea({
|
||||||
const { conversationId, jailbreak, endpoint = '', assistant_id } = conversation || {};
|
const { conversationId, jailbreak, endpoint = '', assistant_id } = conversation || {};
|
||||||
const isNotAppendable =
|
const isNotAppendable =
|
||||||
((latestMessage?.unfinished && !isSubmitting) || latestMessage?.error) &&
|
((latestMessage?.unfinished && !isSubmitting) || latestMessage?.error) &&
|
||||||
endpoint !== EModelEndpoint.assistants;
|
!isAssistantsEndpoint(endpoint);
|
||||||
// && (conversationId?.length ?? 0) > 6; // also ensures that we don't show the wrong placeholder
|
// && (conversationId?.length ?? 0) > 6; // also ensures that we don't show the wrong placeholder
|
||||||
|
|
||||||
const assistant = endpoint === EModelEndpoint.assistants && assistantMap?.[assistant_id ?? ''];
|
const assistant =
|
||||||
|
isAssistantsEndpoint(endpoint) && assistantMap?.[endpoint ?? '']?.[assistant_id ?? ''];
|
||||||
const assistantName = (assistant && assistant?.name) || '';
|
const assistantName = (assistant && assistant?.name) || '';
|
||||||
|
|
||||||
// auto focus to input, when enter a conversation.
|
// auto focus to input, when enter a conversation.
|
||||||
|
|
@ -86,9 +87,11 @@ export default function useTextarea({
|
||||||
if (disabled) {
|
if (disabled) {
|
||||||
return localize('com_endpoint_config_placeholder');
|
return localize('com_endpoint_config_placeholder');
|
||||||
}
|
}
|
||||||
|
const currentEndpoint = conversation?.endpoint ?? '';
|
||||||
|
const currentAssistantId = conversation?.assistant_id ?? '';
|
||||||
if (
|
if (
|
||||||
conversation?.endpoint === EModelEndpoint.assistants &&
|
isAssistantsEndpoint(currentEndpoint) &&
|
||||||
(!conversation?.assistant_id || !assistantMap?.[conversation?.assistant_id ?? ''])
|
(!currentAssistantId || !assistantMap?.[currentEndpoint]?.[currentAssistantId ?? ''])
|
||||||
) {
|
) {
|
||||||
return localize('com_endpoint_assistant_placeholder');
|
return localize('com_endpoint_assistant_placeholder');
|
||||||
}
|
}
|
||||||
|
|
@ -97,8 +100,7 @@ export default function useTextarea({
|
||||||
return localize('com_endpoint_message_not_appendable');
|
return localize('com_endpoint_message_not_appendable');
|
||||||
}
|
}
|
||||||
|
|
||||||
const sender =
|
const sender = isAssistantsEndpoint(currentEndpoint)
|
||||||
conversation?.endpoint === EModelEndpoint.assistants
|
|
||||||
? getAssistantName({ name: assistantName, localize })
|
? getAssistantName({ name: assistantName, localize })
|
||||||
: getSender(conversation as TEndpointOption);
|
: getSender(conversation as TEndpointOption);
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
import { useEffect, useRef, useCallback } from 'react';
|
import { useEffect, useRef, useCallback } from 'react';
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
import { isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import type { TMessageProps } from '~/common';
|
import type { TMessageProps } from '~/common';
|
||||||
import { useChatContext, useAssistantsMapContext } from '~/Providers';
|
import { useChatContext, useAssistantsMapContext } from '~/Providers';
|
||||||
import useCopyToClipboard from './useCopyToClipboard';
|
import useCopyToClipboard from './useCopyToClipboard';
|
||||||
|
|
@ -55,7 +55,8 @@ export default function useMessageHelpers(props: TMessageProps) {
|
||||||
}, [isSubmitting, setAbortScroll]);
|
}, [isSubmitting, setAbortScroll]);
|
||||||
|
|
||||||
const assistant =
|
const assistant =
|
||||||
conversation?.endpoint === EModelEndpoint.assistants && assistantMap?.[message?.model ?? ''];
|
isAssistantsEndpoint(conversation?.endpoint) &&
|
||||||
|
assistantMap?.[conversation?.endpoint ?? '']?.[message?.model ?? ''];
|
||||||
|
|
||||||
const regenerateMessage = () => {
|
const regenerateMessage = () => {
|
||||||
if ((isSubmitting && isCreatedByUser) || !message) {
|
if ((isSubmitting && isCreatedByUser) || !message) {
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,8 @@
|
||||||
import { useState, useEffect } from 'react';
|
import { useState, useEffect, useMemo, useCallback } from 'react';
|
||||||
|
|
||||||
export default function useProgress(initialProgress = 0.01, increment = 0.007, fileSize?: number) {
|
export default function useProgress(initialProgress = 0.01, increment = 0.007, fileSize?: number) {
|
||||||
const calculateIncrement = (size?: number) => {
|
const calculateIncrement = useCallback(
|
||||||
|
(size?: number) => {
|
||||||
const baseRate = 0.05;
|
const baseRate = 0.05;
|
||||||
const minRate = 0.002;
|
const minRate = 0.002;
|
||||||
const sizeMB = size ? size / (1024 * 1024) : 0;
|
const sizeMB = size ? size / (1024 * 1024) : 0;
|
||||||
|
|
@ -15,12 +16,18 @@ export default function useProgress(initialProgress = 0.01, increment = 0.007, f
|
||||||
} else {
|
} else {
|
||||||
return Math.max(baseRate / Math.sqrt(sizeMB), minRate);
|
return Math.max(baseRate / Math.sqrt(sizeMB), minRate);
|
||||||
}
|
}
|
||||||
};
|
},
|
||||||
|
[increment],
|
||||||
|
);
|
||||||
|
|
||||||
const incrementValue = calculateIncrement(fileSize);
|
const incrementValue = useMemo(
|
||||||
|
() => calculateIncrement(fileSize),
|
||||||
|
[fileSize, calculateIncrement],
|
||||||
|
);
|
||||||
const [progress, setProgress] = useState(initialProgress);
|
const [progress, setProgress] = useState(initialProgress);
|
||||||
|
|
||||||
const getDynamicIncrement = (currentProgress: number) => {
|
const getDynamicIncrement = useCallback(
|
||||||
|
(currentProgress: number) => {
|
||||||
if (!fileSize) {
|
if (!fileSize) {
|
||||||
return incrementValue;
|
return incrementValue;
|
||||||
}
|
}
|
||||||
|
|
@ -29,7 +36,9 @@ export default function useProgress(initialProgress = 0.01, increment = 0.007, f
|
||||||
} else {
|
} else {
|
||||||
return Math.max(0.0005, incrementValue * (1 - currentProgress));
|
return Math.max(0.0005, incrementValue * (1 - currentProgress));
|
||||||
}
|
}
|
||||||
};
|
},
|
||||||
|
[incrementValue, fileSize],
|
||||||
|
);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
let timeout: ReturnType<typeof setTimeout>;
|
let timeout: ReturnType<typeof setTimeout>;
|
||||||
|
|
@ -58,7 +67,7 @@ export default function useProgress(initialProgress = 0.01, increment = 0.007, f
|
||||||
clearInterval(timer);
|
clearInterval(timer);
|
||||||
clearTimeout(timeout);
|
clearTimeout(timeout);
|
||||||
};
|
};
|
||||||
}, [progress, initialProgress, incrementValue, fileSize]);
|
}, [progress, initialProgress, incrementValue, fileSize, getDynamicIncrement]);
|
||||||
|
|
||||||
return progress;
|
return progress;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ import {
|
||||||
ArrowRightToLine,
|
ArrowRightToLine,
|
||||||
// Settings2,
|
// Settings2,
|
||||||
} from 'lucide-react';
|
} from 'lucide-react';
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
import type { TConfig, TInterfaceConfig } from 'librechat-data-provider';
|
import type { TConfig, TInterfaceConfig } from 'librechat-data-provider';
|
||||||
import type { NavLink } from '~/common';
|
import type { NavLink } from '~/common';
|
||||||
import PanelSwitch from '~/components/SidePanel/Builder/PanelSwitch';
|
import PanelSwitch from '~/components/SidePanel/Builder/PanelSwitch';
|
||||||
|
|
@ -26,7 +26,7 @@ export default function useSideNavLinks({
|
||||||
}) {
|
}) {
|
||||||
const Links = useMemo(() => {
|
const Links = useMemo(() => {
|
||||||
const links: NavLink[] = [];
|
const links: NavLink[] = [];
|
||||||
// if (endpoint !== EModelEndpoint.assistants) {
|
// if (!isAssistantsEndpoint(endpoint)) {
|
||||||
// links.push({
|
// links.push({
|
||||||
// title: 'com_sidepanel_parameters',
|
// title: 'com_sidepanel_parameters',
|
||||||
// label: '',
|
// label: '',
|
||||||
|
|
@ -36,7 +36,7 @@ export default function useSideNavLinks({
|
||||||
// });
|
// });
|
||||||
// }
|
// }
|
||||||
if (
|
if (
|
||||||
endpoint === EModelEndpoint.assistants &&
|
isAssistantsEndpoint(endpoint) &&
|
||||||
assistants &&
|
assistants &&
|
||||||
assistants.disableBuilder !== true &&
|
assistants.disableBuilder !== true &&
|
||||||
keyProvided &&
|
keyProvided &&
|
||||||
|
|
|
||||||
|
|
@ -12,10 +12,10 @@ import {
|
||||||
createPayload,
|
createPayload,
|
||||||
tPresetSchema,
|
tPresetSchema,
|
||||||
tMessageSchema,
|
tMessageSchema,
|
||||||
EModelEndpoint,
|
|
||||||
LocalStorageKeys,
|
LocalStorageKeys,
|
||||||
tConvoUpdateSchema,
|
tConvoUpdateSchema,
|
||||||
removeNullishValues,
|
removeNullishValues,
|
||||||
|
isAssistantsEndpoint,
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
import { useGetUserBalance, useGetStartupConfig } from 'librechat-data-provider/react-query';
|
import { useGetUserBalance, useGetStartupConfig } from 'librechat-data-provider/react-query';
|
||||||
import type {
|
import type {
|
||||||
|
|
@ -441,7 +441,7 @@ export default function useSSE(submission: TSubmission | null, index = 0) {
|
||||||
Authorization: `Bearer ${token}`,
|
Authorization: `Bearer ${token}`,
|
||||||
},
|
},
|
||||||
body: JSON.stringify({
|
body: JSON.stringify({
|
||||||
abortKey: _endpoint === EModelEndpoint.assistants ? runAbortKey : conversationId,
|
abortKey: isAssistantsEndpoint(_endpoint) ? runAbortKey : conversationId,
|
||||||
endpoint,
|
endpoint,
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
@ -513,7 +513,7 @@ export default function useSSE(submission: TSubmission | null, index = 0) {
|
||||||
|
|
||||||
const payloadData = createPayload(submission);
|
const payloadData = createPayload(submission);
|
||||||
let { payload } = payloadData;
|
let { payload } = payloadData;
|
||||||
if (payload.endpoint === EModelEndpoint.assistants) {
|
if (isAssistantsEndpoint(payload.endpoint)) {
|
||||||
payload = removeNullishValues(payload);
|
payload = removeNullishValues(payload);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -18,7 +18,6 @@ export { default as useNewConvo } from './useNewConvo';
|
||||||
export { default as useLocalize } from './useLocalize';
|
export { default as useLocalize } from './useLocalize';
|
||||||
export { default as useMediaQuery } from './useMediaQuery';
|
export { default as useMediaQuery } from './useMediaQuery';
|
||||||
export { default as useChatHelpers } from './useChatHelpers';
|
export { default as useChatHelpers } from './useChatHelpers';
|
||||||
export { default as useGenerations } from './useGenerations';
|
|
||||||
export { default as useScrollToRef } from './useScrollToRef';
|
export { default as useScrollToRef } from './useScrollToRef';
|
||||||
export { default as useLocalStorage } from './useLocalStorage';
|
export { default as useLocalStorage } from './useLocalStorage';
|
||||||
export { default as useDelayedRender } from './useDelayedRender';
|
export { default as useDelayedRender } from './useDelayedRender';
|
||||||
|
|
|
||||||
|
|
@ -3,10 +3,10 @@ import { useCallback, useState } from 'react';
|
||||||
import { useQueryClient } from '@tanstack/react-query';
|
import { useQueryClient } from '@tanstack/react-query';
|
||||||
import {
|
import {
|
||||||
Constants,
|
Constants,
|
||||||
EModelEndpoint,
|
|
||||||
QueryKeys,
|
QueryKeys,
|
||||||
parseCompactConvo,
|
|
||||||
ContentTypes,
|
ContentTypes,
|
||||||
|
parseCompactConvo,
|
||||||
|
isAssistantsEndpoint,
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
import { useRecoilState, useResetRecoilState, useSetRecoilState } from 'recoil';
|
import { useRecoilState, useResetRecoilState, useSetRecoilState } from 'recoil';
|
||||||
import { useGetMessagesByConvoId } from 'librechat-data-provider/react-query';
|
import { useGetMessagesByConvoId } from 'librechat-data-provider/react-query';
|
||||||
|
|
@ -215,7 +215,7 @@ export default function useChatHelpers(index = 0, paramId: string | undefined) {
|
||||||
error: false,
|
error: false,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (endpoint === EModelEndpoint.assistants) {
|
if (isAssistantsEndpoint(endpoint)) {
|
||||||
initialResponse.model = conversation?.assistant_id ?? '';
|
initialResponse.model = conversation?.assistant_id ?? '';
|
||||||
initialResponse.text = '';
|
initialResponse.text = '';
|
||||||
initialResponse.content = [
|
initialResponse.content = [
|
||||||
|
|
|
||||||
|
|
@ -1,68 +0,0 @@
|
||||||
import type { TMessage } from 'librechat-data-provider';
|
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
|
||||||
import { useRecoilValue } from 'recoil';
|
|
||||||
import store from '~/store';
|
|
||||||
|
|
||||||
type TUseGenerations = {
|
|
||||||
endpoint?: string;
|
|
||||||
message: TMessage;
|
|
||||||
isSubmitting: boolean;
|
|
||||||
isEditing?: boolean;
|
|
||||||
latestMessage?: TMessage | null;
|
|
||||||
};
|
|
||||||
|
|
||||||
export default function useGenerations({
|
|
||||||
endpoint,
|
|
||||||
message,
|
|
||||||
isSubmitting,
|
|
||||||
isEditing = false,
|
|
||||||
latestMessage: _latestMessage,
|
|
||||||
}: TUseGenerations) {
|
|
||||||
const latestMessage = useRecoilValue(store.latestMessage) ?? _latestMessage;
|
|
||||||
|
|
||||||
const { error, messageId, searchResult, finish_reason, isCreatedByUser } = message ?? {};
|
|
||||||
const isEditableEndpoint = !![
|
|
||||||
EModelEndpoint.openAI,
|
|
||||||
EModelEndpoint.google,
|
|
||||||
EModelEndpoint.assistants,
|
|
||||||
EModelEndpoint.anthropic,
|
|
||||||
EModelEndpoint.gptPlugins,
|
|
||||||
EModelEndpoint.azureOpenAI,
|
|
||||||
].find((e) => e === endpoint);
|
|
||||||
|
|
||||||
const continueSupported =
|
|
||||||
latestMessage?.messageId === messageId &&
|
|
||||||
finish_reason &&
|
|
||||||
finish_reason !== 'stop' &&
|
|
||||||
!isEditing &&
|
|
||||||
!searchResult &&
|
|
||||||
isEditableEndpoint;
|
|
||||||
|
|
||||||
const branchingSupported =
|
|
||||||
// 5/21/23: Bing is allowing editing and Message regenerating
|
|
||||||
!![
|
|
||||||
EModelEndpoint.azureOpenAI,
|
|
||||||
EModelEndpoint.openAI,
|
|
||||||
EModelEndpoint.chatGPTBrowser,
|
|
||||||
EModelEndpoint.google,
|
|
||||||
EModelEndpoint.bingAI,
|
|
||||||
EModelEndpoint.gptPlugins,
|
|
||||||
EModelEndpoint.anthropic,
|
|
||||||
].find((e) => e === endpoint);
|
|
||||||
|
|
||||||
const regenerateEnabled =
|
|
||||||
!isCreatedByUser && !searchResult && !isEditing && !isSubmitting && branchingSupported;
|
|
||||||
|
|
||||||
const hideEditButton =
|
|
||||||
isSubmitting ||
|
|
||||||
error ||
|
|
||||||
searchResult ||
|
|
||||||
!branchingSupported ||
|
|
||||||
(!isEditableEndpoint && !isCreatedByUser);
|
|
||||||
|
|
||||||
return {
|
|
||||||
continueSupported,
|
|
||||||
regenerateEnabled,
|
|
||||||
hideEditButton,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
import type { TMessage } from 'librechat-data-provider';
|
import type { TMessage } from 'librechat-data-provider';
|
||||||
import { EModelEndpoint } from 'librechat-data-provider';
|
import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
|
|
||||||
type TUseGenerations = {
|
type TUseGenerations = {
|
||||||
endpoint?: string;
|
endpoint?: string;
|
||||||
|
|
@ -21,7 +21,6 @@ export default function useGenerationsByLatest({
|
||||||
EModelEndpoint.openAI,
|
EModelEndpoint.openAI,
|
||||||
EModelEndpoint.custom,
|
EModelEndpoint.custom,
|
||||||
EModelEndpoint.google,
|
EModelEndpoint.google,
|
||||||
EModelEndpoint.assistants,
|
|
||||||
EModelEndpoint.anthropic,
|
EModelEndpoint.anthropic,
|
||||||
EModelEndpoint.gptPlugins,
|
EModelEndpoint.gptPlugins,
|
||||||
EModelEndpoint.azureOpenAI,
|
EModelEndpoint.azureOpenAI,
|
||||||
|
|
@ -58,12 +57,13 @@ export default function useGenerationsByLatest({
|
||||||
!branchingSupported ||
|
!branchingSupported ||
|
||||||
(!isEditableEndpoint && !isCreatedByUser);
|
(!isEditableEndpoint && !isCreatedByUser);
|
||||||
|
|
||||||
const forkingSupported = endpoint !== EModelEndpoint.assistants && !searchResult;
|
const forkingSupported = !isAssistantsEndpoint(endpoint) && !searchResult;
|
||||||
|
|
||||||
return {
|
return {
|
||||||
forkingSupported,
|
forkingSupported,
|
||||||
continueSupported,
|
continueSupported,
|
||||||
regenerateEnabled,
|
regenerateEnabled,
|
||||||
|
isEditableEndpoint,
|
||||||
hideEditButton,
|
hideEditButton,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -4,12 +4,7 @@ import {
|
||||||
useGetStartupConfig,
|
useGetStartupConfig,
|
||||||
useGetEndpointsQuery,
|
useGetEndpointsQuery,
|
||||||
} from 'librechat-data-provider/react-query';
|
} from 'librechat-data-provider/react-query';
|
||||||
import {
|
import { FileSources, LocalStorageKeys, isAssistantsEndpoint } from 'librechat-data-provider';
|
||||||
FileSources,
|
|
||||||
EModelEndpoint,
|
|
||||||
LocalStorageKeys,
|
|
||||||
defaultOrderQuery,
|
|
||||||
} from 'librechat-data-provider';
|
|
||||||
import {
|
import {
|
||||||
useRecoilState,
|
useRecoilState,
|
||||||
useRecoilValue,
|
useRecoilValue,
|
||||||
|
|
@ -24,6 +19,7 @@ import type {
|
||||||
TConversation,
|
TConversation,
|
||||||
TEndpointsConfig,
|
TEndpointsConfig,
|
||||||
} from 'librechat-data-provider';
|
} from 'librechat-data-provider';
|
||||||
|
import type { AssistantListItem } from '~/common';
|
||||||
import {
|
import {
|
||||||
getEndpointField,
|
getEndpointField,
|
||||||
buildDefaultConvo,
|
buildDefaultConvo,
|
||||||
|
|
@ -32,7 +28,8 @@ import {
|
||||||
getModelSpecIconURL,
|
getModelSpecIconURL,
|
||||||
updateLastSelectedModel,
|
updateLastSelectedModel,
|
||||||
} from '~/utils';
|
} from '~/utils';
|
||||||
import { useDeleteFilesMutation, useListAssistantsQuery } from '~/data-provider';
|
import useAssistantListMap from './Assistants/useAssistantListMap';
|
||||||
|
import { useDeleteFilesMutation } from '~/data-provider';
|
||||||
import useOriginNavigate from './useOriginNavigate';
|
import useOriginNavigate from './useOriginNavigate';
|
||||||
import { mainTextareaId } from '~/common';
|
import { mainTextareaId } from '~/common';
|
||||||
import store from '~/store';
|
import store from '~/store';
|
||||||
|
|
@ -48,11 +45,7 @@ const useNewConvo = (index = 0) => {
|
||||||
const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
|
const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
|
||||||
const modelsQuery = useGetModelsQuery();
|
const modelsQuery = useGetModelsQuery();
|
||||||
const timeoutIdRef = useRef<NodeJS.Timeout>();
|
const timeoutIdRef = useRef<NodeJS.Timeout>();
|
||||||
|
const assistantsListMap = useAssistantListMap();
|
||||||
const { data: assistants = [] } = useListAssistantsQuery(defaultOrderQuery, {
|
|
||||||
select: (res) =>
|
|
||||||
res.data.map(({ id, name, metadata, model }) => ({ id, name, metadata, model })),
|
|
||||||
});
|
|
||||||
|
|
||||||
const { mutateAsync } = useDeleteFilesMutation({
|
const { mutateAsync } = useDeleteFilesMutation({
|
||||||
onSuccess: () => {
|
onSuccess: () => {
|
||||||
|
|
@ -100,12 +93,21 @@ const useNewConvo = (index = 0) => {
|
||||||
conversation.endpointType = undefined;
|
conversation.endpointType = undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
const isAssistantEndpoint = defaultEndpoint === EModelEndpoint.assistants;
|
const isAssistantEndpoint = isAssistantsEndpoint(defaultEndpoint);
|
||||||
|
const assistants: AssistantListItem[] = assistantsListMap[defaultEndpoint] ?? [];
|
||||||
|
|
||||||
|
if (
|
||||||
|
conversation.assistant_id &&
|
||||||
|
!assistantsListMap[defaultEndpoint]?.[conversation.assistant_id]
|
||||||
|
) {
|
||||||
|
conversation.assistant_id = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
if (!conversation.assistant_id && isAssistantEndpoint) {
|
if (!conversation.assistant_id && isAssistantEndpoint) {
|
||||||
conversation.assistant_id =
|
conversation.assistant_id =
|
||||||
localStorage.getItem(`${LocalStorageKeys.ASST_ID_PREFIX}${index}`) ??
|
localStorage.getItem(
|
||||||
assistants[0]?.id;
|
`${LocalStorageKeys.ASST_ID_PREFIX}${index}${defaultEndpoint}`,
|
||||||
|
) ?? assistants[0]?.id;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (
|
if (
|
||||||
|
|
@ -116,7 +118,7 @@ const useNewConvo = (index = 0) => {
|
||||||
const assistant = assistants.find((asst) => asst.id === conversation.assistant_id);
|
const assistant = assistants.find((asst) => asst.id === conversation.assistant_id);
|
||||||
conversation.model = assistant?.model;
|
conversation.model = assistant?.model;
|
||||||
updateLastSelectedModel({
|
updateLastSelectedModel({
|
||||||
endpoint: EModelEndpoint.assistants,
|
endpoint: defaultEndpoint,
|
||||||
model: conversation.model,
|
model: conversation.model,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
@ -156,7 +158,7 @@ const useNewConvo = (index = 0) => {
|
||||||
}
|
}
|
||||||
}, 150);
|
}, 150);
|
||||||
},
|
},
|
||||||
[endpointsConfig, defaultPreset, assistants, modelsQuery.data],
|
[endpointsConfig, defaultPreset, assistantsListMap, modelsQuery.data],
|
||||||
);
|
);
|
||||||
|
|
||||||
const newConversation = useCallback(
|
const newConversation = useCallback(
|
||||||
|
|
|
||||||
|
|
@ -196,6 +196,8 @@ export default {
|
||||||
com_ui_result: 'Result',
|
com_ui_result: 'Result',
|
||||||
com_ui_image_gen: 'Image Gen',
|
com_ui_image_gen: 'Image Gen',
|
||||||
com_ui_assistant: 'Assistant',
|
com_ui_assistant: 'Assistant',
|
||||||
|
com_ui_assistant_deleted: 'Successfully deleted assistant',
|
||||||
|
com_ui_assistant_delete_error: 'There was an error deleting the assistant',
|
||||||
com_ui_assistants: 'Assistants',
|
com_ui_assistants: 'Assistants',
|
||||||
com_ui_attachment: 'Attachment',
|
com_ui_attachment: 'Attachment',
|
||||||
com_ui_assistants_output: 'Assistants Output',
|
com_ui_assistants_output: 'Assistants Output',
|
||||||
|
|
|
||||||
|
|
@ -1,15 +1,15 @@
|
||||||
import { useEffect, useRef } from 'react';
|
import { useEffect, useRef } from 'react';
|
||||||
import { useParams } from 'react-router-dom';
|
import { useParams } from 'react-router-dom';
|
||||||
import { defaultOrderQuery } from 'librechat-data-provider';
|
import { EModelEndpoint } from 'librechat-data-provider';
|
||||||
import {
|
import {
|
||||||
useGetModelsQuery,
|
useGetModelsQuery,
|
||||||
useGetStartupConfig,
|
useGetStartupConfig,
|
||||||
useGetEndpointsQuery,
|
useGetEndpointsQuery,
|
||||||
} from 'librechat-data-provider/react-query';
|
} from 'librechat-data-provider/react-query';
|
||||||
import type { TPreset } from 'librechat-data-provider';
|
import type { TPreset } from 'librechat-data-provider';
|
||||||
import { useGetConvoIdQuery, useListAssistantsQuery } from '~/data-provider';
|
import { useNewConvo, useAppStartup, useAssistantListMap } from '~/hooks';
|
||||||
import { getDefaultModelSpec, getModelSpecIconURL } from '~/utils';
|
import { getDefaultModelSpec, getModelSpecIconURL } from '~/utils';
|
||||||
import { useNewConvo, useAppStartup } from '~/hooks';
|
import { useGetConvoIdQuery } from '~/data-provider';
|
||||||
import ChatView from '~/components/Chat/ChatView';
|
import ChatView from '~/components/Chat/ChatView';
|
||||||
import useAuthRedirect from './useAuthRedirect';
|
import useAuthRedirect from './useAuthRedirect';
|
||||||
import { Spinner } from '~/components/svg';
|
import { Spinner } from '~/components/svg';
|
||||||
|
|
@ -35,10 +35,7 @@ export default function ChatRoute() {
|
||||||
enabled: isAuthenticated && conversationId !== 'new',
|
enabled: isAuthenticated && conversationId !== 'new',
|
||||||
});
|
});
|
||||||
const endpointsQuery = useGetEndpointsQuery({ enabled: isAuthenticated });
|
const endpointsQuery = useGetEndpointsQuery({ enabled: isAuthenticated });
|
||||||
const { data: assistants = null } = useListAssistantsQuery(defaultOrderQuery, {
|
const assistantListMap = useAssistantListMap();
|
||||||
select: (res) =>
|
|
||||||
res.data.map(({ id, name, metadata, model }) => ({ id, name, metadata, model })),
|
|
||||||
});
|
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (
|
if (
|
||||||
|
|
@ -87,7 +84,8 @@ export default function ChatRoute() {
|
||||||
!hasSetConversation.current &&
|
!hasSetConversation.current &&
|
||||||
!modelsQuery.data?.initial &&
|
!modelsQuery.data?.initial &&
|
||||||
conversationId === 'new' &&
|
conversationId === 'new' &&
|
||||||
assistants
|
assistantListMap[EModelEndpoint.assistants] &&
|
||||||
|
assistantListMap[EModelEndpoint.azureAssistants]
|
||||||
) {
|
) {
|
||||||
const spec = getDefaultModelSpec(startupConfig.modelSpecs?.list);
|
const spec = getDefaultModelSpec(startupConfig.modelSpecs?.list);
|
||||||
newConversation({
|
newConversation({
|
||||||
|
|
@ -108,7 +106,8 @@ export default function ChatRoute() {
|
||||||
startupConfig &&
|
startupConfig &&
|
||||||
!hasSetConversation.current &&
|
!hasSetConversation.current &&
|
||||||
!modelsQuery.data?.initial &&
|
!modelsQuery.data?.initial &&
|
||||||
assistants
|
assistantListMap[EModelEndpoint.assistants] &&
|
||||||
|
assistantListMap[EModelEndpoint.azureAssistants]
|
||||||
) {
|
) {
|
||||||
newConversation({
|
newConversation({
|
||||||
template: initialConvoQuery.data,
|
template: initialConvoQuery.data,
|
||||||
|
|
@ -120,7 +119,13 @@ export default function ChatRoute() {
|
||||||
}
|
}
|
||||||
/* Creates infinite render if all dependencies included due to newConversation invocations exceeding call stack before hasSetConversation.current becomes truthy */
|
/* Creates infinite render if all dependencies included due to newConversation invocations exceeding call stack before hasSetConversation.current becomes truthy */
|
||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, [startupConfig, initialConvoQuery.data, endpointsQuery.data, modelsQuery.data, assistants]);
|
}, [
|
||||||
|
startupConfig,
|
||||||
|
initialConvoQuery.data,
|
||||||
|
endpointsQuery.data,
|
||||||
|
modelsQuery.data,
|
||||||
|
assistantListMap,
|
||||||
|
]);
|
||||||
|
|
||||||
if (endpointsQuery.isLoading || modelsQuery.isLoading) {
|
if (endpointsQuery.isLoading || modelsQuery.isLoading) {
|
||||||
return <Spinner className="m-auto text-black dark:text-white" />;
|
return <Spinner className="m-auto text-black dark:text-white" />;
|
||||||
|
|
|
||||||
|
|
@ -4,6 +4,7 @@ import type { TEndpointsConfig } from 'librechat-data-provider';
|
||||||
|
|
||||||
const defaultConfig: TEndpointsConfig = {
|
const defaultConfig: TEndpointsConfig = {
|
||||||
[EModelEndpoint.azureOpenAI]: null,
|
[EModelEndpoint.azureOpenAI]: null,
|
||||||
|
[EModelEndpoint.azureAssistants]: null,
|
||||||
[EModelEndpoint.assistants]: null,
|
[EModelEndpoint.assistants]: null,
|
||||||
[EModelEndpoint.openAI]: null,
|
[EModelEndpoint.openAI]: null,
|
||||||
[EModelEndpoint.bingAI]: null,
|
[EModelEndpoint.bingAI]: null,
|
||||||
|
|
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue