🤖 feat: Private Assistants (#2881)

* feat: add configuration for user private assistants

* filter private assistant message requests

* add test for privateAssistants

* add privateAssistants configuration to tests

* fix: destructuring error when assistants config is not added

* chore: revert chat controller changes

* chore: add payload type, add metadata types

* feat: validateAssistant

* refactor(fetchAssistants): allow for flexibility

* feat: validateAuthor

* refactor: return all assistants to ADMIN role

* feat: add assistant doc on assistant creation

* refactor(listAssistants): use `listAllAssistants` to exhaustively fetch all assistants

* chore: add suggestion to tts error

* refactor(validateAuthor): attempt database check first

* refactor: author validation when patching/deleting assistant

---------

Co-authored-by: Leon Juenemann <leon.juenemann@maibornwolff.de>
Danny Avila 2024-05-28 08:27:45 -04:00 committed by GitHub
parent 9f2538fcd9
commit 5dc5d875ba
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
20 changed files with 308 additions and 109 deletions
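As background for the diffs that follow, here is a minimal sketch (not part of this commit) of roughly what `req.app.locals[EModelEndpoint.assistants]` holds once the endpoint configuration is parsed; the field names come from the assistantsConfigSetup and AppService test diffs below, while the values are purely illustrative:

// Hypothetical parsed endpoint config; only the field names come from this commit.
app.locals[EModelEndpoint.assistants] = {
  pollIntervalMs: 5000,
  timeoutMs: 30000,
  capabilities: [], // whatever librechat.yaml lists for the endpoint
  privateAssistants: true, // new flag: each user only sees assistants they authored
  supportedIds: undefined, // ignored when privateAssistants is enabled
  excludedIds: undefined,  // ignored when privateAssistants is enabled
};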

View file

@@ -20,6 +20,7 @@ const {
} = require('~/server/services/Threads');
const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { formatMessage, createVisionPrompt } = require('~/app/clients/prompts');
const { createRun, StreamRunManager } = require('~/server/services/Runs');
const { addTitle } = require('~/server/services/Endpoints/assistants');
@@ -31,15 +32,14 @@ const { getModelMaxTokens } = require('~/utils');
const { getOpenAIClient } = require('./helpers');
const { logger } = require('~/config');
const { handleAbortError } = require('~/server/middleware');
const ten_minutes = 1000 * 60 * 10;
/**
* @route POST /
* @desc Chat with an assistant
* @access Public
* @param {Express.Request} req - The request object, containing the request data.
* @param {object} req - The request object, containing the request data.
* @param {object} req.body - The request payload.
* @param {Express.Response} res - The response object, used to send back a response.
* @returns {void}
*/
@@ -60,30 +60,6 @@ const chatV1 = async (req, res) => {
parentMessageId: _parentId = Constants.NO_PARENT,
} = req.body;
/** @type {Partial<TAssistantEndpoint>} */
const assistantsConfig = req.app.locals?.[endpoint];
if (assistantsConfig) {
const { supportedIds, excludedIds } = assistantsConfig;
const error = { message: 'Assistant not supported' };
if (supportedIds?.length && !supportedIds.includes(assistant_id)) {
return await handleAbortError(res, req, error, {
sender: 'System',
conversationId: convoId,
messageId: v4(),
parentMessageId: _messageId,
error,
});
} else if (excludedIds?.length && excludedIds.includes(assistant_id)) {
return await handleAbortError(res, req, error, {
sender: 'System',
conversationId: convoId,
messageId: v4(),
parentMessageId: _messageId,
});
}
}
/** @type {OpenAIClient} */
let openai;
/** @type {string|undefined} - the current thread id */
@@ -311,6 +287,7 @@ const chatV1 = async (req, res) => {
});
openai = _openai;
await validateAuthor({ req, openai });
if (previousMessages.length) {
parentMessageId = previousMessages[previousMessages.length - 1].messageId;

View file

@@ -20,6 +20,7 @@ const {
} = require('~/server/services/Threads');
const { sendResponse, sendMessage, sleep, isEnabled, countTokens } = require('~/server/utils');
const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService');
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { createRun, StreamRunManager } = require('~/server/services/Runs');
const { addTitle } = require('~/server/services/Endpoints/assistants');
const { getTransactions } = require('~/models/Transaction');
@@ -30,8 +31,6 @@ const { getModelMaxTokens } = require('~/utils');
const { getOpenAIClient } = require('./helpers');
const { logger } = require('~/config');
const { handleAbortError } = require('~/server/middleware');
const ten_minutes = 1000 * 60 * 10;
/**
@@ -60,30 +59,6 @@ const chatV2 = async (req, res) => {
parentMessageId: _parentId = Constants.NO_PARENT,
} = req.body;
/** @type {Partial<TAssistantEndpoint>} */
const assistantsConfig = req.app.locals?.[endpoint];
if (assistantsConfig) {
const { supportedIds, excludedIds } = assistantsConfig;
const error = { message: 'Assistant not supported' };
if (supportedIds?.length && !supportedIds.includes(assistant_id)) {
return await handleAbortError(res, req, error, {
sender: 'System',
conversationId: convoId,
messageId: v4(),
parentMessageId: _messageId,
error,
});
} else if (excludedIds?.length && excludedIds.includes(assistant_id)) {
return await handleAbortError(res, req, error, {
sender: 'System',
conversationId: convoId,
messageId: v4(),
parentMessageId: _messageId,
});
}
}
/** @type {OpenAIClient} */
let openai;
/** @type {string|undefined} - the current thread id */
@@ -309,6 +284,7 @@ const chatV2 = async (req, res) => {
});
openai = _openai;
await validateAuthor({ req, openai });
if (previousMessages.length) {
parentMessageId = previousMessages[previousMessages.length - 1].messageId;

View file

@@ -1,4 +1,9 @@
const { EModelEndpoint, CacheKeys, defaultAssistantsVersion } = require('librechat-data-provider');
const {
EModelEndpoint,
CacheKeys,
defaultAssistantsVersion,
defaultOrderQuery,
} = require('librechat-data-provider');
const {
initializeClient: initAzureClient,
} = require('~/server/services/Endpoints/azureAssistants');
@@ -35,6 +40,7 @@ const getCurrentVersion = async (req, endpoint) => {
* Initializes the client with the current request and response objects and lists assistants
* according to the query parameters. This function abstracts the logic for non-Azure paths.
*
* @deprecated
* @async
* @param {object} params - The parameters object.
* @param {object} params.req - The request object, used for initializing the client.
@@ -43,11 +49,65 @@ const getCurrentVersion = async (req, endpoint) => {
* @param {object} params.query - The query parameters to list assistants (e.g., limit, order).
* @returns {Promise<object>} A promise that resolves to the response from the `openai.beta.assistants.list` method call.
*/
const listAssistants = async ({ req, res, version, query }) => {
const _listAssistants = async ({ req, res, version, query }) => {
const { openai } = await getOpenAIClient({ req, res, version });
return openai.beta.assistants.list(query);
};
/**
* Fetches all assistants based on provided query params, until `has_more` is `false`.
*
* @async
* @param {object} params - The parameters object.
* @param {object} params.req - The request object, used for initializing the client.
* @param {object} params.res - The response object, used for initializing the client.
* @param {string} params.version - The API version to use.
* @param {Omit<AssistantListParams, 'endpoint'>} params.query - The query parameters to list assistants (e.g., limit, order).
* @returns {Promise<object>} A promise that resolves to the response from the `openai.beta.assistants.list` method call.
*/
const listAllAssistants = async ({ req, res, version, query }) => {
/** @type {{ openai: OpenAIClient }} */
const { openai } = await getOpenAIClient({ req, res, version });
const allAssistants = [];
let first_id;
let last_id;
let afterToken = query.after;
let hasMore = true;
while (hasMore) {
const response = await openai.beta.assistants.list({
...query,
after: afterToken,
});
const { body } = response;
allAssistants.push(...body.data);
hasMore = body.has_more;
if (!first_id) {
first_id = body.first_id;
}
if (hasMore) {
afterToken = body.last_id;
} else {
last_id = body.last_id;
}
}
return {
data: allAssistants,
body: {
data: allAssistants,
has_more: false,
first_id,
last_id,
},
};
};
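// Illustration only (not part of this diff): listAllAssistants keeps passing the
// previous page's `last_id` as the `after` cursor until `has_more` is false, so a
// caller receives every assistant in a single array, e.g.:
//
//   const { body } = await listAllAssistants({
//     req,
//     res,
//     version,
//     query: { limit: 100, order: 'desc' },
//   });
//   // body.data holds all pages combined; body.has_more is always false.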
/**
* Asynchronously lists assistants for Azure configured groups.
*
@@ -82,7 +142,7 @@ const listAssistantsForAzure = async ({ req, res, version, azureConfig = {}, que
/* The specified model is only necessary to
fetch assistants for the shared instance */
req.body.model = currentModelTuples[0][0];
promises.push(listAssistants({ req, res, version, query }));
promises.push(listAllAssistants({ req, res, version, query }));
}
const resolvedQueries = await Promise.all(promises);
@@ -133,8 +193,27 @@ async function getOpenAIClient({ req, res, endpointOption, initAppClient, overri
return result;
}
const fetchAssistants = async (req, res) => {
const { limit = 100, order = 'desc', after, before, endpoint } = req.query;
/**
* Returns a list of assistants.
* @param {object} params
* @param {object} params.req - Express Request
* @param {AssistantListParams} [params.req.query] - The assistant list parameters for pagination and sorting.
* @param {object} params.res - Express Response
* @param {string} [params.overrideEndpoint] - The endpoint to override the request endpoint.
* @returns {Promise<AssistantListResponse>} 200 - success response - application/json
*/
const fetchAssistants = async ({ req, res, overrideEndpoint }) => {
const {
limit = 100,
order = 'desc',
after,
before,
endpoint,
} = req.query ?? {
endpoint: overrideEndpoint,
...defaultOrderQuery,
};
const version = await getCurrentVersion(req, endpoint);
const query = { limit, order, after, before };
@@ -142,15 +221,47 @@ const fetchAssistants = async (req, res) => {
let body;
if (endpoint === EModelEndpoint.assistants) {
({ body } = await listAssistants({ req, res, version, query }));
({ body } = await listAllAssistants({ req, res, version, query }));
} else if (endpoint === EModelEndpoint.azureAssistants) {
const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI];
body = await listAssistantsForAzure({ req, res, version, azureConfig, query });
}
if (req.user.role === 'ADMIN') {
return body;
} else if (!req.app.locals[endpoint]) {
return body;
}
body.data = filterAssistants({
userId: req.user.id,
assistants: body.data,
assistantsConfig: req.app.locals[endpoint],
});
return body;
};
/**
* Filter assistants based on configuration.
*
* @param {object} params - The parameters object.
* @param {string} params.userId - The user ID to filter private assistants.
* @param {Assistant[]} params.assistants - The list of assistants to filter.
* @param {Partial<TAssistantEndpoint>} params.assistantsConfig - The assistant configuration.
* @returns {Assistant[]} - The filtered list of assistants.
*/
function filterAssistants({ assistants, userId, assistantsConfig }) {
const { supportedIds, excludedIds, privateAssistants } = assistantsConfig;
if (privateAssistants) {
return assistants.filter((assistant) => userId === assistant.metadata?.author);
} else if (supportedIds?.length) {
return assistants.filter((assistant) => supportedIds.includes(assistant.id));
} else if (excludedIds?.length) {
return assistants.filter((assistant) => !excludedIds.includes(assistant.id));
}
return assistants;
}
module.exports = {
getOpenAIClient,
fetchAssistants,

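To make the new filtering concrete, a small sketch (assumed data, not part of the commit) of how filterAssistants behaves when `privateAssistants` is enabled; only the `metadata.author` shape used by the code above matters here:

// Hypothetical assistants returned by the OpenAI listing.
const assistants = [
  { id: 'asst_1', metadata: { author: 'user-123' } },
  { id: 'asst_2', metadata: { author: 'user-456' } },
  { id: 'asst_3', metadata: {} },
];

// With privateAssistants enabled, only assistants authored by the requesting user
// survive; supportedIds/excludedIds are not consulted in this branch.
const visible = filterAssistants({
  userId: 'user-123',
  assistants,
  assistantsConfig: { privateAssistants: true },
});
// visible -> [{ id: 'asst_1', metadata: { author: 'user-123' } }]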
View file

@@ -1,8 +1,9 @@
const { FileContext } = require('librechat-data-provider');
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { deleteAssistantActions } = require('~/server/services/ActionService');
const { updateAssistantDoc, getAssistants } = require('~/models/Assistant');
const { uploadImageBuffer } = require('~/server/services/Files/process');
const { updateAssistant, getAssistants } = require('~/models/Assistant');
const { getOpenAIClient, fetchAssistants } = require('./helpers');
const { deleteFileByFilter } = require('~/models/File');
const { logger } = require('~/config');
@@ -40,9 +41,11 @@ const createAssistant = async (req, res) => {
};
const assistant = await openai.beta.assistants.create(assistantData);
const promise = updateAssistantDoc({ assistant_id: assistant.id }, { user: req.user.id });
if (azureModelIdentifier) {
assistant.model = azureModelIdentifier;
}
await promise;
logger.debug('/assistants/', assistant);
res.status(201).json(assistant);
} catch (error) {
@@ -61,7 +64,6 @@ const retrieveAssistant = async (req, res) => {
try {
/* NOTE: not actually being used right now */
const { openai } = await getOpenAIClient({ req, res });
const assistant_id = req.params.id;
const assistant = await openai.beta.assistants.retrieve(assistant_id);
res.json(assistant);
@@ -83,6 +85,7 @@ const retrieveAssistant = async (req, res) => {
const patchAssistant = async (req, res) => {
try {
const { openai } = await getOpenAIClient({ req, res });
await validateAuthor({ req, openai });
const assistant_id = req.params.id;
const { endpoint: _e, ...updateData } = req.body;
@@ -119,6 +122,7 @@ const patchAssistant = async (req, res) => {
const deleteAssistant = async (req, res) => {
try {
const { openai } = await getOpenAIClient({ req, res });
await validateAuthor({ req, openai });
const assistant_id = req.params.id;
const deletionStatus = await openai.beta.assistants.del(assistant_id);
@@ -141,19 +145,7 @@ const deleteAssistant = async (req, res) => {
*/
const listAssistants = async (req, res) => {
try {
const body = await fetchAssistants(req, res);
if (req.app.locals?.[req.query.endpoint]) {
/** @type {Partial<TAssistantEndpoint>} */
const assistantsConfig = req.app.locals[req.query.endpoint];
const { supportedIds, excludedIds } = assistantsConfig;
if (supportedIds?.length) {
body.data = body.data.filter((assistant) => supportedIds.includes(assistant.id));
} else if (excludedIds?.length) {
body.data = body.data.filter((assistant) => !excludedIds.includes(assistant.id));
}
}
const body = await fetchAssistants({ req, res });
res.json(body);
} catch (error) {
logger.error('[/assistants] Error listing assistants', error);
@@ -195,6 +187,7 @@ const uploadAssistantAvatar = async (req, res) => {
let { metadata: _metadata = '{}' } = req.body;
const { openai } = await getOpenAIClient({ req, res });
await validateAuthor({ req, openai });
const image = await uploadImageBuffer({
req,
@@ -229,7 +222,7 @@ const uploadAssistantAvatar = async (req, res) => {
const promises = [];
promises.push(
updateAssistant(
updateAssistantDoc(
{ assistant_id },
{
avatar: {

View file

@@ -1,5 +1,7 @@
const { ToolCallTypes } = require('librechat-data-provider');
const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
const { validateAndUpdateTool } = require('~/server/services/ActionService');
const { updateAssistantDoc } = require('~/models/Assistant');
const { getOpenAIClient } = require('./helpers');
const { logger } = require('~/config');
@@ -37,9 +39,11 @@ const createAssistant = async (req, res) => {
};
const assistant = await openai.beta.assistants.create(assistantData);
const promise = updateAssistantDoc({ assistant_id: assistant.id }, { user: req.user.id });
if (azureModelIdentifier) {
assistant.model = azureModelIdentifier;
}
await promise;
logger.debug('/assistants/', assistant);
res.status(201).json(assistant);
} catch (error) {
@@ -58,6 +62,7 @@ const createAssistant = async (req, res) => {
* @returns {Promise<Assistant>} The updated assistant.
*/
const updateAssistant = async ({ req, openai, assistant_id, updateData }) => {
await validateAuthor({ req, openai });
const tools = [];
let hasFileSearch = false;

View file

@@ -0,0 +1,43 @@
const { v4 } = require('uuid');
const { handleAbortError } = require('~/server/middleware/abortMiddleware');
/**
* Checks if the assistant is supported or excluded
* @param {object} req - Express Request
* @param {object} req.body - The request payload.
* @param {object} res - Express Response
* @param {function} next - Express next middleware function.
* @returns {Promise<void>}
*/
const validateAssistant = async (req, res, next) => {
const { endpoint, conversationId, assistant_id, messageId } = req.body;
/** @type {Partial<TAssistantEndpoint>} */
const assistantsConfig = req.app.locals?.[endpoint];
if (!assistantsConfig) {
return next();
}
const { supportedIds, excludedIds } = assistantsConfig;
const error = { message: 'Assistant not supported' };
if (supportedIds?.length && !supportedIds.includes(assistant_id)) {
return await handleAbortError(res, req, error, {
sender: 'System',
conversationId,
messageId: v4(),
parentMessageId: messageId,
error,
});
} else if (excludedIds?.length && excludedIds.includes(assistant_id)) {
return await handleAbortError(res, req, error, {
sender: 'System',
conversationId,
messageId: v4(),
parentMessageId: messageId,
});
}
return next();
};
module.exports = validateAssistant;
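Usage-wise, this middleware is mounted ahead of the chat controllers; the route diffs below show the exact wiring, repeated here for clarity:

// As added in the chatV1/chatV2 route files below:
router.post('/', validateModel, buildEndpointOption, validateAssistant, setHeaders, chatController);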

View file

@@ -0,0 +1,42 @@
const { getAssistant } = require('~/models/Assistant');
/**
* Validates that the requesting user is the author of the assistant (skipped for admins and non-private endpoints)
* @param {object} params
* @param {object} params.req - Express Request
* @param {object} params.req.body - The request payload.
* @param {string} [params.overrideEndpoint] - The override endpoint
* @param {string} [params.overrideAssistantId] - The override assistant ID
* @param {OpenAIClient} params.openai - OpenAI API Client
* @returns {Promise<void>}
*/
const validateAuthor = async ({ req, openai, overrideEndpoint, overrideAssistantId }) => {
if (req.user.role === 'ADMIN') {
return;
}
const endpoint = overrideEndpoint ?? req.body.endpoint ?? req.query.endpoint;
const assistant_id =
overrideAssistantId ?? req.params.id ?? req.body.assistant_id ?? req.query.assistant_id;
/** @type {Partial<TAssistantEndpoint>} */
const assistantsConfig = req.app.locals?.[endpoint];
if (!assistantsConfig) {
return;
}
if (!assistantsConfig.privateAssistants) {
return;
}
const assistantDoc = await getAssistant({ assistant_id, user: req.user.id });
if (assistantDoc) {
return;
}
const assistant = await openai.beta.assistants.retrieve(assistant_id);
if (req.user.id !== assistant?.metadata?.author) {
throw new Error(`Assistant ${assistant_id} is not authored by the user.`);
}
};
module.exports = validateAuthor;
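A minimal sketch of how the controllers in this commit consume validateAuthor; the two awaited calls match the chatV1/chatV2 and patch/delete diffs, while the catch block is illustrative since each real controller handles errors in its own try/catch:

try {
  const { openai } = await getOpenAIClient({ req, res });
  await validateAuthor({ req, openai }); // throws if a non-admin user is not the author
  // ...proceed with the assistant operation
} catch (error) {
  // Hypothetical handling; the actual controllers log and send their own error responses.
  logger.error('[validateAuthor] request rejected', error);
  res.status(403).json({ error: 'Assistant not authored by the user' });
}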

View file

@@ -4,7 +4,7 @@ const { encryptMetadata, domainParser } = require('~/server/services/ActionServi
const { actionDelimiter, EModelEndpoint } = require('librechat-data-provider');
const { getOpenAIClient } = require('~/server/controllers/assistants/helpers');
const { updateAction, getActions, deleteAction } = require('~/models/Action');
const { updateAssistant, getAssistant } = require('~/models/Assistant');
const { updateAssistantDoc, getAssistant } = require('~/models/Assistant');
const { logger } = require('~/config');
const router = express.Router();
@@ -109,7 +109,7 @@ router.post('/:assistant_id', async (req, res) => {
let updatedAssistant = await openai.beta.assistants.update(assistant_id, { tools });
const promises = [];
promises.push(
updateAssistant(
updateAssistantDoc(
{ assistant_id },
{
actions,
@@ -186,7 +186,7 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {
const promises = [];
promises.push(
updateAssistant(
updateAssistantDoc(
{ assistant_id },
{
actions: updatedActions,

View file

@@ -8,6 +8,7 @@ const {
// validateEndpoint,
buildEndpointOption,
} = require('~/server/middleware');
const validateAssistant = require('~/server/middleware/assistants/validate');
const chatController = require('~/server/controllers/assistants/chatV1');
router.post('/abort', handleAbort());
@@ -20,6 +21,6 @@ router.post('/abort', handleAbort());
* @param {express.Response} res - The response object, used to send back a response.
* @returns {void}
*/
router.post('/', validateModel, buildEndpointOption, setHeaders, chatController);
router.post('/', validateModel, buildEndpointOption, validateAssistant, setHeaders, chatController);
module.exports = router;

View file

@@ -8,6 +8,7 @@ const {
// validateEndpoint,
buildEndpointOption,
} = require('~/server/middleware');
const validateAssistant = require('~/server/middleware/assistants/validate');
const chatController = require('~/server/controllers/assistants/chatV2');
router.post('/abort', handleAbort());
@@ -20,6 +21,6 @@ router.post('/abort', handleAbort());
* @param {express.Response} res - The response object, used to send back a response.
* @returns {void}
*/
router.post('/', validateModel, buildEndpointOption, setHeaders, chatController);
router.post('/', validateModel, buildEndpointOption, validateAssistant, setHeaders, chatController);
module.exports = router;

View file

@@ -218,6 +218,7 @@ describe('AppService', () => {
pollIntervalMs: 5000,
timeoutMs: 30000,
supportedIds: ['id1', 'id2'],
privateAssistants: false,
},
},
}),
@@ -232,6 +233,7 @@
pollIntervalMs: 5000,
timeoutMs: 30000,
supportedIds: expect.arrayContaining(['id1', 'id2']),
privateAssistants: false,
}),
);
});
@@ -505,7 +507,31 @@ describe('AppService updating app.locals and issuing warnings', () => {
const { logger } = require('~/config');
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining('Both `supportedIds` and `excludedIds` are defined'),
expect.stringContaining(
'The \'assistants\' endpoint has both \'supportedIds\' and \'excludedIds\' defined.',
),
);
});
it('should log a warning when privateAssistants and supportedIds or excludedIds are provided', async () => {
const mockConfig = {
endpoints: {
assistants: {
privateAssistants: true,
supportedIds: ['id1'],
},
},
};
require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve(mockConfig));
const app = { locals: {} };
await require('./AppService')(app);
const { logger } = require('~/config');
expect(logger.warn).toHaveBeenCalledWith(
expect.stringContaining(
'The \'assistants\' endpoint has both \'privateAssistants\' and \'supportedIds\' or \'excludedIds\' defined.',
),
);
});

View file

@@ -300,7 +300,7 @@ async function textToSpeech(req, res) {
break;
}
} catch (innerError) {
logger.error('Error processing update:', chunk, innerError);
logger.error('Error processing manual update:', chunk, innerError);
if (!res.headersSent) {
res.status(500).end();
}
@@ -312,7 +312,10 @@ async function textToSpeech(req, res) {
res.end();
}
} catch (error) {
logger.error('An error occurred while creating the audio stream:', error);
logger.error(
'Error creating the audio stream. Suggestion: check your provider quota. Error:',
error,
);
res.status(500).send('An error occurred');
}
}

View file

@@ -29,7 +29,15 @@ function assistantsConfigSetup(config, assistantsEndpoint, prevConfig = {}) {
const parsedConfig = assistantEndpointSchema.parse(assistantsConfig);
if (assistantsConfig.supportedIds?.length && assistantsConfig.excludedIds?.length) {
logger.warn(
`Both \`supportedIds\` and \`excludedIds\` are defined for the ${assistantsEndpoint} endpoint; \`excludedIds\` field will be ignored.`,
`Configuration conflict: The '${assistantsEndpoint}' endpoint has both 'supportedIds' and 'excludedIds' defined. The 'excludedIds' will be ignored.`,
);
}
if (
assistantsConfig.privateAssistants &&
(assistantsConfig.supportedIds?.length || assistantsConfig.excludedIds?.length)
) {
logger.warn(
`Configuration conflict: The '${assistantsEndpoint}' endpoint has both 'privateAssistants' and 'supportedIds' or 'excludedIds' defined. The 'supportedIds' and 'excludedIds' will be ignored.`,
);
}
@@ -41,6 +49,7 @@ function assistantsConfigSetup(config, assistantsEndpoint, prevConfig = {}) {
supportedIds: parsedConfig.supportedIds,
capabilities: parsedConfig.capabilities,
excludedIds: parsedConfig.excludedIds,
privateAssistants: parsedConfig.privateAssistants,
timeoutMs: parsedConfig.timeoutMs,
};
}
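Finally, a hedged example of a configuration that would trigger the second warning above, mirroring the mockConfig used in the AppService test earlier; in practice this block comes from librechat.yaml via loadCustomConfig:

// Illustrative custom config; field names match the test's mockConfig.
const customConfig = {
  endpoints: {
    assistants: {
      privateAssistants: true,
      supportedIds: ['asst_example'], // conflicts with privateAssistants
    },
  },
};
// On startup, AppService logs the conflict warning shown above and, per its text,
// 'supportedIds' and 'excludedIds' are ignored in favor of 'privateAssistants'.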