LibreChat/api/server/middleware/concurrentLimiter.js
Danny Avila 5145121eb7
feat(api): initial Redis support; fix(SearchBar): proper debounce (#1039)
* refactor: use keyv for search caching with 1 min expirations

* feat: keyvRedis; chore: bump keyv, bun.lockb, add jsconfig for vscode file resolution

* feat: api/search redis support

* refactor(redis): use ioredis cluster for keyv
fix(OpenID): when redis is configured, use redis memory store for express-session

* fix: revert using uri for keyvredis

* fix(SearchBar): properly debounce search queries, fix weird render behaviors

* refactor: add authentication to search endpoint and show error messages in results

* feat: redis support for violation logs

* fix(logViolation): ensure a number is always being stored in cache

* feat(concurrentLimiter): uses clearPendingReq, clears pendingReq on abort, redis support

* fix(api/search/enable): query only when authenticated

* feat(ModelService): redis support

* feat(checkBan): redis support

* refactor(api/search): consolidate keyv logic

* fix(ci): add default empty value for REDIS_URI

* refactor(keyvRedis): use condition to initialize keyvRedis assignment (see the sketch after this log)

* refactor(connectDb): handle disconnected state (should create a new conn)

* fix(ci/e2e): handle case where cleanUp did not successfully run

* fix(getDefaultEndpoint): return endpoint from localStorage if defined and endpointsConfig is default

* ci(e2e): remove afterAll messages as startup/cleanUp will clear messages

* ci(e2e): remove teardown for CI until further notice

* chore: bump playwright/test

* ci(e2e): reinstate teardown as CI issue is specific to github env

* fix(ci): click settings menu trigger by testid
2023-10-11 17:05:47 -04:00
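
Several of these commits make Redis optional: a keyv Redis store is created only when a Redis connection string is configured, and callers fall back to in-memory keyv otherwise. Below is a minimal sketch of that pattern, assuming a REDIS_URI environment variable and the keyv / @keyv/redis packages; the file layout and variable names are illustrative, not LibreChat's exact code.

// Sketch: Redis-backed keyv when REDIS_URI is set, in-memory keyv otherwise.
// Names and structure are assumptions for illustration only.
const Keyv = require('keyv');
const KeyvRedis = require('@keyv/redis');

const { REDIS_URI } = process.env ?? {};

// Only initialize the Redis store when a URI is configured
// (the "use condition to initialize keyvRedis assignment" commit above).
const keyvRedis = REDIS_URI ? new KeyvRedis(REDIS_URI) : null;

// Consumers choose the backing store per namespace; without Redis,
// keyv keeps values in process memory.
const pendingReqCache = keyvRedis
  ? new Keyv({ store: keyvRedis, namespace: 'pending_req' })
  : new Keyv({ namespace: 'pending_req' });

module.exports = { keyvRedis, pendingReqCache };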

75 lines
2.2 KiB
JavaScript

const clearPendingReq = require('../../cache/clearPendingReq');
const { logViolation, getLogStores } = require('../../cache');
const denyRequest = require('./denyRequest');

const {
  USE_REDIS,
  CONCURRENT_MESSAGE_MAX = 1,
  CONCURRENT_VIOLATION_SCORE: score,
} = process.env ?? {};

const ttl = 1000 * 60 * 1;

/**
 * Middleware to limit concurrent requests for a user.
 *
 * This middleware checks if a user has exceeded a specified concurrent request limit.
 * If the user exceeds the limit, an error is returned. If the user is within the limit,
 * their request count is incremented. After the request is processed, the count is decremented.
 * If the `cache` store is not available, the middleware will skip its logic.
 *
 * @function
 * @param {Object} req - Express request object containing user information.
 * @param {Object} res - Express response object.
 * @param {function} next - Express next middleware function.
 * @throws {Error} Throws an error if the user exceeds the concurrent request limit.
 */
const concurrentLimiter = async (req, res, next) => {
  const namespace = 'pending_req';
  const cache = getLogStores(namespace);
  if (!cache) {
    return next();
  }

  if (Object.keys(req?.body ?? {}).length === 1 && req?.body?.abortKey) {
    return next();
  }

  const userId = req.user?.id ?? req.user?._id ?? '';
  const limit = Math.max(CONCURRENT_MESSAGE_MAX, 1);
  const type = 'concurrent';

  const key = `${USE_REDIS ? namespace : ''}:${userId}`;
  const pendingRequests = +((await cache.get(key)) ?? 0);

  if (pendingRequests >= limit) {
    const errorMessage = {
      type,
      limit,
      pendingRequests,
    };

    await logViolation(req, res, type, errorMessage, score);
    return await denyRequest(req, res, errorMessage);
  } else {
    await cache.set(key, pendingRequests + 1, ttl);
  }

  // Ensure the requests are removed from the store once the request is done
  let cleared = false;
  const cleanUp = async () => {
    if (cleared) {
      return;
    }
    cleared = true;
    await clearPendingReq({ userId, cache });
  };

  if (pendingRequests < limit) {
    res.on('finish', cleanUp);
    res.on('close', cleanUp);
  }

  next();
};

module.exports = concurrentLimiter;
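
For context, here is a sketch of how a middleware like this is typically mounted on an Express route; the route path, router file, and handler are illustrative assumptions rather than LibreChat's actual route wiring.

// Usage sketch (route path, require path, and handler are hypothetical).
const express = require('express');
const concurrentLimiter = require('./middleware/concurrentLimiter');

const router = express.Router();

// Each message request passes through the limiter first; when the response
// finishes or the connection closes, the pending count is cleared again.
router.post('/ask', concurrentLimiter, async (req, res) => {
  // ... handle the message request
  res.json({ ok: true });
});

module.exports = router;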