mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-16 08:20:14 +01:00
🔄 refactor: Migrate Cache Logic to TypeScript (#9771)
* Refactor: Moved Redis cache infra logic into `packages/api` - Moved cacheFactory and redisClients from `api/cache` into `packages/api/src/cache` so that features in `packages/api` can use cache without importing backward from the backend. - Converted all moved files into TS with proper typing. - Created integration tests to run against actual Redis servers for redisClients and cacheFactory. - Added a GitHub workflow to run integration tests for the cache feature. - Bug fix: keyvRedisClient now implements the PING feature properly. * chore: consolidate imports in getLogStores.js * chore: reorder imports * chore: re-add fs-extra as dev dep. * chore: reorder imports in cacheConfig.ts, cacheFactory.ts, and keyvMongo.ts --------- Co-authored-by: Danny Avila <danny@librechat.ai>
This commit is contained in:
parent
341435fb25
commit
0e5bb6f98c
38 changed files with 1552 additions and 1340 deletions
78
.github/workflows/cache-integration-tests.yml
vendored
Normal file
78
.github/workflows/cache-integration-tests.yml
vendored
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
name: Cache Integration Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- dev
|
||||
- release/*
|
||||
paths:
|
||||
- 'packages/api/src/cache/**'
|
||||
- 'redis-config/**'
|
||||
- '.github/workflows/cache-integration-tests.yml'
|
||||
|
||||
jobs:
|
||||
cache_integration_tests:
|
||||
name: Run Cache Integration Tests
|
||||
timeout-minutes: 30
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Use Node.js 20.x
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install Redis tools
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y redis-server redis-tools
|
||||
|
||||
- name: Start Single Redis Instance
|
||||
run: |
|
||||
redis-server --daemonize yes --port 6379
|
||||
sleep 2
|
||||
# Verify single Redis is running
|
||||
redis-cli -p 6379 ping || exit 1
|
||||
|
||||
- name: Start Redis Cluster
|
||||
working-directory: redis-config
|
||||
run: |
|
||||
chmod +x start-cluster.sh stop-cluster.sh
|
||||
./start-cluster.sh
|
||||
sleep 10
|
||||
# Verify cluster is running
|
||||
redis-cli -p 7001 cluster info || exit 1
|
||||
redis-cli -p 7002 cluster info || exit 1
|
||||
redis-cli -p 7003 cluster info || exit 1
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Build packages
|
||||
run: |
|
||||
npm run build:data-provider
|
||||
npm run build:data-schemas
|
||||
npm run build:api
|
||||
|
||||
- name: Run cache integration tests
|
||||
working-directory: packages/api
|
||||
env:
|
||||
NODE_ENV: test
|
||||
USE_REDIS: true
|
||||
REDIS_URI: redis://127.0.0.1:6379
|
||||
REDIS_CLUSTER_URI: redis://127.0.0.1:7001,redis://127.0.0.1:7002,redis://127.0.0.1:7003
|
||||
run: npm run test:cache:integration
|
||||
|
||||
- name: Stop Redis Cluster
|
||||
if: always()
|
||||
working-directory: redis-config
|
||||
run: ./stop-cluster.sh || true
|
||||
|
||||
- name: Stop Single Redis Instance
|
||||
if: always()
|
||||
run: redis-cli -p 6379 shutdown || true
|
||||
108
api/cache/cacheFactory.js
vendored
108
api/cache/cacheFactory.js
vendored
|
|
@ -1,108 +0,0 @@
|
|||
const KeyvRedis = require('@keyv/redis').default;
|
||||
const { Keyv } = require('keyv');
|
||||
const { RedisStore } = require('rate-limit-redis');
|
||||
const { Time } = require('librechat-data-provider');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { RedisStore: ConnectRedis } = require('connect-redis');
|
||||
const MemoryStore = require('memorystore')(require('express-session'));
|
||||
const { keyvRedisClient, ioredisClient, GLOBAL_PREFIX_SEPARATOR } = require('./redisClients');
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { violationFile } = require('./keyvFiles');
|
||||
|
||||
/**
|
||||
* Creates a cache instance using Redis or a fallback store. Suitable for general caching needs.
|
||||
* @param {string} namespace - The cache namespace.
|
||||
* @param {number} [ttl] - Time to live for cache entries.
|
||||
* @param {object} [fallbackStore] - Optional fallback store if Redis is not used.
|
||||
* @returns {Keyv} Cache instance.
|
||||
*/
|
||||
const standardCache = (namespace, ttl = undefined, fallbackStore = undefined) => {
|
||||
if (
|
||||
cacheConfig.USE_REDIS &&
|
||||
!cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES?.includes(namespace)
|
||||
) {
|
||||
try {
|
||||
const keyvRedis = new KeyvRedis(keyvRedisClient);
|
||||
const cache = new Keyv(keyvRedis, { namespace, ttl });
|
||||
keyvRedis.namespace = cacheConfig.REDIS_KEY_PREFIX;
|
||||
keyvRedis.keyPrefixSeparator = GLOBAL_PREFIX_SEPARATOR;
|
||||
|
||||
cache.on('error', (err) => {
|
||||
logger.error(`Cache error in namespace ${namespace}:`, err);
|
||||
});
|
||||
|
||||
return cache;
|
||||
} catch (err) {
|
||||
logger.error(`Failed to create Redis cache for namespace ${namespace}:`, err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
if (fallbackStore) return new Keyv({ store: fallbackStore, namespace, ttl });
|
||||
return new Keyv({ namespace, ttl });
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a cache instance for storing violation data.
|
||||
* Uses a file-based fallback store if Redis is not enabled.
|
||||
* @param {string} namespace - The cache namespace for violations.
|
||||
* @param {number} [ttl] - Time to live for cache entries.
|
||||
* @returns {Keyv} Cache instance for violations.
|
||||
*/
|
||||
const violationCache = (namespace, ttl = undefined) => {
|
||||
return standardCache(`violations:${namespace}`, ttl, violationFile);
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a session cache instance using Redis or in-memory store.
|
||||
* @param {string} namespace - The session namespace.
|
||||
* @param {number} [ttl] - Time to live for session entries.
|
||||
* @returns {MemoryStore | ConnectRedis} Session store instance.
|
||||
*/
|
||||
const sessionCache = (namespace, ttl = undefined) => {
|
||||
namespace = namespace.endsWith(':') ? namespace : `${namespace}:`;
|
||||
if (!cacheConfig.USE_REDIS) return new MemoryStore({ ttl, checkPeriod: Time.ONE_DAY });
|
||||
const store = new ConnectRedis({ client: ioredisClient, ttl, prefix: namespace });
|
||||
if (ioredisClient) {
|
||||
ioredisClient.on('error', (err) => {
|
||||
logger.error(`Session store Redis error for namespace ${namespace}:`, err);
|
||||
});
|
||||
}
|
||||
return store;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a rate limiter cache using Redis.
|
||||
* @param {string} prefix - The key prefix for rate limiting.
|
||||
* @returns {RedisStore|undefined} RedisStore instance or undefined if Redis is not used.
|
||||
*/
|
||||
const limiterCache = (prefix) => {
|
||||
if (!prefix) throw new Error('prefix is required');
|
||||
if (!cacheConfig.USE_REDIS) return undefined;
|
||||
prefix = prefix.endsWith(':') ? prefix : `${prefix}:`;
|
||||
|
||||
try {
|
||||
if (!ioredisClient) {
|
||||
logger.warn(`Redis client not available for rate limiter with prefix ${prefix}`);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return new RedisStore({ sendCommand, prefix });
|
||||
} catch (err) {
|
||||
logger.error(`Failed to create Redis rate limiter for prefix ${prefix}:`, err);
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
const sendCommand = (...args) => {
|
||||
if (!ioredisClient) {
|
||||
logger.warn('Redis client not available for command execution');
|
||||
return Promise.reject(new Error('Redis client not available'));
|
||||
}
|
||||
|
||||
return ioredisClient.call(...args).catch((err) => {
|
||||
logger.error('Redis command execution failed:', err);
|
||||
throw err;
|
||||
});
|
||||
};
|
||||
|
||||
module.exports = { standardCache, sessionCache, violationCache, limiterCache };
|
||||
432
api/cache/cacheFactory.spec.js
vendored
432
api/cache/cacheFactory.spec.js
vendored
|
|
@ -1,432 +0,0 @@
|
|||
const { Time } = require('librechat-data-provider');
|
||||
|
||||
// Mock dependencies first
|
||||
const mockKeyvRedis = {
|
||||
namespace: '',
|
||||
keyPrefixSeparator: '',
|
||||
};
|
||||
|
||||
const mockKeyv = jest.fn().mockReturnValue({
|
||||
mock: 'keyv',
|
||||
on: jest.fn(),
|
||||
});
|
||||
const mockConnectRedis = jest.fn().mockReturnValue({ mock: 'connectRedis' });
|
||||
const mockMemoryStore = jest.fn().mockReturnValue({ mock: 'memoryStore' });
|
||||
const mockRedisStore = jest.fn().mockReturnValue({ mock: 'redisStore' });
|
||||
|
||||
const mockIoredisClient = {
|
||||
call: jest.fn(),
|
||||
on: jest.fn(),
|
||||
};
|
||||
|
||||
const mockKeyvRedisClient = {};
|
||||
const mockViolationFile = {};
|
||||
|
||||
// Mock modules before requiring the main module
|
||||
jest.mock('@keyv/redis', () => ({
|
||||
default: jest.fn().mockImplementation(() => mockKeyvRedis),
|
||||
}));
|
||||
|
||||
jest.mock('keyv', () => ({
|
||||
Keyv: mockKeyv,
|
||||
}));
|
||||
|
||||
jest.mock('./cacheConfig', () => ({
|
||||
cacheConfig: {
|
||||
USE_REDIS: false,
|
||||
REDIS_KEY_PREFIX: 'test',
|
||||
FORCED_IN_MEMORY_CACHE_NAMESPACES: [],
|
||||
},
|
||||
}));
|
||||
|
||||
jest.mock('./redisClients', () => ({
|
||||
keyvRedisClient: mockKeyvRedisClient,
|
||||
ioredisClient: mockIoredisClient,
|
||||
GLOBAL_PREFIX_SEPARATOR: '::',
|
||||
}));
|
||||
|
||||
jest.mock('./keyvFiles', () => ({
|
||||
violationFile: mockViolationFile,
|
||||
}));
|
||||
|
||||
jest.mock('connect-redis', () => ({ RedisStore: mockConnectRedis }));
|
||||
|
||||
jest.mock('memorystore', () => jest.fn(() => mockMemoryStore));
|
||||
|
||||
jest.mock('rate-limit-redis', () => ({
|
||||
RedisStore: mockRedisStore,
|
||||
}));
|
||||
|
||||
jest.mock('@librechat/data-schemas', () => ({
|
||||
logger: {
|
||||
error: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
info: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
// Import after mocking
|
||||
const { standardCache, sessionCache, violationCache, limiterCache } = require('./cacheFactory');
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
|
||||
describe('cacheFactory', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Reset cache config mock
|
||||
cacheConfig.USE_REDIS = false;
|
||||
cacheConfig.REDIS_KEY_PREFIX = 'test';
|
||||
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = [];
|
||||
});
|
||||
|
||||
describe('redisCache', () => {
|
||||
it('should create Redis cache when USE_REDIS is true', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const namespace = 'test-namespace';
|
||||
const ttl = 3600;
|
||||
|
||||
standardCache(namespace, ttl);
|
||||
|
||||
expect(require('@keyv/redis').default).toHaveBeenCalledWith(mockKeyvRedisClient);
|
||||
expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl });
|
||||
expect(mockKeyvRedis.namespace).toBe(cacheConfig.REDIS_KEY_PREFIX);
|
||||
expect(mockKeyvRedis.keyPrefixSeparator).toBe('::');
|
||||
});
|
||||
|
||||
it('should create Redis cache with undefined ttl when not provided', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const namespace = 'test-namespace';
|
||||
|
||||
standardCache(namespace);
|
||||
|
||||
expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl: undefined });
|
||||
});
|
||||
|
||||
it('should use fallback store when USE_REDIS is false and fallbackStore is provided', () => {
|
||||
cacheConfig.USE_REDIS = false;
|
||||
const namespace = 'test-namespace';
|
||||
const ttl = 3600;
|
||||
const fallbackStore = { some: 'store' };
|
||||
|
||||
standardCache(namespace, ttl, fallbackStore);
|
||||
|
||||
expect(mockKeyv).toHaveBeenCalledWith({ store: fallbackStore, namespace, ttl });
|
||||
});
|
||||
|
||||
it('should create default Keyv instance when USE_REDIS is false and no fallbackStore', () => {
|
||||
cacheConfig.USE_REDIS = false;
|
||||
const namespace = 'test-namespace';
|
||||
const ttl = 3600;
|
||||
|
||||
standardCache(namespace, ttl);
|
||||
|
||||
expect(mockKeyv).toHaveBeenCalledWith({ namespace, ttl });
|
||||
});
|
||||
|
||||
it('should handle namespace and ttl as undefined', () => {
|
||||
cacheConfig.USE_REDIS = false;
|
||||
|
||||
standardCache();
|
||||
|
||||
expect(mockKeyv).toHaveBeenCalledWith({ namespace: undefined, ttl: undefined });
|
||||
});
|
||||
|
||||
it('should use fallback when namespace is in FORCED_IN_MEMORY_CACHE_NAMESPACES', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = ['forced-memory'];
|
||||
const namespace = 'forced-memory';
|
||||
const ttl = 3600;
|
||||
|
||||
standardCache(namespace, ttl);
|
||||
|
||||
expect(require('@keyv/redis').default).not.toHaveBeenCalled();
|
||||
expect(mockKeyv).toHaveBeenCalledWith({ namespace, ttl });
|
||||
});
|
||||
|
||||
it('should use Redis when namespace is not in FORCED_IN_MEMORY_CACHE_NAMESPACES', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES = ['other-namespace'];
|
||||
const namespace = 'test-namespace';
|
||||
const ttl = 3600;
|
||||
|
||||
standardCache(namespace, ttl);
|
||||
|
||||
expect(require('@keyv/redis').default).toHaveBeenCalledWith(mockKeyvRedisClient);
|
||||
expect(mockKeyv).toHaveBeenCalledWith(mockKeyvRedis, { namespace, ttl });
|
||||
});
|
||||
|
||||
it('should throw error when Redis cache creation fails', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const namespace = 'test-namespace';
|
||||
const ttl = 3600;
|
||||
const testError = new Error('Redis connection failed');
|
||||
|
||||
const KeyvRedis = require('@keyv/redis').default;
|
||||
KeyvRedis.mockImplementationOnce(() => {
|
||||
throw testError;
|
||||
});
|
||||
|
||||
expect(() => standardCache(namespace, ttl)).toThrow('Redis connection failed');
|
||||
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
`Failed to create Redis cache for namespace ${namespace}:`,
|
||||
testError,
|
||||
);
|
||||
|
||||
expect(mockKeyv).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('violationCache', () => {
|
||||
it('should create violation cache with prefixed namespace', () => {
|
||||
const namespace = 'test-violations';
|
||||
const ttl = 7200;
|
||||
|
||||
// We can't easily mock the internal redisCache call since it's in the same module
|
||||
// But we can test that the function executes without throwing
|
||||
expect(() => violationCache(namespace, ttl)).not.toThrow();
|
||||
});
|
||||
|
||||
it('should create violation cache with undefined ttl', () => {
|
||||
const namespace = 'test-violations';
|
||||
|
||||
violationCache(namespace);
|
||||
|
||||
// The function should call redisCache with violations: prefixed namespace
|
||||
// Since we can't easily mock the internal redisCache call, we test the behavior
|
||||
expect(() => violationCache(namespace)).not.toThrow();
|
||||
});
|
||||
|
||||
it('should handle undefined namespace', () => {
|
||||
expect(() => violationCache(undefined)).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('sessionCache', () => {
|
||||
it('should return MemoryStore when USE_REDIS is false', () => {
|
||||
cacheConfig.USE_REDIS = false;
|
||||
const namespace = 'sessions';
|
||||
const ttl = 86400;
|
||||
|
||||
const result = sessionCache(namespace, ttl);
|
||||
|
||||
expect(mockMemoryStore).toHaveBeenCalledWith({ ttl, checkPeriod: Time.ONE_DAY });
|
||||
expect(result).toBe(mockMemoryStore());
|
||||
});
|
||||
|
||||
it('should return ConnectRedis when USE_REDIS is true', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const namespace = 'sessions';
|
||||
const ttl = 86400;
|
||||
|
||||
const result = sessionCache(namespace, ttl);
|
||||
|
||||
expect(mockConnectRedis).toHaveBeenCalledWith({
|
||||
client: mockIoredisClient,
|
||||
ttl,
|
||||
prefix: `${namespace}:`,
|
||||
});
|
||||
expect(result).toBe(mockConnectRedis());
|
||||
});
|
||||
|
||||
it('should add colon to namespace if not present', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const namespace = 'sessions';
|
||||
|
||||
sessionCache(namespace);
|
||||
|
||||
expect(mockConnectRedis).toHaveBeenCalledWith({
|
||||
client: mockIoredisClient,
|
||||
ttl: undefined,
|
||||
prefix: 'sessions:',
|
||||
});
|
||||
});
|
||||
|
||||
it('should not add colon to namespace if already present', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const namespace = 'sessions:';
|
||||
|
||||
sessionCache(namespace);
|
||||
|
||||
expect(mockConnectRedis).toHaveBeenCalledWith({
|
||||
client: mockIoredisClient,
|
||||
ttl: undefined,
|
||||
prefix: 'sessions:',
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle undefined ttl', () => {
|
||||
cacheConfig.USE_REDIS = false;
|
||||
const namespace = 'sessions';
|
||||
|
||||
sessionCache(namespace);
|
||||
|
||||
expect(mockMemoryStore).toHaveBeenCalledWith({
|
||||
ttl: undefined,
|
||||
checkPeriod: Time.ONE_DAY,
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw error when ConnectRedis constructor fails', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const namespace = 'sessions';
|
||||
const ttl = 86400;
|
||||
|
||||
// Mock ConnectRedis to throw an error during construction
|
||||
const redisError = new Error('Redis connection failed');
|
||||
mockConnectRedis.mockImplementationOnce(() => {
|
||||
throw redisError;
|
||||
});
|
||||
|
||||
// The error should propagate up, not be caught
|
||||
expect(() => sessionCache(namespace, ttl)).toThrow('Redis connection failed');
|
||||
|
||||
// Verify that MemoryStore was NOT used as fallback
|
||||
expect(mockMemoryStore).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should register error handler but let errors propagate to Express', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const namespace = 'sessions';
|
||||
|
||||
// Create a mock session store with middleware methods
|
||||
const mockSessionStore = {
|
||||
get: jest.fn(),
|
||||
set: jest.fn(),
|
||||
destroy: jest.fn(),
|
||||
};
|
||||
mockConnectRedis.mockReturnValue(mockSessionStore);
|
||||
|
||||
const store = sessionCache(namespace);
|
||||
|
||||
// Verify error handler was registered
|
||||
expect(mockIoredisClient.on).toHaveBeenCalledWith('error', expect.any(Function));
|
||||
|
||||
// Get the error handler
|
||||
const errorHandler = mockIoredisClient.on.mock.calls.find((call) => call[0] === 'error')[1];
|
||||
|
||||
// Simulate an error from Redis during a session operation
|
||||
const redisError = new Error('Socket closed unexpectedly');
|
||||
|
||||
// The error handler should log but not swallow the error
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
errorHandler(redisError);
|
||||
|
||||
expect(logger.error).toHaveBeenCalledWith(
|
||||
`Session store Redis error for namespace ${namespace}::`,
|
||||
redisError,
|
||||
);
|
||||
|
||||
// Now simulate what happens when session middleware tries to use the store
|
||||
const callback = jest.fn();
|
||||
mockSessionStore.get.mockImplementation((sid, cb) => {
|
||||
cb(new Error('Redis connection lost'));
|
||||
});
|
||||
|
||||
// Call the store's get method (as Express session would)
|
||||
store.get('test-session-id', callback);
|
||||
|
||||
// The error should be passed to the callback, not swallowed
|
||||
expect(callback).toHaveBeenCalledWith(new Error('Redis connection lost'));
|
||||
});
|
||||
|
||||
it('should handle null ioredisClient gracefully', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const namespace = 'sessions';
|
||||
|
||||
// Temporarily set ioredisClient to null (simulating connection not established)
|
||||
const originalClient = require('./redisClients').ioredisClient;
|
||||
require('./redisClients').ioredisClient = null;
|
||||
|
||||
// ConnectRedis might accept null client but would fail on first use
|
||||
// The important thing is it doesn't throw uncaught exceptions during construction
|
||||
const store = sessionCache(namespace);
|
||||
expect(store).toBeDefined();
|
||||
|
||||
// Restore original client
|
||||
require('./redisClients').ioredisClient = originalClient;
|
||||
});
|
||||
});
|
||||
|
||||
describe('limiterCache', () => {
|
||||
it('should return undefined when USE_REDIS is false', () => {
|
||||
cacheConfig.USE_REDIS = false;
|
||||
const result = limiterCache('prefix');
|
||||
|
||||
expect(result).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return RedisStore when USE_REDIS is true', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
const result = limiterCache('rate-limit');
|
||||
|
||||
expect(mockRedisStore).toHaveBeenCalledWith({
|
||||
sendCommand: expect.any(Function),
|
||||
prefix: `rate-limit:`,
|
||||
});
|
||||
expect(result).toBe(mockRedisStore());
|
||||
});
|
||||
|
||||
it('should add colon to prefix if not present', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
limiterCache('rate-limit');
|
||||
|
||||
expect(mockRedisStore).toHaveBeenCalledWith({
|
||||
sendCommand: expect.any(Function),
|
||||
prefix: 'rate-limit:',
|
||||
});
|
||||
});
|
||||
|
||||
it('should not add colon to prefix if already present', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
limiterCache('rate-limit:');
|
||||
|
||||
expect(mockRedisStore).toHaveBeenCalledWith({
|
||||
sendCommand: expect.any(Function),
|
||||
prefix: 'rate-limit:',
|
||||
});
|
||||
});
|
||||
|
||||
it('should pass sendCommand function that calls ioredisClient.call', async () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
mockIoredisClient.call.mockResolvedValue('test-value');
|
||||
|
||||
limiterCache('rate-limit');
|
||||
|
||||
const sendCommandCall = mockRedisStore.mock.calls[0][0];
|
||||
const sendCommand = sendCommandCall.sendCommand;
|
||||
|
||||
// Test that sendCommand properly delegates to ioredisClient.call
|
||||
const args = ['GET', 'test-key'];
|
||||
const result = await sendCommand(...args);
|
||||
|
||||
expect(mockIoredisClient.call).toHaveBeenCalledWith(...args);
|
||||
expect(result).toBe('test-value');
|
||||
});
|
||||
|
||||
it('should handle sendCommand errors properly', async () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
|
||||
// Mock the call method to reject with an error
|
||||
const testError = new Error('Redis error');
|
||||
mockIoredisClient.call.mockRejectedValue(testError);
|
||||
|
||||
limiterCache('rate-limit');
|
||||
|
||||
const sendCommandCall = mockRedisStore.mock.calls[0][0];
|
||||
const sendCommand = sendCommandCall.sendCommand;
|
||||
|
||||
// Test that sendCommand properly handles errors
|
||||
const args = ['GET', 'test-key'];
|
||||
|
||||
await expect(sendCommand(...args)).rejects.toThrow('Redis error');
|
||||
expect(mockIoredisClient.call).toHaveBeenCalledWith(...args);
|
||||
});
|
||||
|
||||
it('should handle undefined prefix', () => {
|
||||
cacheConfig.USE_REDIS = true;
|
||||
expect(() => limiterCache()).toThrow('prefix is required');
|
||||
});
|
||||
});
|
||||
});
|
||||
14
api/cache/getLogStores.js
vendored
14
api/cache/getLogStores.js
vendored
|
|
@ -1,9 +1,13 @@
|
|||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { Keyv } = require('keyv');
|
||||
const { CacheKeys, ViolationTypes, Time } = require('librechat-data-provider');
|
||||
const { logFile } = require('./keyvFiles');
|
||||
const keyvMongo = require('./keyvMongo');
|
||||
const { standardCache, sessionCache, violationCache } = require('./cacheFactory');
|
||||
const { Time, CacheKeys, ViolationTypes } = require('librechat-data-provider');
|
||||
const {
|
||||
logFile,
|
||||
keyvMongo,
|
||||
cacheConfig,
|
||||
sessionCache,
|
||||
standardCache,
|
||||
violationCache,
|
||||
} = require('@librechat/api');
|
||||
|
||||
const namespaces = {
|
||||
[ViolationTypes.GENERAL]: new Keyv({ store: logFile, namespace: 'violations' }),
|
||||
|
|
|
|||
3
api/cache/index.js
vendored
3
api/cache/index.js
vendored
|
|
@ -1,5 +1,4 @@
|
|||
const keyvFiles = require('./keyvFiles');
|
||||
const getLogStores = require('./getLogStores');
|
||||
const logViolation = require('./logViolation');
|
||||
|
||||
module.exports = { ...keyvFiles, getLogStores, logViolation };
|
||||
module.exports = { getLogStores, logViolation };
|
||||
|
|
|
|||
9
api/cache/keyvFiles.js
vendored
9
api/cache/keyvFiles.js
vendored
|
|
@ -1,9 +0,0 @@
|
|||
const { KeyvFile } = require('keyv-file');
|
||||
|
||||
const logFile = new KeyvFile({ filename: './data/logs.json' }).setMaxListeners(20);
|
||||
const violationFile = new KeyvFile({ filename: './data/violations.json' }).setMaxListeners(20);
|
||||
|
||||
module.exports = {
|
||||
logFile,
|
||||
violationFile,
|
||||
};
|
||||
|
|
@ -42,7 +42,6 @@
|
|||
"@azure/storage-blob": "^12.27.0",
|
||||
"@google/generative-ai": "^0.24.0",
|
||||
"@googleapis/youtube": "^20.0.0",
|
||||
"@keyv/redis": "^4.3.3",
|
||||
"@langchain/community": "^0.3.47",
|
||||
"@langchain/core": "^0.3.62",
|
||||
"@langchain/google-genai": "^0.2.13",
|
||||
|
|
@ -59,7 +58,6 @@
|
|||
"axios": "^1.12.1",
|
||||
"bcryptjs": "^2.4.3",
|
||||
"compression": "^1.8.1",
|
||||
"connect-redis": "^8.1.0",
|
||||
"cookie": "^0.7.2",
|
||||
"cookie-parser": "^1.4.7",
|
||||
"cors": "^2.8.5",
|
||||
|
|
@ -77,17 +75,13 @@
|
|||
"googleapis": "^126.0.1",
|
||||
"handlebars": "^4.7.7",
|
||||
"https-proxy-agent": "^7.0.6",
|
||||
"ioredis": "^5.3.2",
|
||||
"js-yaml": "^4.1.0",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"jwks-rsa": "^3.2.0",
|
||||
"keyv": "^5.3.2",
|
||||
"keyv-file": "^5.1.2",
|
||||
"klona": "^2.0.6",
|
||||
"librechat-data-provider": "*",
|
||||
"lodash": "^4.17.21",
|
||||
"meilisearch": "^0.38.0",
|
||||
"memorystore": "^1.6.7",
|
||||
"mime": "^3.0.0",
|
||||
"module-alias": "^2.2.3",
|
||||
"mongoose": "^8.12.1",
|
||||
|
|
@ -107,7 +101,6 @@
|
|||
"passport-jwt": "^4.0.1",
|
||||
"passport-ldapauth": "^3.0.1",
|
||||
"passport-local": "^1.0.0",
|
||||
"rate-limit-redis": "^4.2.0",
|
||||
"sharp": "^0.33.5",
|
||||
"tiktoken": "^1.0.15",
|
||||
"traverse": "^0.6.7",
|
||||
|
|
|
|||
|
|
@ -1,10 +1,9 @@
|
|||
const { Keyv } = require('keyv');
|
||||
const uap = require('ua-parser-js');
|
||||
const { isEnabled } = require('@librechat/api');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { isEnabled, keyvMongo } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const { removePorts } = require('~/server/utils');
|
||||
const keyvMongo = require('~/cache/keyvMongo');
|
||||
const denyRequest = require('./denyRequest');
|
||||
const { getLogStores } = require('~/cache');
|
||||
const { findUser } = require('~/models');
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
const logViolation = require('~/cache/logViolation');
|
||||
|
||||
const getEnvironmentVariables = () => {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
const logViolation = require('~/cache/logViolation');
|
||||
|
||||
const getEnvironmentVariables = () => {
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const { removePorts } = require('~/server/utils');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
const { logViolation } = require('~/cache');
|
||||
|
||||
const { LOGIN_WINDOW = 5, LOGIN_MAX = 7, LOGIN_VIOLATION_SCORE: score } = process.env;
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const denyRequest = require('~/server/middleware/denyRequest');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
const { logViolation } = require('~/cache');
|
||||
|
||||
const {
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const { removePorts } = require('~/server/utils');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
const { logViolation } = require('~/cache');
|
||||
|
||||
const { REGISTER_WINDOW = 60, REGISTER_MAX = 5, REGISTRATION_VIOLATION_SCORE: score } = process.env;
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const { removePorts } = require('~/server/utils');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
const { logViolation } = require('~/cache');
|
||||
|
||||
const {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
const logViolation = require('~/cache/logViolation');
|
||||
|
||||
const getEnvironmentVariables = () => {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
const logViolation = require('~/cache/logViolation');
|
||||
|
||||
const { TOOL_CALL_VIOLATION_SCORE: score } = process.env;
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const logViolation = require('~/cache/logViolation');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
|
||||
const getEnvironmentVariables = () => {
|
||||
const TTS_IP_MAX = parseInt(process.env.TTS_IP_MAX) || 100;
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
const logViolation = require('~/cache/logViolation');
|
||||
|
||||
const getEnvironmentVariables = () => {
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const rateLimit = require('express-rate-limit');
|
||||
const { limiterCache } = require('@librechat/api');
|
||||
const { ViolationTypes } = require('librechat-data-provider');
|
||||
const { removePorts } = require('~/server/utils');
|
||||
const { limiterCache } = require('~/cache/cacheFactory');
|
||||
const { logViolation } = require('~/cache');
|
||||
|
||||
const {
|
||||
|
|
|
|||
|
|
@ -136,6 +136,7 @@
|
|||
"babel-plugin-transform-import-meta": "^2.3.2",
|
||||
"babel-plugin-transform-vite-meta-env": "^1.0.3",
|
||||
"eslint-plugin-jest": "^28.11.0",
|
||||
"fs-extra": "^11.3.2",
|
||||
"identity-obj-proxy": "^3.0.0",
|
||||
"jest": "^29.7.0",
|
||||
"jest-canvas-mock": "^2.5.2",
|
||||
|
|
|
|||
783
package-lock.json
generated
783
package-lock.json
generated
File diff suppressed because it is too large
Load diff
|
|
@ -18,8 +18,9 @@
|
|||
"build:dev": "npm run clean && NODE_ENV=development rollup -c --bundleConfigAsCjs",
|
||||
"build:watch": "NODE_ENV=development rollup -c -w --bundleConfigAsCjs",
|
||||
"build:watch:prod": "rollup -c -w --bundleConfigAsCjs",
|
||||
"test": "jest --coverage --watch",
|
||||
"test:ci": "jest --coverage --ci",
|
||||
"test": "jest --coverage --watch --testPathIgnorePatterns=\"\\.integration\\.\"",
|
||||
"test:ci": "jest --coverage --ci --testPathIgnorePatterns=\"\\.integration\\.\"",
|
||||
"test:cache:integration": "jest --testPathPattern=\"src/cache/.*\\.integration\\.spec\\.ts$\" --coverage=false",
|
||||
"verify": "npm run test:ci",
|
||||
"b:clean": "bun run rimraf dist",
|
||||
"b:build": "bun run b:clean && bun run rollup -c --silent --bundleConfigAsCjs",
|
||||
|
|
@ -43,6 +44,7 @@
|
|||
"@babel/preset-env": "^7.21.5",
|
||||
"@babel/preset-react": "^7.18.6",
|
||||
"@babel/preset-typescript": "^7.21.0",
|
||||
"@keyv/redis": "^4.3.3",
|
||||
"@rollup/plugin-alias": "^5.1.0",
|
||||
"@rollup/plugin-commonjs": "^25.0.2",
|
||||
"@rollup/plugin-json": "^6.1.0",
|
||||
|
|
@ -52,16 +54,23 @@
|
|||
"@types/bun": "^1.2.15",
|
||||
"@types/diff": "^6.0.0",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/express-session": "^1.18.2",
|
||||
"@types/jest": "^29.5.2",
|
||||
"@types/jsonwebtoken": "^9.0.0",
|
||||
"@types/multer": "^1.4.13",
|
||||
"@types/node": "^20.3.0",
|
||||
"@types/react": "^18.2.18",
|
||||
"@types/winston": "^2.4.4",
|
||||
"connect-redis": "^8.1.0",
|
||||
"ioredis": "^5.3.2",
|
||||
"jest": "^29.5.0",
|
||||
"jest-junit": "^16.0.0",
|
||||
"keyv": "^5.3.2",
|
||||
"keyv-file": "^5.1.2",
|
||||
"librechat-data-provider": "*",
|
||||
"memorystore": "^1.6.7",
|
||||
"mongoose": "^8.12.1",
|
||||
"rate-limit-redis": "^4.2.0",
|
||||
"rimraf": "^5.0.1",
|
||||
"rollup": "^4.22.4",
|
||||
"rollup-plugin-peer-deps-external": "^2.2.4",
|
||||
|
|
@ -72,20 +81,27 @@
|
|||
"registry": "https://registry.npmjs.org/"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@keyv/redis": "^4.3.3",
|
||||
"@langchain/core": "^0.3.62",
|
||||
"@librechat/agents": "^2.4.82",
|
||||
"@librechat/data-schemas": "*",
|
||||
"@modelcontextprotocol/sdk": "^1.17.1",
|
||||
"axios": "^1.12.1",
|
||||
"connect-redis": "^8.1.0",
|
||||
"diff": "^7.0.0",
|
||||
"eventsource": "^3.0.2",
|
||||
"express": "^4.21.2",
|
||||
"express-session": "^1.18.2",
|
||||
"form-data": "^4.0.4",
|
||||
"ioredis": "^5.3.2",
|
||||
"js-yaml": "^4.1.0",
|
||||
"jsonwebtoken": "^9.0.0",
|
||||
"keyv": "^5.3.2",
|
||||
"keyv-file": "^5.1.2",
|
||||
"librechat-data-provider": "*",
|
||||
"memorystore": "^1.6.7",
|
||||
"node-fetch": "2.7.0",
|
||||
"rate-limit-redis": "^4.2.0",
|
||||
"tiktoken": "^1.0.15",
|
||||
"undici": "^7.10.0",
|
||||
"zod": "^3.22.4"
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ const plugins = [
|
|||
peerDepsExternal(),
|
||||
resolve({
|
||||
preferBuiltins: true,
|
||||
skipSelf: true,
|
||||
}),
|
||||
replace({
|
||||
__IS_DEV__: isDevelopment,
|
||||
|
|
|
|||
|
|
@ -1,12 +1,8 @@
|
|||
const fs = require('fs');
|
||||
|
||||
describe('cacheConfig', () => {
|
||||
let originalEnv;
|
||||
let originalReadFileSync;
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
|
||||
beforeEach(() => {
|
||||
originalEnv = { ...process.env };
|
||||
originalReadFileSync = fs.readFileSync;
|
||||
|
||||
// Clear all related env vars first
|
||||
delete process.env.REDIS_URI;
|
||||
|
|
@ -18,116 +14,116 @@ describe('cacheConfig', () => {
|
|||
delete process.env.REDIS_PING_INTERVAL;
|
||||
delete process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES;
|
||||
|
||||
// Clear require cache
|
||||
// Clear module cache
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = originalEnv;
|
||||
fs.readFileSync = originalReadFileSync;
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
describe('REDIS_KEY_PREFIX validation and resolution', () => {
|
||||
test('should throw error when both REDIS_KEY_PREFIX_VAR and REDIS_KEY_PREFIX are set', () => {
|
||||
test('should throw error when both REDIS_KEY_PREFIX_VAR and REDIS_KEY_PREFIX are set', async () => {
|
||||
process.env.REDIS_KEY_PREFIX_VAR = 'DEPLOYMENT_ID';
|
||||
process.env.REDIS_KEY_PREFIX = 'manual-prefix';
|
||||
|
||||
expect(() => {
|
||||
require('./cacheConfig');
|
||||
}).toThrow('Only either REDIS_KEY_PREFIX_VAR or REDIS_KEY_PREFIX can be set.');
|
||||
await expect(async () => {
|
||||
await import('../cacheConfig');
|
||||
}).rejects.toThrow('Only either REDIS_KEY_PREFIX_VAR or REDIS_KEY_PREFIX can be set.');
|
||||
});
|
||||
|
||||
test('should resolve REDIS_KEY_PREFIX from variable reference', () => {
|
||||
test('should resolve REDIS_KEY_PREFIX from variable reference', async () => {
|
||||
process.env.REDIS_KEY_PREFIX_VAR = 'DEPLOYMENT_ID';
|
||||
process.env.DEPLOYMENT_ID = 'test-deployment-123';
|
||||
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.REDIS_KEY_PREFIX).toBe('test-deployment-123');
|
||||
});
|
||||
|
||||
test('should use direct REDIS_KEY_PREFIX value', () => {
|
||||
test('should use direct REDIS_KEY_PREFIX value', async () => {
|
||||
process.env.REDIS_KEY_PREFIX = 'direct-prefix';
|
||||
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.REDIS_KEY_PREFIX).toBe('direct-prefix');
|
||||
});
|
||||
|
||||
test('should default to empty string when no prefix is configured', () => {
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
test('should default to empty string when no prefix is configured', async () => {
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.REDIS_KEY_PREFIX).toBe('');
|
||||
});
|
||||
|
||||
test('should handle empty variable reference', () => {
|
||||
test('should handle empty variable reference', async () => {
|
||||
process.env.REDIS_KEY_PREFIX_VAR = 'EMPTY_VAR';
|
||||
process.env.EMPTY_VAR = '';
|
||||
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.REDIS_KEY_PREFIX).toBe('');
|
||||
});
|
||||
|
||||
test('should handle undefined variable reference', () => {
|
||||
test('should handle undefined variable reference', async () => {
|
||||
process.env.REDIS_KEY_PREFIX_VAR = 'UNDEFINED_VAR';
|
||||
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.REDIS_KEY_PREFIX).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('USE_REDIS and REDIS_URI validation', () => {
|
||||
test('should throw error when USE_REDIS is enabled but REDIS_URI is not set', () => {
|
||||
test('should throw error when USE_REDIS is enabled but REDIS_URI is not set', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
|
||||
expect(() => {
|
||||
require('./cacheConfig');
|
||||
}).toThrow('USE_REDIS is enabled but REDIS_URI is not set.');
|
||||
await expect(async () => {
|
||||
await import('../cacheConfig');
|
||||
}).rejects.toThrow('USE_REDIS is enabled but REDIS_URI is not set.');
|
||||
});
|
||||
|
||||
test('should not throw error when USE_REDIS is enabled and REDIS_URI is set', () => {
|
||||
test('should not throw error when USE_REDIS is enabled and REDIS_URI is set', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.REDIS_URI = 'redis://localhost:6379';
|
||||
|
||||
expect(() => {
|
||||
require('./cacheConfig');
|
||||
}).not.toThrow();
|
||||
const importModule = async () => {
|
||||
await import('../cacheConfig');
|
||||
};
|
||||
await expect(importModule()).resolves.not.toThrow();
|
||||
});
|
||||
|
||||
test('should handle empty REDIS_URI when USE_REDIS is enabled', () => {
|
||||
test('should handle empty REDIS_URI when USE_REDIS is enabled', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.REDIS_URI = '';
|
||||
|
||||
expect(() => {
|
||||
require('./cacheConfig');
|
||||
}).toThrow('USE_REDIS is enabled but REDIS_URI is not set.');
|
||||
await expect(async () => {
|
||||
await import('../cacheConfig');
|
||||
}).rejects.toThrow('USE_REDIS is enabled but REDIS_URI is not set.');
|
||||
});
|
||||
});
|
||||
|
||||
describe('USE_REDIS_CLUSTER configuration', () => {
|
||||
test('should default to false when USE_REDIS_CLUSTER is not set', () => {
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
test('should default to false when USE_REDIS_CLUSTER is not set', async () => {
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.USE_REDIS_CLUSTER).toBe(false);
|
||||
});
|
||||
|
||||
test('should be false when USE_REDIS_CLUSTER is set to false', () => {
|
||||
test('should be false when USE_REDIS_CLUSTER is set to false', async () => {
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.USE_REDIS_CLUSTER).toBe(false);
|
||||
});
|
||||
|
||||
test('should be true when USE_REDIS_CLUSTER is set to true', () => {
|
||||
test('should be true when USE_REDIS_CLUSTER is set to true', async () => {
|
||||
process.env.USE_REDIS_CLUSTER = 'true';
|
||||
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.USE_REDIS_CLUSTER).toBe(true);
|
||||
});
|
||||
|
||||
test('should work with USE_REDIS enabled and REDIS_URI set', () => {
|
||||
test('should work with USE_REDIS enabled and REDIS_URI set', async () => {
|
||||
process.env.USE_REDIS_CLUSTER = 'true';
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.REDIS_URI = 'redis://localhost:6379';
|
||||
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.USE_REDIS_CLUSTER).toBe(true);
|
||||
expect(cacheConfig.USE_REDIS).toBe(true);
|
||||
expect(cacheConfig.REDIS_URI).toBe('redis://localhost:6379');
|
||||
|
|
@ -135,54 +131,51 @@ describe('cacheConfig', () => {
|
|||
});
|
||||
|
||||
describe('REDIS_CA file reading', () => {
|
||||
test('should be null when REDIS_CA is not set', () => {
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
test('should be null when REDIS_CA is not set', async () => {
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.REDIS_CA).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('REDIS_PING_INTERVAL configuration', () => {
|
||||
test('should default to 0 when REDIS_PING_INTERVAL is not set', () => {
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
test('should default to 0 when REDIS_PING_INTERVAL is not set', async () => {
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.REDIS_PING_INTERVAL).toBe(0);
|
||||
});
|
||||
|
||||
test('should use provided REDIS_PING_INTERVAL value', () => {
|
||||
test('should use provided REDIS_PING_INTERVAL value', async () => {
|
||||
process.env.REDIS_PING_INTERVAL = '300';
|
||||
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.REDIS_PING_INTERVAL).toBe(300);
|
||||
});
|
||||
});
|
||||
|
||||
describe('FORCED_IN_MEMORY_CACHE_NAMESPACES validation', () => {
|
||||
test('should parse comma-separated cache keys correctly', () => {
|
||||
test('should parse comma-separated cache keys correctly', async () => {
|
||||
process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = ' ROLES, MESSAGES ';
|
||||
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([
|
||||
'ROLES',
|
||||
'MESSAGES',
|
||||
]);
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual(['ROLES', 'MESSAGES']);
|
||||
});
|
||||
|
||||
test('should throw error for invalid cache keys', () => {
|
||||
test('should throw error for invalid cache keys', async () => {
|
||||
process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = 'INVALID_KEY,ROLES';
|
||||
|
||||
expect(() => {
|
||||
require('./cacheConfig');
|
||||
}).toThrow('Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: INVALID_KEY');
|
||||
await expect(async () => {
|
||||
await import('../cacheConfig');
|
||||
}).rejects.toThrow('Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: INVALID_KEY');
|
||||
});
|
||||
|
||||
test('should handle empty string gracefully', () => {
|
||||
test('should handle empty string gracefully', async () => {
|
||||
process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = '';
|
||||
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]);
|
||||
});
|
||||
|
||||
test('should handle undefined env var gracefully', () => {
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
test('should handle undefined env var gracefully', async () => {
|
||||
const { cacheConfig } = await import('../cacheConfig');
|
||||
expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]);
|
||||
});
|
||||
});
|
||||
113
packages/api/src/cache/__tests__/cacheFactory/limiterCache.integration.spec.ts
vendored
Normal file
113
packages/api/src/cache/__tests__/cacheFactory/limiterCache.integration.spec.ts
vendored
Normal file
|
|
@ -0,0 +1,113 @@
|
|||
import type { RedisStore } from 'rate-limit-redis';
|
||||
|
||||
describe('limiterCache', () => {
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
let testStore: RedisStore | undefined = undefined;
|
||||
|
||||
beforeEach(() => {
|
||||
originalEnv = { ...process.env };
|
||||
|
||||
// Clear cache-related env vars
|
||||
delete process.env.USE_REDIS;
|
||||
delete process.env.REDIS_URI;
|
||||
delete process.env.USE_REDIS_CLUSTER;
|
||||
delete process.env.REDIS_PING_INTERVAL;
|
||||
delete process.env.REDIS_KEY_PREFIX;
|
||||
|
||||
// Set test configuration
|
||||
process.env.REDIS_PING_INTERVAL = '0';
|
||||
process.env.REDIS_KEY_PREFIX = 'Cache-Integration-Test';
|
||||
process.env.REDIS_RETRY_MAX_ATTEMPTS = '5';
|
||||
|
||||
// Clear require cache to reload modules
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
process.env = originalEnv;
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
test('should throw error when prefix is not provided', async () => {
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
expect(() => cacheFactory.limiterCache('')).toThrow('prefix is required');
|
||||
});
|
||||
|
||||
test('should return undefined when USE_REDIS is false', async () => {
|
||||
process.env.USE_REDIS = 'false';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
testStore = cacheFactory.limiterCache('test-limiter');
|
||||
|
||||
expect(testStore).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should return RedisStore with sendCommand when USE_REDIS is true', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
const redisClients = await import('../../redisClients');
|
||||
const { ioredisClient } = redisClients;
|
||||
testStore = cacheFactory.limiterCache('test-limiter');
|
||||
|
||||
// Wait for Redis connection to be ready
|
||||
if (ioredisClient && ioredisClient.status !== 'ready') {
|
||||
await new Promise<void>((resolve) => {
|
||||
ioredisClient.once('ready', resolve);
|
||||
});
|
||||
}
|
||||
|
||||
// Verify it returns a RedisStore instance
|
||||
expect(testStore).toBeDefined();
|
||||
expect(testStore!.constructor.name).toBe('RedisStore');
|
||||
expect(testStore!.prefix).toBe('test-limiter:');
|
||||
expect(typeof testStore!.sendCommand).toBe('function');
|
||||
|
||||
const testKey = 'user:123';
|
||||
|
||||
// SET operation
|
||||
await testStore!.sendCommand('SET', testKey, '1', 'EX', '60');
|
||||
|
||||
// Verify the key was created WITHOUT prefix using ioredis
|
||||
// Note: Using call method since get method seems to have issues in test environment
|
||||
// Type assertion for ioredis call method
|
||||
type RedisClientWithCall = typeof ioredisClient & {
|
||||
call: (command: string, key: string) => Promise<string | null>;
|
||||
};
|
||||
const directValue = await (ioredisClient as RedisClientWithCall).call('GET', testKey);
|
||||
|
||||
expect(directValue).toBe('1');
|
||||
|
||||
// GET operation
|
||||
const value = await testStore!.sendCommand('GET', testKey);
|
||||
expect(value).toBe('1');
|
||||
|
||||
// INCR operation
|
||||
const incremented = await testStore!.sendCommand('INCR', testKey);
|
||||
expect(incremented).toBe(2);
|
||||
|
||||
// Verify increment worked with ioredis
|
||||
const incrementedValue = await (ioredisClient as RedisClientWithCall).call('GET', testKey);
|
||||
expect(incrementedValue).toBe('2');
|
||||
|
||||
// TTL operation
|
||||
const ttl = (await testStore!.sendCommand('TTL', testKey)) as number;
|
||||
expect(ttl).toBeGreaterThan(0);
|
||||
expect(ttl).toBeLessThanOrEqual(60);
|
||||
|
||||
// DEL operation
|
||||
const deleted = await testStore!.sendCommand('DEL', testKey);
|
||||
expect(deleted).toBe(1);
|
||||
|
||||
// Verify deletion
|
||||
const afterDelete = await testStore!.sendCommand('GET', testKey);
|
||||
expect(afterDelete).toBeNull();
|
||||
const directAfterDelete = await ioredisClient!.get(testKey);
|
||||
expect(directAfterDelete).toBeNull();
|
||||
|
||||
// Test error handling
|
||||
await expect(testStore!.sendCommand('INVALID_COMMAND')).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
211
packages/api/src/cache/__tests__/cacheFactory/sessionCache.integration.spec.ts
vendored
Normal file
211
packages/api/src/cache/__tests__/cacheFactory/sessionCache.integration.spec.ts
vendored
Normal file
|
|
@ -0,0 +1,211 @@
|
|||
interface SessionData {
|
||||
[key: string]: unknown;
|
||||
cookie?: { maxAge: number };
|
||||
user?: { id: string; name: string };
|
||||
userId?: string;
|
||||
}
|
||||
|
||||
interface SessionStore {
|
||||
prefix?: string;
|
||||
set: (id: string, data: SessionData, callback?: (err?: Error) => void) => void;
|
||||
get: (id: string, callback: (err: Error | null, data?: SessionData | null) => void) => void;
|
||||
destroy: (id: string, callback?: (err?: Error) => void) => void;
|
||||
touch: (id: string, data: SessionData, callback?: (err?: Error) => void) => void;
|
||||
on?: (event: string, handler: (...args: unknown[]) => void) => void;
|
||||
}
|
||||
|
||||
describe('sessionCache', () => {
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
|
||||
// Helper to make session stores async
|
||||
const asyncStore = (store: SessionStore) => ({
|
||||
set: (id: string, data: SessionData) =>
|
||||
new Promise<void>((resolve) => store.set(id, data, () => resolve())),
|
||||
get: (id: string) =>
|
||||
new Promise<SessionData | null | undefined>((resolve) =>
|
||||
store.get(id, (_, data) => resolve(data)),
|
||||
),
|
||||
destroy: (id: string) => new Promise<void>((resolve) => store.destroy(id, () => resolve())),
|
||||
touch: (id: string, data: SessionData) =>
|
||||
new Promise<void>((resolve) => store.touch(id, data, () => resolve())),
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
originalEnv = { ...process.env };
|
||||
|
||||
// Clear cache-related env vars
|
||||
delete process.env.USE_REDIS;
|
||||
delete process.env.REDIS_URI;
|
||||
delete process.env.USE_REDIS_CLUSTER;
|
||||
delete process.env.REDIS_PING_INTERVAL;
|
||||
delete process.env.REDIS_KEY_PREFIX;
|
||||
|
||||
// Set test configuration
|
||||
process.env.REDIS_PING_INTERVAL = '0';
|
||||
process.env.REDIS_KEY_PREFIX = 'Cache-Integration-Test';
|
||||
process.env.REDIS_RETRY_MAX_ATTEMPTS = '5';
|
||||
|
||||
// Clear require cache to reload modules
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
process.env = originalEnv;
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
test('should return ConnectRedis store when USE_REDIS is true', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
const redisClients = await import('../../redisClients');
|
||||
const { ioredisClient } = redisClients;
|
||||
const store = cacheFactory.sessionCache('test-sessions', 3600);
|
||||
|
||||
// Wait for Redis connection to be ready
|
||||
if (ioredisClient && ioredisClient.status !== 'ready') {
|
||||
await new Promise<void>((resolve) => {
|
||||
ioredisClient.once('ready', resolve);
|
||||
});
|
||||
}
|
||||
|
||||
// Verify it returns a ConnectRedis instance
|
||||
expect(store).toBeDefined();
|
||||
expect(store.constructor.name).toBe('RedisStore');
|
||||
expect(store.prefix).toBe('test-sessions:');
|
||||
|
||||
// Test session operations
|
||||
const sessionId = 'sess:123456';
|
||||
const sessionData: SessionData = {
|
||||
user: { id: 'user123', name: 'Test User' },
|
||||
cookie: { maxAge: 3600000 },
|
||||
};
|
||||
|
||||
const async = asyncStore(store);
|
||||
|
||||
// Set session
|
||||
await async.set(sessionId, sessionData);
|
||||
|
||||
// Get session
|
||||
const retrieved = await async.get(sessionId);
|
||||
expect(retrieved).toEqual(sessionData);
|
||||
|
||||
// Touch session (update expiry)
|
||||
await async.touch(sessionId, sessionData);
|
||||
|
||||
// Destroy session
|
||||
await async.destroy(sessionId);
|
||||
|
||||
// Verify deletion
|
||||
const afterDelete = await async.get(sessionId);
|
||||
expect(afterDelete).toBeNull();
|
||||
});
|
||||
|
||||
test('should return MemoryStore when USE_REDIS is false', async () => {
|
||||
process.env.USE_REDIS = 'false';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
const store = cacheFactory.sessionCache('test-sessions', 3600);
|
||||
|
||||
// Verify it returns a MemoryStore instance
|
||||
expect(store).toBeDefined();
|
||||
expect(store.constructor.name).toBe('MemoryStore');
|
||||
|
||||
// Test session operations
|
||||
const sessionId = 'mem:789012';
|
||||
const sessionData: SessionData = {
|
||||
user: { id: 'user456', name: 'Memory User' },
|
||||
cookie: { maxAge: 3600000 },
|
||||
};
|
||||
|
||||
const async = asyncStore(store);
|
||||
|
||||
// Set session
|
||||
await async.set(sessionId, sessionData);
|
||||
|
||||
// Get session
|
||||
const retrieved = await async.get(sessionId);
|
||||
expect(retrieved).toEqual(sessionData);
|
||||
|
||||
// Destroy session
|
||||
await async.destroy(sessionId);
|
||||
|
||||
// Verify deletion
|
||||
const afterDelete = await async.get(sessionId);
|
||||
expect(afterDelete).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should handle namespace with and without trailing colon', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
|
||||
const store1 = cacheFactory.sessionCache('namespace1');
|
||||
const store2 = cacheFactory.sessionCache('namespace2:');
|
||||
|
||||
expect(store1.prefix).toBe('namespace1:');
|
||||
expect(store2.prefix).toBe('namespace2:');
|
||||
});
|
||||
|
||||
test('should register error handler for Redis connection', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
const redisClients = await import('../../redisClients');
|
||||
const { ioredisClient } = redisClients;
|
||||
|
||||
// Spy on ioredisClient.on
|
||||
const onSpy = jest.spyOn(ioredisClient!, 'on');
|
||||
|
||||
// Create session store
|
||||
cacheFactory.sessionCache('error-test');
|
||||
|
||||
// Verify error handler was registered
|
||||
expect(onSpy).toHaveBeenCalledWith('error', expect.any(Function));
|
||||
|
||||
onSpy.mockRestore();
|
||||
});
|
||||
|
||||
test('should handle session expiration with TTL', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
const redisClients = await import('../../redisClients');
|
||||
const { ioredisClient } = redisClients;
|
||||
const ttl = 1; // 1 second TTL
|
||||
const store = cacheFactory.sessionCache('ttl-sessions', ttl);
|
||||
|
||||
// Wait for Redis connection to be ready
|
||||
if (ioredisClient && ioredisClient.status !== 'ready') {
|
||||
await new Promise<void>((resolve) => {
|
||||
ioredisClient.once('ready', resolve);
|
||||
});
|
||||
}
|
||||
|
||||
const sessionId = 'ttl:12345';
|
||||
const sessionData: SessionData = { userId: 'ttl-user' };
|
||||
const async = asyncStore(store);
|
||||
|
||||
// Set session with short TTL
|
||||
await async.set(sessionId, sessionData);
|
||||
|
||||
// Verify session exists immediately
|
||||
const immediate = await async.get(sessionId);
|
||||
expect(immediate).toEqual(sessionData);
|
||||
|
||||
// Wait for TTL to expire
|
||||
await new Promise((resolve) => setTimeout(resolve, (ttl + 0.5) * 1000));
|
||||
|
||||
// Verify session has expired
|
||||
const expired = await async.get(sessionId);
|
||||
expect(expired).toBeNull();
|
||||
});
|
||||
});
|
||||
185
packages/api/src/cache/__tests__/cacheFactory/standardCache.integration.spec.ts
vendored
Normal file
185
packages/api/src/cache/__tests__/cacheFactory/standardCache.integration.spec.ts
vendored
Normal file
|
|
@ -0,0 +1,185 @@
|
|||
import type { Keyv } from 'keyv';
|
||||
|
||||
// Mock GLOBAL_PREFIX_SEPARATOR
|
||||
jest.mock('../../redisClients', () => {
|
||||
const originalModule = jest.requireActual('../../redisClients');
|
||||
return {
|
||||
...originalModule,
|
||||
GLOBAL_PREFIX_SEPARATOR: '>>',
|
||||
};
|
||||
});
|
||||
|
||||
describe('standardCache', () => {
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
let testCache: Keyv | null = null;
|
||||
|
||||
// Helper function to verify Redis keys exist
|
||||
const expectRedisKeysExist = async (expectedKeys: string[]) => {
|
||||
const redisClients = await import('../../redisClients');
|
||||
const { ioredisClient } = redisClients;
|
||||
if (!ioredisClient) throw new Error('ioredisClient is null');
|
||||
const allKeys = await ioredisClient.keys('Cache-Integration-Test*');
|
||||
expectedKeys.forEach((expectedKey) => {
|
||||
expect(allKeys).toContain(expectedKey);
|
||||
});
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
originalEnv = { ...process.env };
|
||||
|
||||
// Clear cache-related env vars
|
||||
delete process.env.USE_REDIS;
|
||||
delete process.env.REDIS_URI;
|
||||
delete process.env.USE_REDIS_CLUSTER;
|
||||
delete process.env.REDIS_PING_INTERVAL;
|
||||
delete process.env.REDIS_KEY_PREFIX;
|
||||
delete process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES;
|
||||
|
||||
// Set test configuration
|
||||
process.env.REDIS_PING_INTERVAL = '0';
|
||||
process.env.REDIS_KEY_PREFIX = 'Cache-Integration-Test';
|
||||
process.env.REDIS_RETRY_MAX_ATTEMPTS = '5';
|
||||
|
||||
// Clear require cache to reload modules
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Clean up test keys using prefix and test namespaces
|
||||
const redisClients = await import('../../redisClients');
|
||||
const { ioredisClient } = redisClients;
|
||||
if (ioredisClient && ioredisClient.status === 'ready') {
|
||||
try {
|
||||
const patterns = [
|
||||
'Cache-Integration-Test>>*',
|
||||
'Cache-Integration-Test>>test-namespace:*',
|
||||
'Cache-Integration-Test>>another-namespace:*',
|
||||
];
|
||||
|
||||
for (const pattern of patterns) {
|
||||
const keys = await ioredisClient.keys(pattern);
|
||||
if (keys.length > 0) {
|
||||
await ioredisClient.del(...keys);
|
||||
}
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
console.warn('Error cleaning up test keys:', error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up cache instance
|
||||
if (testCache) {
|
||||
try {
|
||||
await testCache.clear();
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
console.warn('Error clearing cache:', error.message);
|
||||
}
|
||||
}
|
||||
testCache = null;
|
||||
}
|
||||
|
||||
process.env = originalEnv;
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
describe('when USE_REDIS is false', () => {
|
||||
test('should create in-memory cache', async () => {
|
||||
process.env.USE_REDIS = 'false';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
testCache = cacheFactory.standardCache('test-namespace');
|
||||
|
||||
expect(testCache).toBeDefined();
|
||||
expect(testCache.constructor.name).toBe('Keyv');
|
||||
});
|
||||
|
||||
test('should use fallback store when provided', async () => {
|
||||
process.env.USE_REDIS = 'false';
|
||||
const fallbackStore = new Map();
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
testCache = cacheFactory.standardCache('test-namespace', 200, fallbackStore);
|
||||
|
||||
expect(testCache).toBeDefined();
|
||||
// Type assertion to access internal options
|
||||
const cacheWithOpts = testCache as Keyv & {
|
||||
opts: { store: unknown; namespace: string; ttl: number };
|
||||
};
|
||||
expect(cacheWithOpts.opts.store).toBe(fallbackStore);
|
||||
expect(cacheWithOpts.opts.namespace).toBe('test-namespace');
|
||||
expect(cacheWithOpts.opts.ttl).toBe(200);
|
||||
});
|
||||
});
|
||||
|
||||
// Integration tests for cacheFactory.standardCache against a live Redis server
// at redis://127.0.0.1:6379 (started by the cache-integration-tests CI workflow).
// NOTE(review): relies on `expectRedisKeysExist`, `testCache`, and the `Keyv`
// type being declared earlier in this spec file — confirm against the full file.
describe('when connecting to a Redis server', () => {
  test('should handle different namespaces with correct prefixes', async () => {
    // Point the freshly reset modules at the single-node test server.
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    // Dynamic import so the module re-reads the env vars set above
    // (presumably jest.resetModules() ran in beforeEach — confirm).
    const cacheFactory = await import('../../cacheFactory');

    const cache1 = cacheFactory.standardCache('namespace-one');
    const cache2 = cacheFactory.standardCache('namespace-two');

    await cache1.set('key1', 'value1');
    await cache2.set('key2', 'value2');

    // Verify both caches work independently
    expect(await cache1.get('key1')).toBe('value1');
    expect(await cache2.get('key2')).toBe('value2');
    expect(await cache1.get('key2')).toBeUndefined();
    expect(await cache2.get('key1')).toBeUndefined();

    // Verify Redis keys have correct prefixes for different namespaces:
    // global prefix + '>>' separator + per-cache namespace + ':' + key.
    await expectRedisKeysExist([
      'Cache-Integration-Test>>namespace-one:key1',
      'Cache-Integration-Test>>namespace-two:key2',
    ]);

    await cache1.clear();
    await cache2.clear();
  });

  test('should respect FORCED_IN_MEMORY_CACHE_NAMESPACES', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';
    process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = 'ROLES'; // Use a valid cache key

    const cacheFactory = await import('../../cacheFactory');

    // This should create an in-memory cache despite USE_REDIS being true
    testCache = cacheFactory.standardCache('ROLES', 5000);

    expect(testCache).toBeDefined();
    expect(testCache.constructor.name).toBe('Keyv');
    // Type assertion to access internal options
    const cacheWithOpts = testCache as Keyv & { opts: { namespace: string; ttl: number } };
    expect(cacheWithOpts.opts.namespace).toBe('ROLES');
    expect(cacheWithOpts.opts.ttl).toBe(5000);
  });

  test('should handle TTL correctly', async () => {
    process.env.USE_REDIS = 'true';
    process.env.USE_REDIS_CLUSTER = 'false';
    process.env.REDIS_URI = 'redis://127.0.0.1:6379';

    const cacheFactory = await import('../../cacheFactory');
    testCache = cacheFactory.standardCache('ttl-test', 1000); // 1 second TTL

    const testKey = 'ttl-key';
    const testValue = 'ttl-value';

    await testCache.set(testKey, testValue);
    expect(await testCache.get(testKey)).toBe(testValue);

    // Wait for TTL to expire (1100ms > the 1000ms TTL configured above)
    await new Promise((resolve) => setTimeout(resolve, 1100));
    expect(await testCache.get(testKey)).toBeUndefined();
  });
});
|
||||
});
|
||||
241
packages/api/src/cache/__tests__/cacheFactory/violationCache.integration.spec.ts
vendored
Normal file
241
packages/api/src/cache/__tests__/cacheFactory/violationCache.integration.spec.ts
vendored
Normal file
|
|
@ -0,0 +1,241 @@
|
|||
/**
 * Shape of the values stored by the violation cache in these tests.
 * Every field is optional so one interface covers both the simple counter
 * payloads and the deeply nested "complex" payload used below.
 */
interface ViolationData {
  // Number of recorded violations for a user.
  count?: number;
  // Epoch-milliseconds timestamp of the violation.
  timestamp?: number;
  // Numeric marker used only by the namespace-isolation test.
  namespace?: number;
  // Arbitrary string payload used by the TTL test.
  data?: string;
  userId?: string;
  violations?: Array<{
    type: string;
    timestamp: number;
    severity: string;
  }>;
  // Request metadata; `nested.deep` exists to exercise deep serialization.
  metadata?: {
    ip: string;
    userAgent: string;
    nested: {
      deep: {
        value: string;
      };
    };
  };
}
|
||||
|
||||
describe('violationCache', () => {
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
|
||||
beforeEach(() => {
|
||||
originalEnv = { ...process.env };
|
||||
|
||||
// Clear cache-related env vars
|
||||
delete process.env.USE_REDIS;
|
||||
delete process.env.REDIS_URI;
|
||||
delete process.env.USE_REDIS_CLUSTER;
|
||||
delete process.env.REDIS_PING_INTERVAL;
|
||||
delete process.env.REDIS_KEY_PREFIX;
|
||||
|
||||
// Set test configuration
|
||||
process.env.REDIS_PING_INTERVAL = '0';
|
||||
process.env.REDIS_KEY_PREFIX = 'Cache-Integration-Test';
|
||||
process.env.REDIS_RETRY_MAX_ATTEMPTS = '5';
|
||||
|
||||
// Clear require cache to reload modules
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
process.env = originalEnv;
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
test('should create violation cache with Redis when USE_REDIS is true', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
const redisClients = await import('../../redisClients');
|
||||
const { ioredisClient } = redisClients;
|
||||
const cache = cacheFactory.violationCache('test-violations', 60000); // 60 second TTL
|
||||
|
||||
// Wait for Redis connection to be ready
|
||||
if (ioredisClient && ioredisClient.status !== 'ready') {
|
||||
await new Promise<void>((resolve) => {
|
||||
ioredisClient.once('ready', resolve);
|
||||
});
|
||||
}
|
||||
|
||||
// Verify it returns a Keyv instance
|
||||
expect(cache).toBeDefined();
|
||||
expect(cache.constructor.name).toBe('Keyv');
|
||||
|
||||
// Test basic cache operations
|
||||
const testKey = 'user:456:violation';
|
||||
const testValue: ViolationData = { count: 1, timestamp: Date.now() };
|
||||
|
||||
// SET operation
|
||||
await cache.set(testKey, testValue);
|
||||
|
||||
// GET operation
|
||||
const retrievedValue = await cache.get(testKey);
|
||||
expect(retrievedValue).toEqual(testValue);
|
||||
|
||||
// DELETE operation
|
||||
const deleted = await cache.delete(testKey);
|
||||
expect(deleted).toBe(true);
|
||||
|
||||
// Verify deletion
|
||||
const afterDelete = await cache.get(testKey);
|
||||
expect(afterDelete).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should use fallback store when USE_REDIS is false', async () => {
|
||||
process.env.USE_REDIS = 'false';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
const cache = cacheFactory.violationCache('test-violations');
|
||||
|
||||
// Verify it returns a Keyv instance
|
||||
expect(cache).toBeDefined();
|
||||
expect(cache.constructor.name).toBe('Keyv');
|
||||
|
||||
// Test basic operations with fallback store
|
||||
const testKey = 'user:789:violation';
|
||||
const testValue: ViolationData = { count: 2, timestamp: Date.now() };
|
||||
|
||||
// SET operation
|
||||
await cache.set(testKey, testValue);
|
||||
|
||||
// GET operation
|
||||
const retrievedValue = await cache.get(testKey);
|
||||
expect(retrievedValue).toEqual(testValue);
|
||||
|
||||
// DELETE operation
|
||||
const deleted = await cache.delete(testKey);
|
||||
expect(deleted).toBe(true);
|
||||
|
||||
// Verify deletion
|
||||
const afterDelete = await cache.get(testKey);
|
||||
expect(afterDelete).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should respect namespace prefixing', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
const redisClients = await import('../../redisClients');
|
||||
const { ioredisClient } = redisClients;
|
||||
const cache1 = cacheFactory.violationCache('namespace1');
|
||||
const cache2 = cacheFactory.violationCache('namespace2');
|
||||
|
||||
// Wait for Redis connection to be ready
|
||||
if (ioredisClient && ioredisClient.status !== 'ready') {
|
||||
await new Promise<void>((resolve) => {
|
||||
ioredisClient.once('ready', resolve);
|
||||
});
|
||||
}
|
||||
|
||||
const testKey = 'shared-key';
|
||||
const value1: ViolationData = { namespace: 1 };
|
||||
const value2: ViolationData = { namespace: 2 };
|
||||
|
||||
// Set same key in different namespaces
|
||||
await cache1.set(testKey, value1);
|
||||
await cache2.set(testKey, value2);
|
||||
|
||||
// Verify namespace isolation
|
||||
const retrieved1 = await cache1.get(testKey);
|
||||
const retrieved2 = await cache2.get(testKey);
|
||||
|
||||
expect(retrieved1).toEqual(value1);
|
||||
expect(retrieved2).toEqual(value2);
|
||||
|
||||
// Clean up
|
||||
await cache1.delete(testKey);
|
||||
await cache2.delete(testKey);
|
||||
});
|
||||
|
||||
test('should respect TTL settings', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
const redisClients = await import('../../redisClients');
|
||||
const { ioredisClient } = redisClients;
|
||||
const ttl = 1000; // 1 second TTL
|
||||
const cache = cacheFactory.violationCache('ttl-test', ttl);
|
||||
|
||||
// Wait for Redis connection to be ready
|
||||
if (ioredisClient && ioredisClient.status !== 'ready') {
|
||||
await new Promise<void>((resolve) => {
|
||||
ioredisClient.once('ready', resolve);
|
||||
});
|
||||
}
|
||||
|
||||
const testKey = 'ttl-key';
|
||||
const testValue: ViolationData = { data: 'expires soon' };
|
||||
|
||||
// Set value with TTL
|
||||
await cache.set(testKey, testValue);
|
||||
|
||||
// Verify value exists immediately
|
||||
const immediate = await cache.get(testKey);
|
||||
expect(immediate).toEqual(testValue);
|
||||
|
||||
// Wait for TTL to expire
|
||||
await new Promise((resolve) => setTimeout(resolve, ttl + 100));
|
||||
|
||||
// Verify value has expired
|
||||
const expired = await cache.get(testKey);
|
||||
expect(expired).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should handle complex violation data structures', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const cacheFactory = await import('../../cacheFactory');
|
||||
const redisClients = await import('../../redisClients');
|
||||
const { ioredisClient } = redisClients;
|
||||
const cache = cacheFactory.violationCache('complex-violations');
|
||||
|
||||
// Wait for Redis connection to be ready
|
||||
if (ioredisClient && ioredisClient.status !== 'ready') {
|
||||
await new Promise<void>((resolve) => {
|
||||
ioredisClient.once('ready', resolve);
|
||||
});
|
||||
}
|
||||
|
||||
const complexData: ViolationData = {
|
||||
userId: 'user123',
|
||||
violations: [
|
||||
{ type: 'rate_limit', timestamp: Date.now(), severity: 'warning' },
|
||||
{ type: 'spam', timestamp: Date.now() - 1000, severity: 'critical' },
|
||||
],
|
||||
metadata: {
|
||||
ip: '192.168.1.1',
|
||||
userAgent: 'Mozilla/5.0',
|
||||
nested: {
|
||||
deep: {
|
||||
value: 'test',
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const key = 'complex-violation-data';
|
||||
|
||||
// Store complex data
|
||||
await cache.set(key, complexData);
|
||||
|
||||
// Retrieve and verify
|
||||
const retrieved = await cache.get(key);
|
||||
expect(retrieved).toEqual(complexData);
|
||||
|
||||
// Clean up
|
||||
await cache.delete(key);
|
||||
});
|
||||
});
|
||||
168
packages/api/src/cache/__tests__/redisClients.integration.spec.ts
vendored
Normal file
168
packages/api/src/cache/__tests__/redisClients.integration.spec.ts
vendored
Normal file
|
|
@ -0,0 +1,168 @@
|
|||
import type { Redis, Cluster } from 'ioredis';
|
||||
import type { RedisClientType, RedisClusterType } from '@redis/client';
|
||||
|
||||
/** Any client produced by redisClients: node-redis or ioredis, single-node or cluster. */
type RedisClient = RedisClientType | RedisClusterType | Redis | Cluster;
|
||||
|
||||
describe('redisClients Integration Tests', () => {
|
||||
let originalEnv: NodeJS.ProcessEnv;
|
||||
let ioredisClient: Redis | Cluster | null = null;
|
||||
let keyvRedisClient: RedisClientType | RedisClusterType | null = null;
|
||||
|
||||
// Helper function to test set/get/delete operations
|
||||
const testRedisOperations = async (client: RedisClient, keyPrefix: string): Promise<void> => {
|
||||
// Wait cluster to fully initialize
|
||||
await new Promise((resolve) => setTimeout(resolve, 1000));
|
||||
|
||||
const testKey = `${keyPrefix}-test-key`;
|
||||
const testValue = `${keyPrefix}-test-value`;
|
||||
|
||||
// Test set operation
|
||||
await client.set(testKey, testValue);
|
||||
|
||||
// Test get operation
|
||||
const result = await client.get(testKey);
|
||||
expect(result).toBe(testValue);
|
||||
|
||||
// Test delete operation
|
||||
const deleteResult = await client.del(testKey);
|
||||
expect(deleteResult).toBe(1);
|
||||
|
||||
// Verify key is deleted
|
||||
const deletedResult = await client.get(testKey);
|
||||
expect(deletedResult).toBeNull();
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
originalEnv = { ...process.env };
|
||||
|
||||
// Clear Redis-related env vars
|
||||
delete process.env.USE_REDIS;
|
||||
delete process.env.REDIS_URI;
|
||||
delete process.env.USE_REDIS_CLUSTER;
|
||||
delete process.env.REDIS_PING_INTERVAL;
|
||||
delete process.env.REDIS_KEY_PREFIX;
|
||||
|
||||
// Set common test configuration
|
||||
process.env.REDIS_PING_INTERVAL = '0';
|
||||
process.env.REDIS_KEY_PREFIX = 'Redis-Integration-Test';
|
||||
process.env.REDIS_RETRY_MAX_ATTEMPTS = '5';
|
||||
process.env.REDIS_PING_INTERVAL = '1000';
|
||||
|
||||
// Clear module cache to reload module
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
// Clean up test keys using the prefix
|
||||
if (ioredisClient && ioredisClient.status === 'ready') {
|
||||
try {
|
||||
const keys = await ioredisClient.keys('Redis-Integration-Test::*');
|
||||
if (keys.length > 0) {
|
||||
await ioredisClient.del(...keys);
|
||||
}
|
||||
} catch (error: any) {
|
||||
console.warn('Error cleaning up test keys:', error.message);
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup Redis connections
|
||||
if (ioredisClient) {
|
||||
try {
|
||||
if (ioredisClient.status === 'ready') {
|
||||
ioredisClient.disconnect();
|
||||
}
|
||||
} catch (error: any) {
|
||||
console.warn('Error disconnecting ioredis client:', error.message);
|
||||
}
|
||||
ioredisClient = null;
|
||||
}
|
||||
|
||||
if (keyvRedisClient) {
|
||||
try {
|
||||
// Try to disconnect - keyv/redis client doesn't have an isReady property
|
||||
await keyvRedisClient.disconnect();
|
||||
} catch (error: any) {
|
||||
console.warn('Error disconnecting keyv redis client:', error.message);
|
||||
}
|
||||
keyvRedisClient = null;
|
||||
}
|
||||
|
||||
process.env = originalEnv;
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
describe('ioredis Client Tests', () => {
|
||||
describe('when USE_REDIS is false', () => {
|
||||
test('should have null client', async () => {
|
||||
process.env.USE_REDIS = 'false';
|
||||
|
||||
const clients = await import('../redisClients');
|
||||
ioredisClient = clients.ioredisClient;
|
||||
|
||||
expect(ioredisClient).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when connecting to a Redis instance', () => {
|
||||
test('should connect and perform set/get/delete operations', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const clients = await import('../redisClients');
|
||||
ioredisClient = clients.ioredisClient;
|
||||
await testRedisOperations(ioredisClient!, 'ioredis-single');
|
||||
});
|
||||
});
|
||||
|
||||
describe('when connecting to a Redis cluster', () => {
|
||||
test('should connect to cluster and perform set/get/delete operations', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'true';
|
||||
process.env.REDIS_URI =
|
||||
'redis://127.0.0.1:7001,redis://127.0.0.1:7002,redis://127.0.0.1:7003';
|
||||
|
||||
const clients = await import('../redisClients');
|
||||
ioredisClient = clients.ioredisClient;
|
||||
await testRedisOperations(ioredisClient!, 'ioredis-cluster');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('keyvRedisClient Tests', () => {
|
||||
describe('when USE_REDIS is false', () => {
|
||||
test('should have null client', async () => {
|
||||
process.env.USE_REDIS = 'false';
|
||||
|
||||
const clients = await import('../redisClients');
|
||||
keyvRedisClient = clients.keyvRedisClient;
|
||||
expect(keyvRedisClient).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when connecting to a Redis instance', () => {
|
||||
test('should connect and perform set/get/delete operations', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'false';
|
||||
process.env.REDIS_URI = 'redis://127.0.0.1:6379';
|
||||
|
||||
const clients = await import('../redisClients');
|
||||
keyvRedisClient = clients.keyvRedisClient;
|
||||
await testRedisOperations(keyvRedisClient!, 'keyv-single');
|
||||
});
|
||||
});
|
||||
|
||||
describe('when connecting to a Redis cluster', () => {
|
||||
test('should connect to cluster and perform set/get/delete operations', async () => {
|
||||
process.env.USE_REDIS = 'true';
|
||||
process.env.USE_REDIS_CLUSTER = 'true';
|
||||
process.env.REDIS_URI =
|
||||
'redis://127.0.0.1:7001,redis://127.0.0.1:7002,redis://127.0.0.1:7003';
|
||||
|
||||
const clients = await import('../redisClients');
|
||||
keyvRedisClient = clients.keyvRedisClient;
|
||||
await testRedisOperations(keyvRedisClient!, 'keyv-cluster');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
const fs = require('fs');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { math, isEnabled } = require('@librechat/api');
|
||||
const { CacheKeys } = require('librechat-data-provider');
|
||||
import { readFileSync, existsSync } from 'fs';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import { CacheKeys } from 'librechat-data-provider';
|
||||
import { math, isEnabled } from '~/utils';
|
||||
|
||||
// To ensure that different deployments do not interfere with each other's cache, we use a prefix for the Redis keys.
|
||||
// This prefix is usually the deployment ID, which is often passed to the container or pod as an env var.
|
||||
|
|
@ -25,7 +25,7 @@ const FORCED_IN_MEMORY_CACHE_NAMESPACES = process.env.FORCED_IN_MEMORY_CACHE_NAM
|
|||
|
||||
// Validate against CacheKeys enum
|
||||
if (FORCED_IN_MEMORY_CACHE_NAMESPACES.length > 0) {
|
||||
const validKeys = Object.values(CacheKeys);
|
||||
const validKeys = Object.values(CacheKeys) as string[];
|
||||
const invalidKeys = FORCED_IN_MEMORY_CACHE_NAMESPACES.filter((key) => !validKeys.includes(key));
|
||||
|
||||
if (invalidKeys.length > 0) {
|
||||
|
|
@ -38,15 +38,15 @@ if (FORCED_IN_MEMORY_CACHE_NAMESPACES.length > 0) {
|
|||
/** Helper function to safely read Redis CA certificate from file
|
||||
* @returns {string|null} The contents of the CA certificate file, or null if not set or on error
|
||||
*/
|
||||
const getRedisCA = () => {
|
||||
const getRedisCA = (): string | null => {
|
||||
const caPath = process.env.REDIS_CA;
|
||||
if (!caPath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
if (fs.existsSync(caPath)) {
|
||||
return fs.readFileSync(caPath, 'utf8');
|
||||
if (existsSync(caPath)) {
|
||||
return readFileSync(caPath, 'utf8');
|
||||
} else {
|
||||
logger.warn(`Redis CA certificate file not found: ${caPath}`);
|
||||
return null;
|
||||
|
|
@ -64,7 +64,7 @@ const cacheConfig = {
|
|||
REDIS_USERNAME: process.env.REDIS_USERNAME,
|
||||
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
|
||||
REDIS_CA: getRedisCA(),
|
||||
REDIS_KEY_PREFIX: process.env[REDIS_KEY_PREFIX_VAR] || REDIS_KEY_PREFIX || '',
|
||||
REDIS_KEY_PREFIX: process.env[REDIS_KEY_PREFIX_VAR ?? ''] || REDIS_KEY_PREFIX || '',
|
||||
REDIS_MAX_LISTENERS: math(process.env.REDIS_MAX_LISTENERS, 40),
|
||||
REDIS_PING_INTERVAL: math(process.env.REDIS_PING_INTERVAL, 0),
|
||||
/** Max delay between reconnection attempts in ms */
|
||||
|
|
@ -86,4 +86,4 @@ const cacheConfig = {
|
|||
BAN_DURATION: math(process.env.BAN_DURATION, 7200000), // 2 hours
|
||||
};
|
||||
|
||||
module.exports = { cacheConfig };
|
||||
export { cacheConfig };
|
||||
116
packages/api/src/cache/cacheFactory.ts
vendored
Normal file
116
packages/api/src/cache/cacheFactory.ts
vendored
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
/**
|
||||
* @keyv/redis exports its default class in a non-standard way:
|
||||
* module.exports = { default: KeyvRedis, ... } instead of module.exports = KeyvRedis
|
||||
* This breaks ES6 imports when the module is marked as external in rollup.
|
||||
* We must use require() to access the .default property directly.
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
const KeyvRedis = require('@keyv/redis').default as typeof import('@keyv/redis').default;
|
||||
import { Keyv } from 'keyv';
|
||||
import createMemoryStore from 'memorystore';
|
||||
import { RedisStore } from 'rate-limit-redis';
|
||||
import { Time } from 'librechat-data-provider';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import session, { MemoryStore } from 'express-session';
|
||||
import { RedisStore as ConnectRedis } from 'connect-redis';
|
||||
import type { SendCommandFn } from 'rate-limit-redis';
|
||||
import { keyvRedisClient, ioredisClient, GLOBAL_PREFIX_SEPARATOR } from './redisClients';
|
||||
import { cacheConfig } from './cacheConfig';
|
||||
import { violationFile } from './keyvFiles';
|
||||
|
||||
/**
|
||||
* Creates a cache instance using Redis or a fallback store. Suitable for general caching needs.
|
||||
* @param namespace - The cache namespace.
|
||||
* @param ttl - Time to live for cache entries.
|
||||
* @param fallbackStore - Optional fallback store if Redis is not used.
|
||||
* @returns Cache instance.
|
||||
*/
|
||||
export const standardCache = (namespace: string, ttl?: number, fallbackStore?: object): Keyv => {
|
||||
if (keyvRedisClient && !cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES?.includes(namespace)) {
|
||||
try {
|
||||
const keyvRedis = new KeyvRedis(keyvRedisClient);
|
||||
const cache = new Keyv(keyvRedis, { namespace, ttl });
|
||||
keyvRedis.namespace = cacheConfig.REDIS_KEY_PREFIX;
|
||||
keyvRedis.keyPrefixSeparator = GLOBAL_PREFIX_SEPARATOR;
|
||||
|
||||
cache.on('error', (err) => {
|
||||
logger.error(`Cache error in namespace ${namespace}:`, err);
|
||||
});
|
||||
|
||||
return cache;
|
||||
} catch (err) {
|
||||
logger.error(`Failed to create Redis cache for namespace ${namespace}:`, err);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
if (fallbackStore) {
|
||||
return new Keyv({ store: fallbackStore, namespace, ttl });
|
||||
}
|
||||
return new Keyv({ namespace, ttl });
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a cache instance for storing violation data.
|
||||
* Uses a file-based fallback store if Redis is not enabled.
|
||||
* @param namespace - The cache namespace for violations.
|
||||
* @param ttl - Time to live for cache entries.
|
||||
* @returns Cache instance for violations.
|
||||
*/
|
||||
export const violationCache = (namespace: string, ttl?: number): Keyv => {
|
||||
return standardCache(`violations:${namespace}`, ttl, violationFile);
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a session cache instance using Redis or in-memory store.
|
||||
* @param namespace - The session namespace.
|
||||
* @param ttl - Time to live for session entries.
|
||||
* @returns Session store instance.
|
||||
*/
|
||||
export const sessionCache = (namespace: string, ttl?: number): MemoryStore | ConnectRedis => {
|
||||
namespace = namespace.endsWith(':') ? namespace : `${namespace}:`;
|
||||
if (!cacheConfig.USE_REDIS) {
|
||||
const MemoryStore = createMemoryStore(session);
|
||||
return new MemoryStore({ ttl, checkPeriod: Time.ONE_DAY });
|
||||
}
|
||||
const store = new ConnectRedis({ client: ioredisClient, ttl, prefix: namespace });
|
||||
if (ioredisClient) {
|
||||
ioredisClient.on('error', (err) => {
|
||||
logger.error(`Session store Redis error for namespace ${namespace}:`, err);
|
||||
});
|
||||
}
|
||||
return store;
|
||||
};
|
||||
|
||||
/**
|
||||
* Creates a rate limiter cache using Redis.
|
||||
* @param prefix - The key prefix for rate limiting.
|
||||
* @returns RedisStore instance or undefined if Redis is not used.
|
||||
*/
|
||||
export const limiterCache = (prefix: string): RedisStore | undefined => {
|
||||
if (!prefix) {
|
||||
throw new Error('prefix is required');
|
||||
}
|
||||
if (!cacheConfig.USE_REDIS) {
|
||||
return undefined;
|
||||
}
|
||||
// TODO: The prefix is not actually applied. Also needs to account for global prefix.
|
||||
prefix = prefix.endsWith(':') ? prefix : `${prefix}:`;
|
||||
|
||||
try {
|
||||
const sendCommand: SendCommandFn = (async (...args: string[]) => {
|
||||
if (ioredisClient == null) {
|
||||
throw new Error('Redis client not available');
|
||||
}
|
||||
try {
|
||||
return await ioredisClient.call(args[0], ...args.slice(1));
|
||||
} catch (err) {
|
||||
logger.error('Redis command execution failed:', err);
|
||||
throw err;
|
||||
}
|
||||
}) as SendCommandFn;
|
||||
return new RedisStore({ sendCommand, prefix });
|
||||
} catch (err) {
|
||||
logger.error(`Failed to create Redis rate limiter for prefix ${prefix}:`, err);
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
5
packages/api/src/cache/index.ts
vendored
Normal file
5
packages/api/src/cache/index.ts
vendored
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
/**
 * Barrel file for the cache package: re-exports the cache configuration,
 * Redis clients, file-based Keyv stores, the Mongo-backed Keyv store,
 * and the cache factory helpers.
 */
export * from './cacheConfig';
export * from './redisClients';
export * from './keyvFiles';
export { default as keyvMongo } from './keyvMongo';
export * from './cacheFactory';
|
||||
6
packages/api/src/cache/keyvFiles.ts
vendored
Normal file
6
packages/api/src/cache/keyvFiles.ts
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
import { KeyvFile } from 'keyv-file';

// File-backed Keyv stores used when Redis is not enabled.
// setMaxListeners(20) raises the EventEmitter warning threshold — presumably
// because multiple Keyv wrappers attach listeners to these shared stores;
// confirm against the consumers.
export const logFile = new KeyvFile({ filename: './data/logs.json' }).setMaxListeners(20);
export const violationFile = new KeyvFile({ filename: './data/violations.json' }).setMaxListeners(
  20,
);
|
||||
|
|
@ -1,65 +1,68 @@
|
|||
// api/cache/keyvMongo.js
|
||||
const mongoose = require('mongoose');
|
||||
const EventEmitter = require('events');
|
||||
const { GridFSBucket } = require('mongodb');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
import mongoose from 'mongoose';
|
||||
import { EventEmitter } from 'events';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import { GridFSBucket, type Db, type ReadPreference, type Collection } from 'mongodb';
|
||||
|
||||
const storeMap = new Map();
|
||||
interface KeyvMongoOptions {
|
||||
url?: string;
|
||||
collection?: string;
|
||||
useGridFS?: boolean;
|
||||
readPreference?: ReadPreference;
|
||||
}
|
||||
|
||||
interface GridFSClient {
|
||||
bucket: GridFSBucket;
|
||||
store: Collection;
|
||||
db: Db;
|
||||
}
|
||||
|
||||
interface CollectionClient {
|
||||
store: Collection;
|
||||
db: Db;
|
||||
}
|
||||
|
||||
type Client = GridFSClient | CollectionClient;
|
||||
|
||||
const storeMap = new Map<string, Client>();
|
||||
|
||||
class KeyvMongoCustom extends EventEmitter {
|
||||
constructor(url, options = {}) {
|
||||
super();
|
||||
private opts: KeyvMongoOptions;
|
||||
public ttlSupport: boolean;
|
||||
public namespace?: string;
|
||||
|
||||
url = url || {};
|
||||
if (typeof url === 'string') {
|
||||
url = { url };
|
||||
}
|
||||
if (url.uri) {
|
||||
url = { url: url.uri, ...url };
|
||||
}
|
||||
constructor(options: KeyvMongoOptions = {}) {
|
||||
super();
|
||||
|
||||
this.opts = {
|
||||
url: 'mongodb://127.0.0.1:27017',
|
||||
collection: 'keyv',
|
||||
...url,
|
||||
...options,
|
||||
};
|
||||
|
||||
this.ttlSupport = false;
|
||||
|
||||
// Filter valid options
|
||||
const keyvMongoKeys = new Set([
|
||||
'url',
|
||||
'collection',
|
||||
'namespace',
|
||||
'serialize',
|
||||
'deserialize',
|
||||
'uri',
|
||||
'useGridFS',
|
||||
'dialect',
|
||||
]);
|
||||
this.opts = Object.fromEntries(Object.entries(this.opts).filter(([k]) => keyvMongoKeys.has(k)));
|
||||
}
|
||||
|
||||
// Helper to access the store WITHOUT storing a promise on the instance
|
||||
_getClient() {
|
||||
private async _getClient(): Promise<Client> {
|
||||
const storeKey = `${this.opts.collection}:${this.opts.useGridFS ? 'gridfs' : 'collection'}`;
|
||||
|
||||
// If we already have the store initialized, return it directly
|
||||
if (storeMap.has(storeKey)) {
|
||||
return Promise.resolve(storeMap.get(storeKey));
|
||||
return storeMap.get(storeKey)!;
|
||||
}
|
||||
|
||||
// Check mongoose connection state
|
||||
if (mongoose.connection.readyState !== 1) {
|
||||
return Promise.reject(
|
||||
new Error('Mongoose connection not ready. Ensure connectDb() is called first.'),
|
||||
);
|
||||
throw new Error('Mongoose connection not ready. Ensure connectDb() is called first.');
|
||||
}
|
||||
|
||||
try {
|
||||
const db = mongoose.connection.db;
|
||||
let client;
|
||||
const db = mongoose.connection.db as unknown as Db | undefined;
|
||||
if (!db) {
|
||||
throw new Error('MongoDB database not available');
|
||||
}
|
||||
|
||||
let client: Client;
|
||||
|
||||
if (this.opts.useGridFS) {
|
||||
const bucket = new GridFSBucket(db, {
|
||||
|
|
@ -75,17 +78,17 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
}
|
||||
|
||||
storeMap.set(storeKey, client);
|
||||
return Promise.resolve(client);
|
||||
return client;
|
||||
} catch (error) {
|
||||
this.emit('error', error);
|
||||
return Promise.reject(error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async get(key) {
|
||||
async get(key: string): Promise<unknown> {
|
||||
const client = await this._getClient();
|
||||
|
||||
if (this.opts.useGridFS) {
|
||||
if (this.opts.useGridFS && this.isGridFSClient(client)) {
|
||||
await client.store.updateOne(
|
||||
{
|
||||
filename: key,
|
||||
|
|
@ -100,7 +103,7 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
const stream = client.bucket.openDownloadStreamByName(key);
|
||||
|
||||
return new Promise((resolve) => {
|
||||
const resp = [];
|
||||
const resp: Buffer[] = [];
|
||||
stream.on('error', () => {
|
||||
resolve(undefined);
|
||||
});
|
||||
|
|
@ -110,7 +113,7 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
resolve(data);
|
||||
});
|
||||
|
||||
stream.on('data', (chunk) => {
|
||||
stream.on('data', (chunk: Buffer) => {
|
||||
resp.push(chunk);
|
||||
});
|
||||
});
|
||||
|
|
@ -125,7 +128,7 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
return document.value;
|
||||
}
|
||||
|
||||
async getMany(keys) {
|
||||
async getMany(keys: string[]): Promise<unknown[]> {
|
||||
const client = await this._getClient();
|
||||
|
||||
if (this.opts.useGridFS) {
|
||||
|
|
@ -135,9 +138,9 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
}
|
||||
|
||||
const values = await Promise.allSettled(promises);
|
||||
const data = [];
|
||||
const data: unknown[] = [];
|
||||
for (const value of values) {
|
||||
data.push(value.value);
|
||||
data.push(value.status === 'fulfilled' ? value.value : undefined);
|
||||
}
|
||||
|
||||
return data;
|
||||
|
|
@ -148,7 +151,7 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
.project({ _id: 0, value: 1, key: 1 })
|
||||
.toArray();
|
||||
|
||||
const results = [...keys];
|
||||
const results: unknown[] = [...keys];
|
||||
let i = 0;
|
||||
for (const key of keys) {
|
||||
const rowIndex = values.findIndex((row) => row.key === key);
|
||||
|
|
@ -159,11 +162,11 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
return results;
|
||||
}
|
||||
|
||||
async set(key, value, ttl) {
|
||||
async set(key: string, value: string, ttl?: number): Promise<unknown> {
|
||||
const client = await this._getClient();
|
||||
const expiresAt = typeof ttl === 'number' ? new Date(Date.now() + ttl) : null;
|
||||
|
||||
if (this.opts.useGridFS) {
|
||||
if (this.opts.useGridFS && this.isGridFSClient(client)) {
|
||||
const stream = client.bucket.openUploadStream(key, {
|
||||
metadata: {
|
||||
expiresAt,
|
||||
|
|
@ -186,20 +189,18 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
);
|
||||
}
|
||||
|
||||
async delete(key) {
|
||||
if (typeof key !== 'string') {
|
||||
return false;
|
||||
}
|
||||
|
||||
async delete(key: string): Promise<boolean> {
|
||||
const client = await this._getClient();
|
||||
|
||||
if (this.opts.useGridFS) {
|
||||
if (this.opts.useGridFS && this.isGridFSClient(client)) {
|
||||
try {
|
||||
const bucket = new GridFSBucket(client.db, {
|
||||
bucketName: this.opts.collection,
|
||||
});
|
||||
const files = await bucket.find({ filename: key }).toArray();
|
||||
if (files.length > 0) {
|
||||
await client.bucket.delete(files[0]._id);
|
||||
}
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
|
|
@ -210,10 +211,10 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
return object.deletedCount > 0;
|
||||
}
|
||||
|
||||
async deleteMany(keys) {
|
||||
async deleteMany(keys: string[]): Promise<boolean> {
|
||||
const client = await this._getClient();
|
||||
|
||||
if (this.opts.useGridFS) {
|
||||
if (this.opts.useGridFS && this.isGridFSClient(client)) {
|
||||
const bucket = new GridFSBucket(client.db, {
|
||||
bucketName: this.opts.collection,
|
||||
});
|
||||
|
|
@ -230,15 +231,17 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
return object.deletedCount > 0;
|
||||
}
|
||||
|
||||
async clear() {
|
||||
async clear(): Promise<void> {
|
||||
const client = await this._getClient();
|
||||
|
||||
if (this.opts.useGridFS) {
|
||||
if (this.opts.useGridFS && this.isGridFSClient(client)) {
|
||||
try {
|
||||
await client.bucket.drop();
|
||||
} catch (error) {
|
||||
} catch (error: unknown) {
|
||||
// Throw error if not "namespace not found" error
|
||||
if (!(error.code === 26)) {
|
||||
const errorCode =
|
||||
error instanceof Error && 'code' in error ? (error as { code?: number }).code : undefined;
|
||||
if (errorCode !== 26) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
|
@ -249,7 +252,7 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
});
|
||||
}
|
||||
|
||||
async has(key) {
|
||||
async has(key: string): Promise<boolean> {
|
||||
const client = await this._getClient();
|
||||
const filter = { [this.opts.useGridFS ? 'filename' : 'key']: { $eq: key } };
|
||||
const document = await client.store.countDocuments(filter, { limit: 1 });
|
||||
|
|
@ -257,10 +260,14 @@ class KeyvMongoCustom extends EventEmitter {
|
|||
}
|
||||
|
||||
// No-op disconnect
|
||||
async disconnect() {
|
||||
async disconnect(): Promise<boolean> {
|
||||
// This is a no-op since we don't want to close the shared mongoose connection
|
||||
return true;
|
||||
}
|
||||
|
||||
private isGridFSClient(client: Client): client is GridFSClient {
|
||||
return (client as GridFSClient).bucket != null;
|
||||
}
|
||||
}
|
||||
|
||||
const keyvMongo = new KeyvMongoCustom({
|
||||
|
|
@ -269,4 +276,4 @@ const keyvMongo = new KeyvMongoCustom({
|
|||
|
||||
keyvMongo.on('error', (err) => logger.error('KeyvMongo connection error:', err));
|
||||
|
||||
module.exports = keyvMongo;
|
||||
export default keyvMongo;
|
||||
|
|
@ -1,26 +1,26 @@
|
|||
const IoRedis = require('ioredis');
|
||||
const { logger } = require('@librechat/data-schemas');
|
||||
const { createClient, createCluster } = require('@keyv/redis');
|
||||
const { cacheConfig } = require('./cacheConfig');
|
||||
import IoRedis from 'ioredis';
|
||||
import type { Redis, Cluster } from 'ioredis';
|
||||
import { logger } from '@librechat/data-schemas';
|
||||
import { createClient, createCluster } from '@keyv/redis';
|
||||
import type { RedisClientType, RedisClusterType } from '@redis/client';
|
||||
import { cacheConfig } from './cacheConfig';
|
||||
|
||||
const GLOBAL_PREFIX_SEPARATOR = '::';
|
||||
|
||||
const urls = cacheConfig.REDIS_URI?.split(',').map((uri) => new URL(uri));
|
||||
const username = urls?.[0].username || cacheConfig.REDIS_USERNAME;
|
||||
const password = urls?.[0].password || cacheConfig.REDIS_PASSWORD;
|
||||
const urls = cacheConfig.REDIS_URI?.split(',').map((uri) => new URL(uri)) || [];
|
||||
const username = urls?.[0]?.username || cacheConfig.REDIS_USERNAME;
|
||||
const password = urls?.[0]?.password || cacheConfig.REDIS_PASSWORD;
|
||||
const ca = cacheConfig.REDIS_CA;
|
||||
|
||||
/** @type {import('ioredis').Redis | import('ioredis').Cluster | null} */
|
||||
let ioredisClient = null;
|
||||
let ioredisClient: Redis | Cluster | null = null;
|
||||
if (cacheConfig.USE_REDIS) {
|
||||
/** @type {import('ioredis').RedisOptions | import('ioredis').ClusterOptions} */
|
||||
const redisOptions = {
|
||||
const redisOptions: Record<string, unknown> = {
|
||||
username: username,
|
||||
password: password,
|
||||
tls: ca ? { ca } : undefined,
|
||||
keyPrefix: `${cacheConfig.REDIS_KEY_PREFIX}${GLOBAL_PREFIX_SEPARATOR}`,
|
||||
maxListeners: cacheConfig.REDIS_MAX_LISTENERS,
|
||||
retryStrategy: (times) => {
|
||||
retryStrategy: (times: number) => {
|
||||
if (
|
||||
cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
|
||||
times > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
|
||||
|
|
@ -34,7 +34,7 @@ if (cacheConfig.USE_REDIS) {
|
|||
logger.info(`ioredis reconnecting... attempt ${times}, delay ${delay}ms`);
|
||||
return delay;
|
||||
},
|
||||
reconnectOnError: (err) => {
|
||||
reconnectOnError: (err: Error) => {
|
||||
const targetError = 'READONLY';
|
||||
if (err.message.includes(targetError)) {
|
||||
logger.warn('ioredis reconnecting due to READONLY error');
|
||||
|
|
@ -49,15 +49,20 @@ if (cacheConfig.USE_REDIS) {
|
|||
|
||||
ioredisClient =
|
||||
urls.length === 1 && !cacheConfig.USE_REDIS_CLUSTER
|
||||
? new IoRedis(cacheConfig.REDIS_URI, redisOptions)
|
||||
? new IoRedis(cacheConfig.REDIS_URI!, redisOptions)
|
||||
: new IoRedis.Cluster(
|
||||
urls.map((url) => ({ host: url.hostname, port: parseInt(url.port, 10) || 6379 })),
|
||||
{
|
||||
...(cacheConfig.REDIS_USE_ALTERNATIVE_DNS_LOOKUP
|
||||
? { dnsLookup: (address, callback) => callback(null, address) }
|
||||
? {
|
||||
dnsLookup: (
|
||||
address: string,
|
||||
callback: (err: Error | null, address: string) => void,
|
||||
) => callback(null, address),
|
||||
}
|
||||
: {}),
|
||||
redisOptions,
|
||||
clusterRetryStrategy: (times) => {
|
||||
clusterRetryStrategy: (times: number) => {
|
||||
if (
|
||||
cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
|
||||
times > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
|
||||
|
|
@ -87,7 +92,7 @@ if (cacheConfig.USE_REDIS) {
|
|||
logger.info('ioredis client ready');
|
||||
});
|
||||
|
||||
ioredisClient.on('reconnecting', (delay) => {
|
||||
ioredisClient.on('reconnecting', (delay: number) => {
|
||||
logger.info(`ioredis client reconnecting in ${delay}ms`);
|
||||
});
|
||||
|
||||
|
|
@ -96,7 +101,7 @@ if (cacheConfig.USE_REDIS) {
|
|||
});
|
||||
|
||||
/** Ping Interval to keep the Redis server connection alive (if enabled) */
|
||||
let pingInterval = null;
|
||||
let pingInterval: NodeJS.Timeout | null = null;
|
||||
const clearPingInterval = () => {
|
||||
if (pingInterval) {
|
||||
clearInterval(pingInterval);
|
||||
|
|
@ -117,22 +122,20 @@ if (cacheConfig.USE_REDIS) {
|
|||
}
|
||||
}
|
||||
|
||||
/** @type {import('@keyv/redis').RedisClient | import('@keyv/redis').RedisCluster | null} */
|
||||
let keyvRedisClient = null;
|
||||
let keyvRedisClient: RedisClientType | RedisClusterType | null = null;
|
||||
if (cacheConfig.USE_REDIS) {
|
||||
/**
|
||||
* ** WARNING ** Keyv Redis client does not support Prefix like ioredis above.
|
||||
* The prefix feature will be handled by the Keyv-Redis store in cacheFactory.js
|
||||
* @type {import('@keyv/redis').RedisClientOptions | import('@keyv/redis').RedisClusterOptions}
|
||||
*/
|
||||
const redisOptions = {
|
||||
const redisOptions: Record<string, unknown> = {
|
||||
username,
|
||||
password,
|
||||
socket: {
|
||||
tls: ca != null,
|
||||
ca,
|
||||
connectTimeout: cacheConfig.REDIS_CONNECT_TIMEOUT,
|
||||
reconnectStrategy: (retries) => {
|
||||
reconnectStrategy: (retries: number) => {
|
||||
if (
|
||||
cacheConfig.REDIS_RETRY_MAX_ATTEMPTS > 0 &&
|
||||
retries > cacheConfig.REDIS_RETRY_MAX_ATTEMPTS
|
||||
|
|
@ -148,6 +151,9 @@ if (cacheConfig.USE_REDIS) {
|
|||
},
|
||||
},
|
||||
disableOfflineQueue: !cacheConfig.REDIS_ENABLE_OFFLINE_QUEUE,
|
||||
...(cacheConfig.REDIS_PING_INTERVAL > 0
|
||||
? { pingInterval: cacheConfig.REDIS_PING_INTERVAL * 1000 }
|
||||
: {}),
|
||||
};
|
||||
|
||||
keyvRedisClient =
|
||||
|
|
@ -184,27 +190,6 @@ if (cacheConfig.USE_REDIS) {
|
|||
logger.error('@keyv/redis initial connection failed:', err);
|
||||
throw err;
|
||||
});
|
||||
|
||||
/** Ping Interval to keep the Redis server connection alive (if enabled) */
|
||||
let pingInterval = null;
|
||||
const clearPingInterval = () => {
|
||||
if (pingInterval) {
|
||||
clearInterval(pingInterval);
|
||||
pingInterval = null;
|
||||
}
|
||||
};
|
||||
|
||||
if (cacheConfig.REDIS_PING_INTERVAL > 0) {
|
||||
pingInterval = setInterval(() => {
|
||||
if (keyvRedisClient && keyvRedisClient.isReady) {
|
||||
keyvRedisClient.ping().catch((err) => {
|
||||
logger.error('@keyv/redis ping failed:', err);
|
||||
});
|
||||
}
|
||||
}, cacheConfig.REDIS_PING_INTERVAL * 1000);
|
||||
keyvRedisClient.on('disconnect', clearPingInterval);
|
||||
keyvRedisClient.on('end', clearPingInterval);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { ioredisClient, keyvRedisClient, GLOBAL_PREFIX_SEPARATOR };
|
||||
export { ioredisClient, keyvRedisClient, GLOBAL_PREFIX_SEPARATOR };
|
||||
|
|
@ -35,6 +35,8 @@ export * from './files';
|
|||
export * from './tools';
|
||||
/* web search */
|
||||
export * from './web';
|
||||
/* Cache */
|
||||
export * from './cache';
|
||||
/* types */
|
||||
export type * from './mcp/types';
|
||||
export type * from './flow/types';
|
||||
|
|
|
|||
|
|
@ -12,8 +12,8 @@
|
|||
*
|
||||
* @throws Throws an error if the input is not a string or number, contains invalid characters, or does not evaluate to a number.
|
||||
*/
|
||||
export function math(str: string | number, fallbackValue?: number): number {
|
||||
const fallback = typeof fallbackValue !== 'undefined' && typeof fallbackValue === 'number';
|
||||
export function math(str: string | number | undefined, fallbackValue?: number): number {
|
||||
const fallback = fallbackValue != null;
|
||||
if (typeof str !== 'string' && typeof str === 'number') {
|
||||
return str;
|
||||
} else if (typeof str !== 'string') {
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ redis-server redis-7002.conf --daemonize yes
|
|||
redis-server redis-7003.conf --daemonize yes
|
||||
|
||||
# Wait for nodes to start
|
||||
sleep 3
|
||||
sleep 5
|
||||
|
||||
# Check if all nodes are running
|
||||
NODES_RUNNING=0
|
||||
|
|
@ -66,10 +66,14 @@ fi
|
|||
|
||||
# Initialize the cluster
|
||||
echo "🔧 Initializing cluster..."
|
||||
echo "yes" | redis-cli --cluster create 127.0.0.1:7001 127.0.0.1:7002 127.0.0.1:7003 --cluster-replicas 0 > /dev/null
|
||||
echo "yes" | redis-cli --cluster create 127.0.0.1:7001 127.0.0.1:7002 127.0.0.1:7003 --cluster-replicas 0 2>&1 | tee /tmp/cluster-init.log || {
|
||||
echo "❌ Cluster creation command failed. Output:"
|
||||
cat /tmp/cluster-init.log
|
||||
exit 1
|
||||
}
|
||||
|
||||
# Wait for cluster to stabilize
|
||||
sleep 3
|
||||
sleep 5
|
||||
|
||||
# Verify cluster status
|
||||
if redis-cli -p 7001 cluster info | grep -q "cluster_state:ok"; then
|
||||
|
|
@ -80,5 +84,10 @@ if redis-cli -p 7001 cluster info | grep -q "cluster_state:ok"; then
|
|||
echo " Stop: ./stop-cluster.sh"
|
||||
else
|
||||
echo "❌ Cluster initialization failed!"
|
||||
echo "📊 Cluster info from node 7001:"
|
||||
redis-cli -p 7001 cluster info
|
||||
echo ""
|
||||
echo "📊 Cluster nodes from node 7001:"
|
||||
redis-cli -p 7001 cluster nodes
|
||||
exit 1
|
||||
fi
|
||||
Loading…
Add table
Add a link
Reference in a new issue