First commit

2025-12-25 11:16:59 +01:00
commit 0c5ca09a63
720 changed files with 329234 additions and 0 deletions


@@ -0,0 +1,188 @@
/* eslint-disable no-unused-expressions */
const { describe, it } = require('mocha');
const { expect } = require('chai');
const RateLimiterMemory = require('../lib/RateLimiterMemory');
const BurstyRateLimiter = require('../lib/BurstyRateLimiter');
const RateLimiterRedis = require('../lib/RateLimiterRedis');
const redisMock = require('redis-mock');
const { redisEvalMock, getRedisClientClosed } = require('./helper');
describe('BurstyRateLimiter', () => {
it('consume 1 point from limiter', (done) => {
const testKey = 'consume1';
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
const blMemory = new RateLimiterMemory({ points: 1, duration: 10 });
const bursty = new BurstyRateLimiter(rlMemory, blMemory);
bursty.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
expect(res.remainingPoints).to.equal(0);
expect(res.msBeforeNext <= 1000).to.equal(true);
expect(res.isFirstInDuration).to.equal(true);
done();
})
.catch((err) => {
done(err);
});
});
it('consume 1 point from bursty limiter, if all consumed on limiter', (done) => {
const testKey = 'consume1frombursty';
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
const blMemory = new RateLimiterMemory({ points: 1, duration: 10 });
const bursty = new BurstyRateLimiter(rlMemory, blMemory);
bursty.consume(testKey)
.then(() => {
bursty.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.remainingPoints).to.equal(0);
expect(res.msBeforeNext <= 1000).to.equal(true);
expect(res.isFirstInDuration).to.equal(false);
done();
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('consume 1 point from limiter and 1 from bursty, then reject the next point with data from limiter', (done) => {
const testKey = 'consume1frombursty';
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
const blMemory = new RateLimiterMemory({ points: 1, duration: 10 });
const bursty = new BurstyRateLimiter(rlMemory, blMemory);
bursty.consume(testKey)
.then(() => {
bursty.consume(testKey)
.then(() => {
bursty.consume(testKey)
.then(() => {
done(new Error('must not'));
})
.catch((rej) => {
expect(rej.consumedPoints).to.equal(3);
expect(rej.remainingPoints).to.equal(0);
expect(rej.msBeforeNext <= 1000).to.equal(true);
expect(rej.isFirstInDuration).to.equal(false);
done();
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('does not consume from burst limiter if rate limiter consume is rejected with an error', (done) => {
const testKey = 'consume-rejected-with-error';
const redisMockClient = redisMock.createClient();
redisMockClient.eval = redisEvalMock(redisMockClient);
const redisClientClosed = getRedisClientClosed(redisMockClient);
const rlRedisClosed = new RateLimiterRedis({
storeClient: redisClientClosed,
});
const blRedis = new RateLimiterRedis({
storeClient: redisMockClient,
keyPrefix: 'bursty',
points: 1,
duration: 1,
});
const bursty = new BurstyRateLimiter(rlRedisClosed, blRedis);
bursty.consume(testKey)
.then(() => {
done(new Error('must not'));
})
.catch((err) => {
expect(err instanceof Error).to.equal(true);
blRedis.get(testKey)
.then((res) => {
expect(res).to.equal(null);
done();
});
});
});
it('reject with burst limiter error if it happens', (done) => {
const testKey = 'consume-rejected-with-error';
const redisMockClient = redisMock.createClient();
redisMockClient.eval = redisEvalMock(redisMockClient);
const redisClientClosed = getRedisClientClosed(redisMockClient);
const rlRedis = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 1,
});
const blRedisClosed = new RateLimiterRedis({
storeClient: redisClientClosed,
keyPrefix: 'bursty',
});
const bursty = new BurstyRateLimiter(rlRedis, blRedisClosed);
bursty.consume(testKey)
.then(() => {
bursty.consume(testKey)
.then(() => {
done(new Error('must not'));
})
.catch((err) => {
expect(err instanceof Error).to.equal(true);
rlRedis.get(testKey)
.then((rlRes) => {
expect(rlRes.consumedPoints).to.equal(2);
expect(rlRes.remainingPoints).to.equal(0);
expect(rlRes.msBeforeNext <= 1000).to.equal(true);
done();
});
});
})
.catch((err) => {
done(err);
});
});
it('consume and get return the combined RateLimiterRes of both limiters with correct msBeforeNext', (done) => {
const rlMemory = new RateLimiterMemory({ points: 1, duration: 10 });
const rlBurstMemory = new RateLimiterMemory({ points: 20, duration: 1 });
const bl = new BurstyRateLimiter(rlMemory, rlBurstMemory);
bl.consume('keyGet', 1)
.then((firstConsumeRes) => {
expect(firstConsumeRes.isFirstInDuration).to.equal(true);
bl.consume('keyGet', 1)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.remainingPoints).to.equal(0);
expect(res.msBeforeNext <= 1000).to.equal(true);
expect(res.isFirstInDuration).to.equal(false);
bl.get('keyGet')
.then((rlRes) => {
expect(rlRes.consumedPoints).to.equal(2);
expect(rlRes.remainingPoints).to.equal(0);
expect(rlRes.msBeforeNext <= 1000).to.equal(true);
done();
})
.catch(err => done(err));
})
.catch((err) => {
done(err);
});
});
});
it('returns points from limiter', (done) => {
const rlMemory = new RateLimiterMemory({ points: 1, duration: 10 });
const rlBurstMemory = new RateLimiterMemory({ points: 20, duration: 1 });
const brl = new BurstyRateLimiter(rlMemory, rlBurstMemory);
expect(brl.points).to.equal(1);
done();
});
});

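Taken together, these tests pin down the BurstyRateLimiter contract: points are consumed from the base limiter first, the burst limiter only covers the overflow, and both resolved and rejected results carry the combined consumedPoints / remainingPoints / msBeforeNext / isFirstInDuration fields. A minimal usage sketch, assuming the published package keeps the lib/ layout these tests require from; the point/duration values and the client key are illustrative, not taken from the tests:

const RateLimiterMemory = require('rate-limiter-flexible/lib/RateLimiterMemory');
const BurstyRateLimiter = require('rate-limiter-flexible/lib/BurstyRateLimiter');

// Base budget: 10 points per second; burst budget: 20 extra points per 10 seconds (illustrative numbers).
const bursty = new BurstyRateLimiter(
  new RateLimiterMemory({ points: 10, duration: 1 }),
  new RateLimiterMemory({ points: 20, duration: 10 }),
);

bursty.consume('client-key')
  .then((res) => {
    // Allowed: res.remainingPoints and res.msBeforeNext describe the combined state of both limiters.
  })
  .catch((rej) => {
    // Both budgets are spent: rej.msBeforeNext tells the caller how long to back off.
  });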

@@ -0,0 +1,436 @@
/* eslint-disable no-unused-expressions */
/* eslint-disable prefer-promise-reject-errors */
const { describe, it, beforeEach } = require('mocha');
const { expect } = require('chai');
const sinon = require('sinon');
const redisMock = require('redis-mock');
const Memcached = require('memcached-mock');
const ExpressBruteFlexible = require('../lib/ExpressBruteFlexible');
const limiters = require('../index');
const { redisEvalMock } = require('./helper');
const makeRequest = (middleware, req, res, next) => new Promise((resolve) => {
middleware(req, res, (err) => {
if (err) {
resolve(err);
} else {
next();
resolve();
}
});
});
describe('ExpressBruteFlexible', function ExpressBruteFlexibleTest() {
this.timeout(10000);
const resObj = {
header: () => {
},
status: () => {
},
send: () => {
},
};
const memcacheMockClient = new Memcached('localhost:11211');
const redisMockClient = redisMock.createClient();
redisMockClient.eval = redisEvalMock(redisMockClient);
const mongoCollection = {
createIndex: () => {
},
findOneAndUpdate: () => {
},
findOne: () => {
},
deleteOne: () => {
},
};
const mongoClientMock = {
db: () => {
},
};
const mongoDb = {
collection: () => {
},
};
sinon.stub(mongoDb, 'collection').callsFake(() => mongoCollection);
sinon.stub(mongoClientMock, 'db').callsFake(() => mongoDb);
const mysqlClientMock = {
query: () => {
},
};
const pgClientMock = {
query: () => Promise.resolve(),
};
const pgClientErrored = {
query: () => Promise.reject({ code: 0 }),
};
beforeEach((done) => {
memcacheMockClient.flush(() => {
redisMockClient.flushall(done);
});
});
it('allows 1 request with 1 free try', (done) => {
const brute = new ExpressBruteFlexible('memory', {
freeRetries: 1,
});
brute.prevent({ ip: '127.0.0.1' }, resObj, () => {
done();
});
});
it('allows 2 requests with 2 free tries', (done) => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMCACHE, {
storeClient: memcacheMockClient,
freeRetries: 2,
handleStoreError(err) {
done(err);
},
});
const next = sinon.spy();
Promise.all([
makeRequest(brute.prevent, { ip: '127.0.0.1' }, resObj, next),
makeRequest(brute.prevent, { ip: '127.0.0.1' }, resObj, next),
]).then(() => {
expect(next.calledTwice).to.equal(true);
done();
});
});
it('works with 0 free tries', (done) => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMCACHE, {
storeClient: memcacheMockClient,
freeRetries: 0,
handleStoreError(err) {
done(err);
},
failCallback(req, res, next) {
next({ message: 'blocked' });
},
});
const next = sinon.spy();
Promise.all([
makeRequest(brute.prevent, { ip: '127.0.0.1' }, resObj, next),
makeRequest(brute.prevent, { ip: '127.0.0.1' }, resObj, next),
]).then(() => {
expect(next.calledOnce).to.equal(true);
done();
});
});
it('blocks the second request when no free tries and calls failCallback', () => {
const brute = new ExpressBruteFlexible('memory', {
freeRetries: 0,
minWait: 1000,
failCallback(req, res, next, nextValidRequestDate) {
res.status(403);
res.send({
error: {
nextValidRequestDate,
},
});
next();
},
});
const next = sinon.spy();
const mockRes = Object.assign({}, resObj);
const resStatusSpy = sinon.spy(mockRes, 'status');
const resSendSpy = sinon.spy(mockRes, 'send');
return makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next)
.then(() => {
return makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next)
.then(() => {
expect(resStatusSpy.calledWith(403)).to.equal(true);
const spySendCall = resSendSpy.getCall(0);
const blockDuration = spySendCall.args[0].error.nextValidRequestDate.getTime() - Date.now();
expect(blockDuration > 0 && blockDuration <= 1000).to.equal(true);
});
});
});
it('maxWait limits maximum block duration on high traffic', (done) => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
storeClient: redisMockClient,
freeRetries: 0,
minWait: 2000,
maxWait: 3000,
failCallback(req, res, next, nextValidRequestDate) {
res.send({
error: {
nextValidRequestDate,
},
});
next();
},
});
let maximumBlockDuration = 0;
const mockRes = Object.assign({}, resObj);
mockRes.send = (obj) => {
const blockDuration = obj.error.nextValidRequestDate.getTime() - Date.now();
if (blockDuration > maximumBlockDuration) {
maximumBlockDuration = blockDuration;
}
};
const next = sinon.spy();
const resSendSpy = sinon.spy(mockRes, 'send');
Promise.all([
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
]).then(() => {
setTimeout(() => {
Promise.all([
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
]).then(() => {
setTimeout(() => {
Promise.all([
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
]).then(() => {
setTimeout(() => {
expect(maximumBlockDuration <= 3000).to.be.true;
expect(resSendSpy.callCount).to.equal(3);
done();
}, 4100);
});
}, 3100);
});
}, 2100);
});
});
it('block time grows fibonacci-like way', (done) => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
storeClient: redisMockClient,
freeRetries: 0,
minWait: 2000,
maxWait: 10000,
lifetime: 10000,
failCallback(req, res, next, nextValidRequestDate) {
res.send({
error: {
nextValidRequestDate,
},
});
next();
},
});
let sequenceLength = 0;
const mockRes = Object.assign({}, resObj);
mockRes.send = (obj) => {
const blockDuration = obj.error.nextValidRequestDate.getTime() - Date.now();
if (blockDuration > 1000 && blockDuration <= 2000 && sequenceLength === 0) {
sequenceLength++;
}
if (blockDuration > 1000 && blockDuration <= 2000 && sequenceLength === 1) {
sequenceLength++;
}
if (blockDuration > 2000 && blockDuration <= 4000 && sequenceLength === 2) {
sequenceLength++;
}
};
const next = sinon.spy();
Promise.all([
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
]).then(() => {
setTimeout(() => {
Promise.all([
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
]).then(() => {
setTimeout(() => {
Promise.all([
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
]).then(() => {
setTimeout(() => {
expect(sequenceLength).to.equal(3);
done();
}, 4100);
});
}, 2100);
});
}, 2100);
});
});
it('attaches reset to request by default and reset works', (done) => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
storeClient: redisMockClient,
freeRetries: 1,
minWait: 1000,
maxWait: 5000,
});
const req = { ip: '127.0.0.1' };
brute.prevent(req, resObj, () => {
expect(typeof req.brute.reset).to.equal('function');
req.brute.reset(() => {
brute.prevent(req, resObj, () => {
done();
});
});
});
});
it('does not attach request if option is false', (done) => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
storeClient: redisMockClient,
freeRetries: 1,
minWait: 1000,
maxWait: 5000,
attachResetToRequest: false,
});
const req = { ip: '127.0.0.1' };
brute.prevent(req, resObj, () => {
expect(typeof req.brute === 'undefined' || typeof req.brute.reset === 'undefined').to.be.true;
done();
});
});
it('getMiddleware returns middleware function and works', (done) => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
storeClient: redisMockClient,
freeRetries: 1,
minWait: 1000,
maxWait: 5000,
attachResetToRequest: false,
});
const middleware = brute.getMiddleware();
const req = { ip: '127.0.0.1' };
middleware(req, resObj, done);
});
it('ignores IP from key if getMiddleware is called with option ignoreIP=true', (done) => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
storeClient: redisMockClient,
freeRetries: 1,
minWait: 1000,
maxWait: 5000,
attachResetToRequest: false,
handleStoreError(err) {
done(err);
},
});
const getKeySpy = sinon.spy(ExpressBruteFlexible, '_getKey');
const middleware = brute.getMiddleware({
ignoreIP: true,
});
const req = { ip: '127.0.0.1' };
middleware(req, resObj, () => {
const getKeySpyCall = getKeySpy.getCall(0);
expect(getKeySpyCall.lastArg[0]).to.not.equal(req.ip);
getKeySpy.restore();
done();
});
});
it('memory limiters created internally by storeType', () => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMORY);
expect(brute.counterLimiter instanceof limiters.RateLimiterMemory).to.be.true;
expect(brute.blockLimiter instanceof limiters.RateLimiterMemory).to.be.true;
});
it('memcache limiters created internally by storeType', () => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMCACHE, {
storeClient: memcacheMockClient,
});
expect(brute.counterLimiter instanceof limiters.RateLimiterMemcache).to.be.true;
expect(brute.blockLimiter instanceof limiters.RateLimiterMemcache).to.be.true;
});
it('mongo limiters created internally by storeType', () => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MONGO, {
storeClient: mongoClientMock,
});
expect(brute.counterLimiter instanceof limiters.RateLimiterMongo).to.be.true;
expect(brute.blockLimiter instanceof limiters.RateLimiterMongo).to.be.true;
});
it('redis limiters created internally by storeType', () => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
storeClient: redisMockClient,
});
expect(brute.counterLimiter instanceof limiters.RateLimiterRedis).to.be.true;
expect(brute.blockLimiter instanceof limiters.RateLimiterRedis).to.be.true;
});
it('mysql limiters created internally by storeType', () => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MYSQL, {
storeClient: mysqlClientMock,
storeType: 'client',
});
expect(brute.counterLimiter instanceof limiters.RateLimiterMySQL).to.be.true;
expect(brute.blockLimiter instanceof limiters.RateLimiterMySQL).to.be.true;
});
it('postgres limiters created internally by storeType', () => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.POSTGRES, {
storeClient: pgClientMock,
storeType: 'client',
});
expect(brute.counterLimiter instanceof limiters.RateLimiterPostgres).to.be.true;
expect(brute.blockLimiter instanceof limiters.RateLimiterPostgres).to.be.true;
});
it('global reset works', (done) => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMORY, {
freeRetries: 1,
});
const ip = '127.0.0.1';
brute.prevent({ ip }, resObj, () => {
brute.reset(ip, undefined, () => {
const key = ExpressBruteFlexible._getKey([ip, brute.name]);
brute.freeLimiter.get(key)
.then((res) => {
expect(res).to.equal(null);
done();
});
});
});
});
it('global reset launches handleStoreError function', (done) => {
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.POSTGRES, {
storeClient: pgClientMock,
storeType: 'client',
freeRetries: 1,
handleStoreError() {
done();
},
});
const ip = '127.0.0.1';
brute.freeLimiter.client = pgClientErrored;
brute.reset(ip);
});
});

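The suite above documents the ExpressBruteFlexible surface: prevent is Express-compatible middleware, failCallback receives (req, res, next, nextValidRequestDate), the block grows from minWait toward maxWait, and req.brute.reset is attached unless attachResetToRequest is false. A hedged wiring sketch against a plain Express app; the route and option values are illustrative, not taken from these tests:

const express = require('express');
const ExpressBruteFlexible = require('rate-limiter-flexible/lib/ExpressBruteFlexible');

const app = express();
const bruteforce = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMORY, {
  freeRetries: 2,  // the first two attempts per key are never blocked
  minWait: 1000,   // the first block lasts about a second
  maxWait: 60000,  // blocks stop growing at one minute
  failCallback(req, res, next, nextValidRequestDate) {
    res.status(403).send({ error: { nextValidRequestDate } });
  },
});

app.post('/login', bruteforce.prevent, (req, res) => {
  // On success, clear the counters for this client (req.brute.reset is attached by default).
  req.brute.reset(() => res.send('ok'));
});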

@@ -0,0 +1,512 @@
const { describe, it } = require('mocha');
const { expect } = require('chai');
const RLWrapperBlackAndWhite = require('../lib/RLWrapperBlackAndWhite');
const RateLimiterMemory = require('../lib/RateLimiterMemory');
describe('RLWrapperBlackAndWhite', () => {
it('consume if not blacked', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
blackList: ['blacked'],
});
limiterWrapped
.consume('test')
.then((res) => {
expect(res.remainingPoints === 0 && res.consumedPoints === 1).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('rejected on consume if blacked', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
blackList: ['blacked'],
});
limiterWrapped
.consume('blacked')
.then(() => {
done(Error('must not consume'));
})
.catch((rej) => {
expect(rej.remainingPoints === 0 && rej.consumedPoints === 0).to.equal(true);
done();
});
});
it('block if not blacked', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
blackList: ['blacked'],
});
limiterWrapped
.block('test', 30)
.then((res) => {
expect(res.msBeforeNext > 1000 && res.msBeforeNext <= 30000).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('block resolved if blacked', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
blackList: ['blacked'],
});
limiterWrapped
.block('blacked', 30)
.then((res) => {
expect(res.msBeforeNext > 30000).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('penalty if not blacked', (done) => {
const limiter = new RateLimiterMemory({
points: 2,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
blackList: ['blacked'],
});
limiterWrapped
.penalty('test', 1)
.then((res) => {
expect(res.consumedPoints === 1).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('penalty resolved if blacked', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
blackList: ['blacked'],
});
limiterWrapped
.penalty('blacked', 1)
.then((res) => {
expect(res.consumedPoints === 0 && res.remainingPoints === 0).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('reward if not blacked', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
blackList: ['blacked'],
});
limiterWrapped.consume('test').then(() => {
limiterWrapped
.reward('test', 1)
.then((res) => {
expect(res.consumedPoints === 0).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
});
it('reward resolved if blacked', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
blackList: ['blacked'],
});
limiterWrapped
.reward('blacked', 1)
.then((res) => {
expect(res.consumedPoints === 0 && res.remainingPoints === 0).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('get if not blacked', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
blackList: ['blacked'],
});
limiterWrapped.consume('test').then(() => {
limiterWrapped
.get('test')
.then((res) => {
expect(res.consumedPoints === 1).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
});
it('get resolved if blacked', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
blackList: ['blacked'],
});
limiterWrapped
.get('blacked')
.then((res) => {
expect(res.consumedPoints === 0 && res.remainingPoints === 0).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('resolve consume if whited', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
whiteList: ['white'],
});
limiterWrapped
.consume('white', 3)
.then((res) => {
expect(res.consumedPoints === 0).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('resolve block if whited', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
whiteList: ['white'],
});
limiterWrapped
.block('white', 3)
.then((res) => {
expect(res.msBeforeNext === 0).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('resolve penalty if whited', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
whiteList: ['white'],
});
limiterWrapped
.penalty('white', 3)
.then((res) => {
expect(res.msBeforeNext === 0).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('resolve reward if whited', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
whiteList: ['white'],
});
limiterWrapped
.reward('white', 3)
.then((res) => {
expect(res.msBeforeNext === 0).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('resolve get if whited', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
whiteList: ['white'],
});
limiterWrapped
.get('white')
.then((res) => {
expect(res.remainingPoints === Number.MAX_SAFE_INTEGER).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('consume resolved if in white and in black', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
whiteList: ['test'],
blackList: ['test'],
});
limiterWrapped
.consume('test')
.then((res) => {
expect(res.remainingPoints === Number.MAX_SAFE_INTEGER).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('consume resolved if isWhiteListed func returns true', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
isWhiteListed: key => key === 'test',
});
limiterWrapped
.consume('test')
.then((res) => {
expect(res.remainingPoints === Number.MAX_SAFE_INTEGER).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('consume rejected if isBlackListed func returns true', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
isBlackListed: key => key === 'test',
});
limiterWrapped
.consume('test')
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext === Number.MAX_SAFE_INTEGER).to.equal(true);
done();
});
});
it('consume even if black listed when runActionAnyway set to true', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
isBlackListed: key => key === 'test',
runActionAnyway: true,
});
limiterWrapped
.consume('test')
.then(() => {
done(Error('must not resolve'));
})
.catch(() => {
limiterWrapped.get('test').then((res) => {
expect(res.consumedPoints === 1).to.equal(true);
done();
});
});
});
it('block even if black listed when runActionAnyway set to true', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
isBlackListed: key => key === 'test',
runActionAnyway: true,
});
limiterWrapped
.block('test', 30)
.then(() => {
limiterWrapped.get('test').then((res) => {
expect(res.msBeforeNext > 1000).to.equal(true);
done();
});
})
.catch(() => {
done(Error('must not reject'));
});
});
it('penalty even if black listed when runActionAnyway set to true', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
isBlackListed: key => key === 'test',
runActionAnyway: true,
});
limiterWrapped
.penalty('test', 1)
.then(() => {
limiterWrapped.get('test').then((res) => {
expect(res.consumedPoints === 1).to.equal(true);
done();
});
})
.catch(() => {
done(Error('must not reject'));
});
});
it('reward even if black listed when runActionAnyway set to true', (done) => {
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
isBlackListed: key => key === 'test',
runActionAnyway: true,
});
limiterWrapped
.reward('test', 1)
.then(() => {
limiterWrapped.get('test').then((res) => {
expect(res.consumedPoints === -1).to.equal(true);
done();
});
})
.catch(() => {
done(Error('must not reject'));
});
});
it('delete data straight on limiter even if key is black or white listed', (done) => {
const testKey = 'test';
const limiter = new RateLimiterMemory({
points: 1,
duration: 1,
});
const limiterWrapped = new RLWrapperBlackAndWhite({
limiter,
isBlackListed: key => key === testKey,
isWhiteListed: key => key === testKey,
});
limiter.consume(testKey)
.then(() => {
limiterWrapped.delete(testKey)
.then((res) => {
expect(res).to.equal(true);
done();
});
});
});
});

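The wrapper tests fix the precedence rules: white-listed keys resolve without consuming, black-listed keys reject immediately, the white list wins when a key is on both lists, and runActionAnyway still records the action on the wrapped limiter. A short sketch using the same option names as the tests; the deny-list lookup is a hypothetical application-side helper:

const RateLimiterMemory = require('rate-limiter-flexible/lib/RateLimiterMemory');
const RLWrapperBlackAndWhite = require('rate-limiter-flexible/lib/RLWrapperBlackAndWhite');

const bannedKeys = new Set(['203.0.113.9']); // hypothetical deny list maintained by the application

const limiter = new RLWrapperBlackAndWhite({
  limiter: new RateLimiterMemory({ points: 10, duration: 1 }),
  whiteList: ['trusted-service'],            // resolved without consuming anything
  isBlackListed: key => bannedKeys.has(key), // rejected immediately with msBeforeNext = Number.MAX_SAFE_INTEGER
  runActionAnyway: false,                    // listed keys never reach the wrapped limiter
});

limiter.consume('203.0.113.9')
  .then(() => { /* allowed */ })
  .catch(() => { /* black-listed or over the limit */ });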

@@ -0,0 +1,13 @@
const { describe, it } = require('mocha');
const { expect } = require('chai');
const RateLimiterAbstract = require('../lib/RateLimiterAbstract');
describe('RateLimiterAbstract', function () {
this.timeout(5000);
it('does not prefix key if keyPrefix is an empty string', () => {
const testKey = 'test1';
const rateLimiter = new RateLimiterAbstract({ keyPrefix: '' });
expect(rateLimiter.getKey(testKey)).to.equal(testKey);
});
});

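Together with the keyPrefix tests in the Memcache and Memory suites below, this pins down getKey: stored keys take the form '<keyPrefix>:<key>', and an empty keyPrefix leaves the key untouched. A two-line illustration, assuming the in-memory limiter:

const RateLimiterMemory = require('rate-limiter-flexible/lib/RateLimiterMemory');

new RateLimiterMemory({ keyPrefix: 'login' }).getKey('10.0.0.1'); // 'login:10.0.0.1'
new RateLimiterMemory({ keyPrefix: '' }).getKey('10.0.0.1');      // '10.0.0.1'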

@@ -0,0 +1,221 @@
/* eslint-env mocha */
/* eslint-disable no-unused-expressions */
/* eslint-disable security/detect-object-injection */
const cluster = require('cluster');
const sinon = require('sinon');
const { describe, it, before, after } = require('mocha');
const { expect } = require('chai');
const { RateLimiterClusterMaster, RateLimiterCluster } = require('../lib/RateLimiterCluster');
const masterEvents = [];
const workerEvents = [];
const worker = {
send: (data) => {
workerEvents.forEach((cb) => {
cb(data);
});
},
};
global.process.on = (eventName, cb) => {
if (eventName === 'message') {
workerEvents.push(cb);
}
};
global.process.send = (data) => {
masterEvents.forEach((cb) => {
cb(worker, data);
});
};
describe('RateLimiterCluster', function RateLimiterClusterTest() {
let rateLimiterClusterMaster;
let clusterStubOn;
this.timeout(5000);
before(() => {
clusterStubOn = sinon.stub(cluster, 'on').callsFake((eventName, cb) => {
masterEvents.push(cb);
});
rateLimiterClusterMaster = new RateLimiterClusterMaster();
});
after(() => {
clusterStubOn.restore();
});
it('master must be singleton', () => {
const rateLimiterClusterMaster2 = new RateLimiterClusterMaster();
expect(rateLimiterClusterMaster2 === rateLimiterClusterMaster).to.equal(true);
});
it('consume 1 point', (done) => {
const key = 'consume1';
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix: key });
rateLimiterCluster.consume(key)
.then((res) => {
expect(res.remainingPoints).to.equal(1);
done();
})
.catch((rej) => {
done(rej);
});
});
it('reject on consuming more than maximum points', (done) => {
const key = 'reject';
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix: key });
rateLimiterCluster.consume(key, 3)
.then(() => {
})
.catch((rejRes) => {
expect(rejRes.remainingPoints).to.equal(0);
done();
});
});
it('execute evenly over duration', (done) => {
const key = 'evenly';
const rateLimiterCluster = new RateLimiterCluster({
points: 2, duration: 5, execEvenly: true, keyPrefix: key,
});
rateLimiterCluster.consume(key)
.then(() => {
const timeFirstConsume = Date.now();
rateLimiterCluster.consume(key)
.then(() => {
/* Second consume should be delayed more than 2 seconds
Explanation:
1) consume at 0ms, remaining duration = 4444ms
2) delayed consume for (4444 / (0 + 2)) ~= 2222ms, where 2 is a fixed value
, because it mustn't delay in the beginning and in the end of duration
3) consume after 2222ms by timeout
*/
expect((Date.now() - timeFirstConsume) > 2000).to.equal(true);
done();
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('use keyPrefix from options', (done) => {
const key = 'use keyPrefix from options';
const keyPrefix = 'test';
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix });
rateLimiterCluster.consume(key)
.then(() => {
expect(typeof rateLimiterClusterMaster._rateLimiters[keyPrefix]._memoryStorage._storage[`${keyPrefix}:${key}`]
!== 'undefined').to.equal(true);
done();
})
.catch((rejRes) => {
done(rejRes);
});
});
it('create 2 rate limiters depending on keyPrefix', (done) => {
const keyPrefixes = ['create1', 'create2'];
const rateLimiterClusterprocess1 = new RateLimiterCluster({ keyPrefix: keyPrefixes[0] });
const rateLimiterClusterprocess2 = new RateLimiterCluster({ keyPrefix: keyPrefixes[1] });
rateLimiterClusterprocess1.consume('key1')
.then(() => {
rateLimiterClusterprocess2.consume('key2')
.then(() => {
const createdKeyLimiters = Object.keys(rateLimiterClusterMaster._rateLimiters);
expect(createdKeyLimiters.indexOf(keyPrefixes[0]) !== -1 && createdKeyLimiters.indexOf(keyPrefixes[1]) !== -1).to.equal(true);
done();
});
});
});
it('penalty', (done) => {
const key = 'penalty';
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix: key });
rateLimiterCluster.penalty(key)
.then((res) => {
expect(res.remainingPoints).to.equal(1);
done();
});
});
it('reward', (done) => {
const key = 'reward';
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix: key });
rateLimiterCluster.consume(key)
.then(() => {
rateLimiterCluster.reward(key)
.then((res) => {
expect(res.remainingPoints).to.equal(2);
done();
});
});
});
it('block', (done) => {
const key = 'block';
const rateLimiterCluster = new RateLimiterCluster({ points: 1, duration: 1, keyPrefix: key });
rateLimiterCluster.block(key, 2)
.then((res) => {
expect(res.msBeforeNext > 1000 && res.msBeforeNext <= 2000).to.equal(true);
done();
});
});
it('get', (done) => {
const key = 'get';
const rateLimiterCluster = new RateLimiterCluster({ points: 1, duration: 1, keyPrefix: key });
rateLimiterCluster.consume(key)
.then(() => {
rateLimiterCluster.get(key)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
});
});
});
it('get null', (done) => {
const key = 'getnull';
const rateLimiterCluster = new RateLimiterCluster({ points: 1, duration: 1, keyPrefix: key });
rateLimiterCluster.get(key)
.then((res) => {
expect(res).to.equal(null);
done();
});
});
it('delete', (done) => {
const key = 'deletetrue';
const rateLimiterCluster = new RateLimiterCluster({ points: 1, duration: 10, keyPrefix: key });
rateLimiterCluster.consume(key)
.then(() => {
rateLimiterCluster.delete(key)
.then((res) => {
expect(res).to.equal(true);
done();
});
});
});
it('consume applies options.customDuration to set expire', (done) => {
const key = 'consume.customDuration';
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix: key });
rateLimiterCluster.consume(key, 1, { customDuration: 1 })
.then((res) => {
expect(res.msBeforeNext <= 1000).to.be.true;
done();
})
.catch((rej) => {
done(rej);
});
});
});

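These tests emulate the cluster transport by stubbing cluster.on, process.on and process.send; in a real deployment the split is: the master process instantiates RateLimiterClusterMaster (which owns one in-memory limiter per keyPrefix), and each worker creates RateLimiterCluster instances that proxy every call to the master over IPC. A hedged sketch of that layout; the keyPrefix, points and key values are illustrative:

const cluster = require('cluster');
const { RateLimiterClusterMaster, RateLimiterCluster } = require('rate-limiter-flexible/lib/RateLimiterCluster');

if (cluster.isMaster) {
  // The master holds the real counters, one limiter per keyPrefix used by the workers.
  new RateLimiterClusterMaster();
  cluster.fork();
} else {
  // Workers forward every call over process.send / 'message' events.
  const limiter = new RateLimiterCluster({ keyPrefix: 'api', points: 100, duration: 1 });
  limiter.consume('client-key')
    .then(res => console.log('allowed, remaining', res.remainingPoints))
    .catch(rej => console.log('blocked for', rej.msBeforeNext, 'ms'));
}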

@@ -0,0 +1,532 @@
const { describe, it, beforeEach } = require('mocha');
const { expect } = require('chai');
const RateLimiterMemcache = require('../lib/RateLimiterMemcache');
const Memcached = require('memcached-mock');
describe('RateLimiterMemcache', function RateLimiterMemcacheTest() {
this.timeout(5000);
const memcacheMockClient = new Memcached('localhost:11211');
const memcacheUnavailableClient = new Proxy({}, {
get: () => (...args) => {
const cb = args.pop();
cb(Error('Server Unavailable'));
},
});
beforeEach((done) => {
memcacheMockClient.flush(done);
});
it('consume 1 point', (done) => {
const testKey = 'consume1';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 2,
duration: 5,
});
rateLimiter
.consume(testKey)
.then(() => {
memcacheMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
if (!err) {
expect(consumedPoints).to.equal(1);
done();
}
});
})
.catch((err) => {
done(err);
});
});
it('rejected when consume more than maximum points', (done) => {
const testKey = 'consume2';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 1,
duration: 5,
});
rateLimiter
.consume(testKey, 2)
.then(() => {
})
.catch((rejRes) => {
expect(rejRes.msBeforeNext >= 0).to.equal(true);
done();
});
});
it('execute evenly over duration', (done) => {
const testKey = 'consumeEvenly';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 2,
duration: 5,
execEvenly: true,
});
rateLimiter
.consume(testKey)
.then(() => {
const timeFirstConsume = Date.now();
rateLimiter
.consume(testKey)
.then(() => {
/* Second consume should be delayed more than 2 seconds
Explanation:
1) consume at 0ms, remaining duration = 5000ms
2) delayed consume for (4999 / (0 + 2)) ~= 2500ms, where 2 is a fixed value
, because it mustn't delay in the beginning and in the end of duration
3) consume after 2500ms by timeout
*/
const diff = Date.now() - timeFirstConsume;
expect(diff > 2400 && diff < 2600).to.equal(true);
done();
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('execute evenly over duration with minimum delay 20 ms', (done) => {
const testKey = 'consumeEvenlyMinDelay';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 100,
duration: 1,
execEvenly: true,
execEvenlyMinDelayMs: 20,
});
rateLimiter
.consume(testKey)
.then(() => {
const timeFirstConsume = Date.now();
rateLimiter
.consume(testKey)
.then(() => {
expect(Date.now() - timeFirstConsume >= 20).to.equal(true);
done();
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('makes penalty', (done) => {
const testKey = 'penalty1';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 3,
duration: 5,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter
.penalty(testKey)
.then(() => {
memcacheMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
if (!err) {
expect(consumedPoints).to.equal(2);
done();
}
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('reward points', (done) => {
const testKey = 'reward';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 1,
duration: 5,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter
.reward(testKey)
.then(() => {
memcacheMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
if (!err) {
expect(consumedPoints).to.equal(0);
done();
}
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('block key in memory when inMemory block options are set up', (done) => {
const testKey = 'blockmem';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 1,
duration: 5,
inmemoryBlockOnConsumed: 2, // @deprecated Kept to test backward compatibility
inmemoryBlockDuration: 10, // @deprecated Kept to test backward compatibility
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter
.consume(testKey)
.then(() => {
})
.catch((rejRes) => {
// msBeforeNext more than 5000, so key was blocked
expect(rejRes.msBeforeNext > 5000 && rejRes.remainingPoints === 0).to.equal(true);
done();
});
})
.catch((rejRes) => {
done(rejRes);
});
});
it('expire inMemory blocked key', (done) => {
const testKey = 'blockmem2';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 1,
duration: 1,
inMemoryBlockOnConsumed: 2,
inMemoryBlockDuration: 2,
});
// It blocks on the first consume, as consumed points exceed the available points
rateLimiter
.consume(testKey, 2)
.then(() => {
})
.catch(() => {
setTimeout(() => {
rateLimiter
.consume(testKey)
.then((res) => {
// Block expired
expect(res.msBeforeNext <= 1000 && res.remainingPoints === 0).to.equal(true);
done();
})
.catch((rejRes) => {
done(rejRes);
});
}, 2001);
});
});
it('throws error when inMemoryBlockOnConsumed is not set, but inMemoryBlockDuration is set', (done) => {
try {
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
inMemoryBlockDuration: 2,
});
rateLimiter.reward('test');
} catch (err) {
expect(err instanceof Error).to.equal(true);
done();
}
});
it('throws error when inMemoryBlockOnConsumed is less than points', (done) => {
try {
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 2,
inMemoryBlockOnConsumed: 1,
});
rateLimiter.reward('test');
} catch (err) {
expect(err instanceof Error).to.equal(true);
done();
}
});
it('use keyPrefix from options', () => {
const testKey = 'key';
const keyPrefix = 'test';
const rateLimiter = new RateLimiterMemcache({ keyPrefix, storeClient: memcacheMockClient });
expect(rateLimiter.getKey(testKey)).to.equal('test:key');
});
it('blocks key for block duration when consumed more than points', (done) => {
const testKey = 'block';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 1,
duration: 1,
blockDuration: 2,
});
rateLimiter
.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext > 1000).to.equal(true);
done();
});
});
it('block expires in blockDuration seconds', (done) => {
const testKey = 'blockexpires';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 1,
duration: 1,
blockDuration: 2,
});
rateLimiter
.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch(() => {
setTimeout(() => {
rateLimiter
.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch(() => {
done(Error('must resolve'));
});
}, 2000);
});
});
it('block custom key', (done) => {
const testKey = 'blockcustom';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 1,
duration: 1,
});
rateLimiter.block(testKey, 2).then(() => {
rateLimiter
.consume(testKey)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext > 1000 && rej.msBeforeNext <= 2000).to.equal(true);
done();
});
});
});
it('get points', (done) => {
const testKey = 'get';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 2,
duration: 1,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter
.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch(() => {
done(Error('get must not reject'));
});
})
.catch(() => {
done(Error('consume must not reject'));
});
});
it('get returns NULL if key is not set', (done) => {
const testKey = 'getnull';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 2,
duration: 1,
});
rateLimiter
.get(testKey)
.then((res) => {
expect(res).to.equal(null);
done();
})
.catch(() => {
done(Error('get must not reject'));
});
});
it('delete key and return true', (done) => {
const testKey = 'deletetrue';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 2,
duration: 10,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter
.delete(testKey)
.then((res) => {
expect(res).to.equal(true);
rateLimiter
.get(testKey)
.then((resGet) => {
expect(resGet).to.equal(null);
done();
});
});
});
});
it('delete returns false, if there is no key', (done) => {
const testKey = 'deletefalse';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 2,
duration: 10,
});
rateLimiter
.delete(testKey)
.then((res) => {
expect(res).to.equal(false);
rateLimiter
.get(testKey)
.then((resGet) => {
expect(resGet).to.equal(null);
done();
});
});
});
it('creates key and increments on 2 parallel requests', () => {
const testKey = 'parallel';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheMockClient,
points: 2,
duration: 1,
});
return Promise.all([
rateLimiter.consume(testKey),
rateLimiter.consume(testKey),
]).then((resAll) => {
expect(resAll[0].consumedPoints === 1 && resAll[1].consumedPoints === 2).to.equal(true);
});
});
it('rejected when MemcachedClient error', (done) => {
const testKey = 'memcacheerror';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheUnavailableClient,
points: 1,
duration: 5,
});
rateLimiter
.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch(() => {
done();
});
});
it('consume using insuranceLimiter when MemcachedClient error', (done) => {
const testKey = 'memcacheerror2';
const rateLimiter = new RateLimiterMemcache({
storeClient: memcacheUnavailableClient,
points: 1,
duration: 1,
insuranceLimiter: new RateLimiterMemcache({
points: 2,
duration: 2,
storeClient: memcacheMockClient,
}),
});
// Consume from insurance limiter with different options
rateLimiter
.consume(testKey)
.then((res) => {
expect(res.remainingPoints === 1 && res.msBeforeNext > 1000).to.equal(true);
done();
})
.catch((rejRes) => {
done(rejRes);
});
});
it('does not expire key if duration set to 0', (done) => {
const testKey = 'neverexpire';
const rateLimiter = new RateLimiterMemcache({ storeClient: memcacheMockClient, points: 2, duration: 0 });
rateLimiter.consume(testKey, 1)
.then(() => {
rateLimiter.consume(testKey, 1)
.then(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.msBeforeNext).to.equal(-1);
done();
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('block key forever, if secDuration is 0', (done) => {
const testKey = 'neverexpire';
const rateLimiter = new RateLimiterMemcache({ storeClient: memcacheMockClient, points: 1, duration: 1 });
rateLimiter.block(testKey, 0)
.then(() => {
setTimeout(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.msBeforeNext).to.equal(-1);
done();
});
}, 1000);
})
.catch((err) => {
done(err);
});
});
});

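Two behaviors above are worth calling out: inMemoryBlockOnConsumed/inMemoryBlockDuration short-circuit obviously blocked keys in process memory so Memcached is not hit on every request, and insuranceLimiter (with its own points and duration) answers whenever the Memcached client errors. A sketch against a real memcached client, assumed to expose the same callback API as the memcached-mock used here; all numbers are illustrative:

const Memcached = require('memcached'); // assumed real client; these tests use memcached-mock with the same API
const RateLimiterMemcache = require('rate-limiter-flexible/lib/RateLimiterMemcache');
const RateLimiterMemory = require('rate-limiter-flexible/lib/RateLimiterMemory');

const limiter = new RateLimiterMemcache({
  storeClient: new Memcached('localhost:11211'),
  points: 100,
  duration: 1,
  inMemoryBlockOnConsumed: 150, // stop querying Memcached once a key is clearly over the limit
  inMemoryBlockDuration: 10,
  insuranceLimiter: new RateLimiterMemory({ points: 50, duration: 1 }), // fallback when Memcached errors
});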

@@ -0,0 +1,391 @@
/* eslint-disable no-unused-expressions */
const { describe, it } = require('mocha');
const { expect } = require('chai');
const RateLimiterMemory = require('../lib/RateLimiterMemory');
describe('RateLimiterMemory with fixed window', function RateLimiterMemoryTest() {
this.timeout(5000);
it('consume 1 point', (done) => {
const testKey = 'consume1';
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 5 });
rateLimiterMemory.consume(testKey)
.then(() => {
const res = rateLimiterMemory._memoryStorage.get(rateLimiterMemory.getKey(testKey));
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
it('cannot consume more than maximum points', (done) => {
const testKey = 'consume2';
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
rateLimiterMemory.consume(testKey, 2)
.then(() => {})
.catch((rejRes) => {
expect(rejRes.msBeforeNext >= 0).to.equal(true);
done();
})
.catch((err) => {
done(err);
});
});
it('execute evenly over duration with minimum delay 20 ms', (done) => {
const testKey = 'consumeEvenlyMinDelay';
const rateLimiterMemory = new RateLimiterMemory({
points: 100, duration: 1, execEvenly: true, execEvenlyMinDelayMs: 20,
});
rateLimiterMemory.consume(testKey)
.then(() => {
const timeFirstConsume = Date.now();
rateLimiterMemory.consume(testKey)
.then(() => {
expect(Date.now() - timeFirstConsume >= 20).to.equal(true);
done();
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('execute evenly over duration', (done) => {
const testKey = 'consumeEvenly';
const rateLimiterMemory = new RateLimiterMemory({
points: 2, duration: 5, execEvenly: true, execEvenlyMinDelayMs: 1,
});
rateLimiterMemory.consume(testKey)
.then(() => {
const timeFirstConsume = Date.now();
rateLimiterMemory.consume(testKey)
.then(() => {
/* Second consume should be delayed more than 2 seconds
Explanation:
1) consume at 0ms, remaining duration = 5000ms
2) delayed consume for (4999 / (0 + 2)) ~= 2500ms, where 2 is a fixed value
, because it mustn't delay in the beginning and in the end of duration
3) consume after 2500ms by timeout
*/
const diff = Date.now() - timeFirstConsume;
expect(diff > 2400 && diff < 2600).to.equal(true);
done();
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('makes penalty', (done) => {
const testKey = 'penalty1';
const rateLimiterMemory = new RateLimiterMemory({ points: 3, duration: 5 });
rateLimiterMemory.consume(testKey)
.then(() => {
rateLimiterMemory.penalty(testKey)
.then(() => {
const res = rateLimiterMemory._memoryStorage.get(rateLimiterMemory.getKey(testKey));
expect(res.consumedPoints).to.equal(2);
done();
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('reward points', (done) => {
const testKey = 'reward1';
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
rateLimiterMemory.consume(testKey)
.then(() => {
rateLimiterMemory.reward(testKey)
.then(() => {
const res = rateLimiterMemory._memoryStorage.get(rateLimiterMemory.getKey(testKey));
expect(res.consumedPoints).to.equal(0);
done();
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('use keyPrefix from options', () => {
const testKey = 'key';
const keyPrefix = 'test';
const rateLimiterMemory = new RateLimiterMemory({ keyPrefix, points: 1, duration: 5 });
expect(rateLimiterMemory.getKey(testKey)).to.equal('test:key');
});
it('blocks key for block duration when consumed more than points', (done) => {
const testKey = 'block';
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 1, blockDuration: 2 });
rateLimiterMemory.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext > 1000).to.equal(true);
done();
});
});
it('does not block key a second time until block expires, no matter how many points are consumed', (done) => {
const testKey = 'donotblocktwice';
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 1, blockDuration: 2 });
rateLimiterMemory.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch(() => {
setTimeout(() => {
rateLimiterMemory.consume(testKey)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext < 1000).to.equal(true);
done();
});
}, 1001);
});
});
it('block expires in blockDuration seconds', (done) => {
const testKey = 'blockexpires';
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 1, blockDuration: 2 });
rateLimiterMemory.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch(() => {
setTimeout(() => {
rateLimiterMemory.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch(() => {
done(Error('must resolve'));
});
}, 2000);
});
});
it('block custom key', (done) => {
const testKey = 'blockcustom';
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 1 });
rateLimiterMemory.block(testKey, 2);
rateLimiterMemory.consume(testKey)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext > 1000 && rej.msBeforeNext <= 2000).to.equal(true);
done();
});
});
it('get by key', (done) => {
const testKey = 'get';
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 5 });
rateLimiterMemory.consume(testKey)
.then(() => {
rateLimiterMemory.get(testKey)
.then((res) => {
expect(res.remainingPoints).to.equal(1);
done();
});
})
.catch(() => {
done(Error('must not reject'));
});
});
it('get resolves null if key is not set', (done) => {
const testKey = 'getbynotexistingkey';
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 5 });
rateLimiterMemory.get(testKey)
.then((res) => {
expect(res).to.equal(null);
done();
});
});
it('delete resolves true if key is set', (done) => {
const testKey = 'deletekey';
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 5 });
rateLimiterMemory.consume(testKey)
.then(() => {
rateLimiterMemory.delete(testKey)
.then((res) => {
expect(res).to.equal(true);
done();
}).catch(() => {
done(Error('must not reject'));
});
})
.catch(() => {
done(Error('must not reject'));
});
});
it('delete resolves false if key is not set', (done) => {
const testKey = 'deletekey2';
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 5 });
rateLimiterMemory.delete(testKey)
.then((res) => {
expect(res).to.equal(false);
done();
}).catch(() => {
done(Error('must not reject'));
});
});
it('consume applies options.customDuration to set expire', (done) => {
const testKey = 'options.customDuration';
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
rateLimiterMemory.consume(testKey, 1, { customDuration: 1 })
.then((res) => {
expect(res.msBeforeNext <= 1000).to.be.true;
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('consume applies options.customDuration to set not expiring key', (done) => {
const testKey = 'options.customDuration.forever';
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
rateLimiterMemory.consume(testKey, 1, { customDuration: 0 })
.then((res) => {
expect(res.msBeforeNext === -1).to.be.true;
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('penalty applies options.customDuration to set expire', (done) => {
const testKey = 'options.customDuration';
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
rateLimiterMemory.penalty(testKey, 1, { customDuration: 1 })
.then((res) => {
expect(res.msBeforeNext <= 1000).to.be.true;
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('reward applies options.customDuration to set expire', (done) => {
const testKey = 'options.customDuration';
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
rateLimiterMemory.reward(testKey, 1, { customDuration: 1 })
.then((res) => {
expect(res.msBeforeNext <= 1000).to.be.true;
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('does not expire key if duration set to 0', (done) => {
const testKey = 'neverexpire';
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 0 });
rateLimiterMemory.consume(testKey, 1)
.then(() => {
rateLimiterMemory.consume(testKey, 1)
.then(() => {
rateLimiterMemory.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.msBeforeNext).to.equal(-1);
done();
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('block key forever, if secDuration is 0', (done) => {
const testKey = 'neverexpire';
const rateLimiter = new RateLimiterMemory({ points: 1, duration: 1 });
rateLimiter.block(testKey, 0)
.then(() => {
setTimeout(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.msBeforeNext).to.equal(-1);
done();
});
}, 1000);
})
.catch((err) => {
done(err);
});
});
it('set points by key', (done) => {
const testKey = 'set';
const rateLimiter = new RateLimiterMemory({ points: 10, duration: 1 });
rateLimiter.set(testKey, 12)
.then(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(12);
expect(res.remainingPoints).to.equal(0);
done();
});
})
.catch((err) => {
done(err);
});
});
it('set points by key forever', (done) => {
const testKey = 'setforever';
const rateLimiter = new RateLimiterMemory({ points: 10, duration: 1 });
rateLimiter.set(testKey, 12, 0)
.then(() => {
setTimeout(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(12);
expect(res.msBeforeNext).to.equal(-1);
done();
});
}, 1100);
})
.catch((err) => {
done(err);
});
});
});

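The in-memory suite exercises the whole single-instance API: consume, penalty, reward, block, get, delete and set, plus per-call overrides through options.customDuration and "forever" semantics when a duration of 0 is passed. A compact sketch of that surface under those assumptions; point values are illustrative:

const RateLimiterMemory = require('rate-limiter-flexible/lib/RateLimiterMemory');

const limiter = new RateLimiterMemory({ points: 5, duration: 1, blockDuration: 10 });

async function handle(key) {
  try {
    const res = await limiter.consume(key); // rejects with a RateLimiterRes once points are spent
    return { allowed: true, remaining: res.remainingPoints };
  } catch (rej) {
    return { allowed: false, retryAfterMs: rej.msBeforeNext };
  }
}

// Administrative calls demonstrated by the tests:
// limiter.block(key, 0)   -> block forever (get() then reports msBeforeNext === -1)
// limiter.set(key, 12, 0) -> pin consumed points without expiry
// limiter.delete(key)     -> resolves true only if the key existed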

@@ -0,0 +1,675 @@
/* eslint-disable no-new */
const {
describe, it, beforeEach, before,
} = require('mocha');
const { expect } = require('chai');
const sinon = require('sinon');
const RateLimiterMongo = require('../lib/RateLimiterMongo');
const RateLimiterMemory = require('../lib/RateLimiterMemory');
describe('RateLimiterMongo with fixed window', function RateLimiterMongoTest() {
this.timeout(5000);
let mongoClient;
let mongoClientV4;
let mongoClientStub;
let mongoDb;
let mongoCollection;
let stubMongoDbCollection;
before(() => {
mongoClient = {
db: () => {},
topology: {},
};
mongoClientV4 = {
collection: () => {},
client: {},
};
mongoDb = {
collection: () => {},
};
stubMongoDbCollection = sinon.stub(mongoDb, 'collection').callsFake(() => mongoCollection);
mongoClientStub = sinon.stub(mongoClient, 'db').callsFake(() => mongoDb);
sinon.stub(mongoClientV4, 'collection').callsFake(() => mongoCollection);
});
beforeEach(() => {
mongoCollection = {
createIndex: () => {},
findOneAndUpdate: () => {},
findOne: () => {},
deleteOne: () => {},
};
sinon.stub(mongoCollection, 'createIndex').callsFake(() => {});
});
it('throws error if storeClient is not set', (done) => {
try {
new RateLimiterMongo({ points: 2, duration: 5 });
done(Error('constructor must throw when storeClient is not set'));
} catch (err) {
done();
}
});
it('consume 1 point', (done) => {
const testKey = 'consume1';
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
const res = {
value: {
points: 1,
expire: 5000,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
rateLimiter.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
it('rejected when consume more than maximum points', (done) => {
const testKey = 'consumerej';
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
const res = {
value: {
points: 2,
expire: 5000,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 1, duration: 5 });
rateLimiter.consume(testKey, 2)
.then(() => {
done(Error('have to reject'));
})
.catch((err) => {
expect(err.consumedPoints).to.equal(2);
done();
});
});
it('makes penalty', (done) => {
const testKey = 'penalty1';
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
const res = {
value: {
points: 1,
expire: 5000,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
rateLimiter.penalty(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
it('reward points', (done) => {
const testKey = 'reward1';
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
const res = {
value: {
points: -1,
expire: 5000,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
rateLimiter.reward(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(-1);
done();
})
.catch((err) => {
done(err);
});
});
it('consume using insuranceLimiter when Mongo error', (done) => {
const testKey = 'errorinsurance';
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => Promise.reject(Error('Mongo error')));
const rateLimiter = new RateLimiterMongo({
storeClient: mongoClient,
insuranceLimiter: new RateLimiterMemory({
points: 2,
duration: 2,
}),
});
rateLimiter.consume(testKey)
.then((res) => {
expect(res.remainingPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
it('block key in memory when inMemory block options are set up', (done) => {
const testKey = 'blockmem';
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
const res = {
value: {
points: 11,
expire: 5000,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({
storeClient: mongoClient,
points: 2,
duration: 5,
inMemoryBlockOnConsumed: 10,
inMemoryBlockDuration: 10,
});
rateLimiter.consume(testKey)
.then(() => {
done(Error('have to reject'));
})
.catch(() => {
expect(rateLimiter._inMemoryBlockedKeys.msBeforeExpire(rateLimiter.getKey(testKey)) > 0).to.equal(true);
done();
});
});
it('blocks key for block duration when consumed more than points', (done) => {
const testKey = 'block';
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
const res = {
value: {
points: 2,
expire: 1000,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({
storeClient: mongoClient, points: 1, duration: 1, blockDuration: 2,
});
rateLimiter.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext > 1000).to.equal(true);
done();
});
});
it('block using insuranceLimiter when Mongo error', (done) => {
const testKey = 'mongoerrorblock';
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => Promise.reject(Error('Mongo error')));
const rateLimiter = new RateLimiterMongo({
storeClient: mongoClient,
points: 1,
duration: 1,
blockDuration: 2,
insuranceLimiter: new RateLimiterMemory({
points: 1,
duration: 1,
}),
});
rateLimiter.block(testKey, 2)
.then((res) => {
expect(res.msBeforeNext > 1000 && res.msBeforeNext <= 2000).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('return correct data with _getRateLimiterRes', () => {
const rateLimiter = new RateLimiterMongo({ points: 5, storeClient: mongoClient });
const res = rateLimiter._getRateLimiterRes('test', 1, {
value: {
points: 3,
expire: new Date(Date.now() + 1000).toISOString(),
},
});
expect(res.msBeforeNext <= 1000
&& res.consumedPoints === 3
&& res.isFirstInDuration === false
&& res.remainingPoints === 2).to.equal(true);
});
it('get points', (done) => {
const testKey = 'get';
sinon.stub(mongoCollection, 'findOne').callsFake(() => {
const res = {
value: {
points: 1,
expire: 1000,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({
storeClient: mongoClient, points: 1, duration: 1,
});
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
it('get points return NULL if key is not set', (done) => {
const testKey = 'getnull';
sinon.stub(mongoCollection, 'findOne').callsFake(() => {
const res = null;
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({
storeClient: mongoClient, points: 1, duration: 1,
});
rateLimiter.get(testKey)
.then((res) => {
expect(res).to.equal(null);
done();
})
.catch((err) => {
done(err);
});
});
it('get points return NULL if key is not set and store returns undefined', (done) => {
const testKey = 'getnull';
sinon.stub(mongoCollection, 'findOne').callsFake(() => {
return Promise.resolve(undefined);
});
const rateLimiter = new RateLimiterMongo({
storeClient: mongoClient, points: 1, duration: 1,
});
rateLimiter.get(testKey)
.then((res) => {
expect(res).to.equal(null);
done();
})
.catch((err) => {
done(err);
});
});
it('use dbName from options if db is function', () => {
mongoClientStub.restore();
mongoClientStub = sinon.stub(mongoClient, 'db').callsFake((dbName) => {
expect(dbName).to.equal('test');
return mongoDb;
});
new RateLimiterMongo({
storeClient: mongoClient, dbName: 'test',
});
mongoClientStub.restore();
mongoClientStub = sinon.stub(mongoClient, 'db').callsFake(() => mongoDb);
});
it('use collection from client instead of db if Mongoose in use', () => {
const createIndex = sinon.spy();
const mongooseConnection = {
collection: () => ({
createIndex,
}),
};
new RateLimiterMongo({
storeClient: mongooseConnection,
});
expect(createIndex.called).to.equal(true);
});
it('delete key and return true', (done) => {
const testKey = 'deletetrue';
sinon.stub(mongoCollection, 'deleteOne').callsFake(() => Promise.resolve({
deletedCount: 1,
}));
const rateLimiter = new RateLimiterMongo({
storeClient: mongoClient, points: 1, duration: 1, blockDuration: 2,
});
rateLimiter.delete(testKey)
.then((res) => {
expect(res).to.equal(true);
done();
});
});
it('delete returns false, if there is no key', (done) => {
const testKey = 'deletefalse';
sinon.stub(mongoCollection, 'deleteOne').callsFake(() => Promise.resolve({
result: {
n: 0,
},
}));
const rateLimiter = new RateLimiterMongo({
storeClient: mongoClient, points: 1, duration: 1, blockDuration: 2,
});
rateLimiter.delete(testKey)
.then((res) => {
expect(res).to.equal(false);
done();
});
});
it('uses tableName option to create collection', (done) => {
const tableName = 'collection_name';
stubMongoDbCollection.restore();
stubMongoDbCollection = sinon.stub(mongoDb, 'collection').callsFake((name) => {
expect(name).to.equal(tableName);
stubMongoDbCollection.restore();
stubMongoDbCollection = sinon.stub(mongoDb, 'collection').callsFake(() => mongoCollection);
done();
return mongoCollection;
});
const client = {
db: () => mongoDb,
};
new RateLimiterMongo({
storeClient: client,
tableName,
});
});
it('_upsert adds options.attrs to where clause to find document by additional attributes in conjunction with key', (done) => {
const testKey = '_upsert';
const testAttrs = {
country: 'country1',
};
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake((where) => {
expect(where.country).to.equal(testAttrs.country);
done();
return Promise.resolve({
value: {
points: 1,
expire: 5000,
},
});
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
rateLimiter.consume(testKey, 1, { attrs: testAttrs })
.catch((err) => {
done(err);
});
});
it('forced _upsert adds options.attrs to where clause to find document by additional attributes in conjunction with key', (done) => {
const testKey = '_upsertforce';
const testAttrs = {
country: 'country2',
};
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake((where) => {
expect(where.country).to.equal(testAttrs.country);
done();
return Promise.resolve({
value: {
points: 1,
expire: 5000,
},
});
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
rateLimiter.block(testKey, 1, { attrs: testAttrs })
.catch((err) => {
done(err);
});
});
it('_get adds options.attrs to where clause to find document by additional attributes in conjunction with key', (done) => {
const testKey = '_get';
const testAttrs = {
country: 'country3',
};
sinon.stub(mongoCollection, 'findOne').callsFake((where) => {
expect(where.country).to.equal(testAttrs.country);
done();
return Promise.resolve({
value: {
points: 1,
expire: 5000,
},
});
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
rateLimiter.get(testKey, { attrs: testAttrs });
});
it('_delete adds options.attrs to where clause to find document by additional attributes in conjunction with key', (done) => {
const testKey = '_delete';
const testAttrs = {
country: 'country4',
};
sinon.stub(mongoCollection, 'deleteOne').callsFake((where) => {
expect(where.country).to.equal(testAttrs.country);
done();
return Promise.resolve({
result: {
n: 0,
},
});
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
rateLimiter.delete(testKey, { attrs: testAttrs });
});
it('set indexKeyPrefix empty {} if not provided', () => {
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
expect(Object.keys(rateLimiter.indexKeyPrefix).length).to.equal(0);
});
it('does not expire key if duration set to 0', (done) => {
const testKey = 'neverexpire';
const stubFindOneAndUpdate = sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
const res = {
value: {
points: 1,
expire: null,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 0 });
rateLimiter.consume(testKey, 1)
.then(() => {
stubFindOneAndUpdate.restore();
const stubFindOneAndUpdate2 = sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
const res = {
value: {
points: 2,
expire: null,
},
};
return Promise.resolve(res);
});
rateLimiter.consume(testKey, 1)
.then(() => {
stubFindOneAndUpdate2.restore();
const stubFindOne = sinon.stub(mongoCollection, 'findOne').callsFake(() => Promise.resolve({
value: {
points: 2,
expire: null,
},
}));
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.msBeforeNext).to.equal(-1);
stubFindOne.restore();
done();
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('block key forever, if secDuration is 0', (done) => {
const testKey = 'neverexpire';
const stubFindOneAndUpdate = sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
const res = {
value: {
points: 2,
expire: null,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 1, duration: 1 });
rateLimiter.block(testKey, 0)
.then(() => {
setTimeout(() => {
stubFindOneAndUpdate.restore();
const stubFindOne = sinon.stub(mongoCollection, 'findOne').callsFake(() => Promise.resolve({
value: {
points: 2,
expire: null,
},
}));
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.msBeforeNext).to.equal(-1);
stubFindOne.restore();
done();
});
}, 1000);
})
.catch((err) => {
done(err);
});
});
it('consume 1 point (driver v3)', (done) => {
const testKey = 'consume1v3';
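// Driver v3 uses { returnOriginal: false }, while driver v4+ uses { returnDocument: 'after' };
// the stubs in the next tests assert the limiter picks the right option for the detected version.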
sinon.stub(mongoClient, 'topology').value({ s: { options: { metadata: { driver: { version: '3.6' } } } } });
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake((where, upsertData, upsertOptions) => {
expect(upsertOptions.returnOriginal).to.equal(false);
const res = {
value: {
points: 1,
expire: 5000,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
rateLimiter.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
it('consume 1 point (driver v4)', (done) => {
const testKey = 'consume1v4';
sinon.stub(mongoClient, 'topology').value({ s: { options: { metadata: { driver: { version: '4.0' } } } } });
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake((where, upsertData, upsertOptions) => {
expect(upsertOptions.returnDocument).to.equal('after');
const res = {
value: {
points: 1,
expire: 5000,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
rateLimiter.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
it('consume 1 point (driver v4.1.3)', (done) => {
const testKey = 'consume1v4.1.3';
sinon.stub(mongoClientV4, 'client').value({ topology: { s: { options: { metadata: { driver: { version: '4.1.3' } } } } } });
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake((where, upsertData, upsertOptions) => {
expect(upsertOptions.returnDocument).to.equal('after');
const res = {
value: {
points: 1,
expire: 5000,
},
};
return Promise.resolve(res);
});
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClientV4, points: 2, duration: 5 });
rateLimiter.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
});

View File

@@ -0,0 +1,416 @@
const {
describe, it, beforeEach, afterEach,
} = require('mocha');
const { expect } = require('chai');
const sinon = require('sinon');
const RateLimiterMySQL = require('../lib/RateLimiterMySQL');
describe('RateLimiterMySQL with fixed window', function RateLimiterMySQLTest() {
this.timeout(5000);
const mysqlClient = {
query: () => {},
};
let mysqlClientStub;
beforeEach(() => {
mysqlClientStub = sinon.stub(mysqlClient, 'query').callsFake((q, cb) => {
cb();
});
});
afterEach(() => {
mysqlClientStub.restore();
});
it('call back with error if can not create db or table', (done) => {
mysqlClientStub.restore();
sinon.stub(mysqlClient, 'query').callsFake((q, cb) => {
cb(Error('test'));
});
const rateLimiter = new RateLimiterMySQL({ // eslint-disable-line
storeClient: mysqlClient, storeType: 'connection', points: 2, duration: 5,
}, (e) => {
expect(e instanceof Error).to.equal(true);
done();
});
});
it('get connection from pool', (done) => {
const poolConnectionReleaseFn = sinon.spy();
const mysqlPoolClient = {
query: (q, data, cb) => {
const res = [
{ points: 1, expire: 1000 },
];
cb(null, res);
},
getConnection: (cb) => { cb(null, mysqlPoolClient); },
release: poolConnectionReleaseFn,
};
let rateLimiter;
process.on('unhandledRejection', (err) => console.error(err));
new Promise((resolve) => {
rateLimiter = new RateLimiterMySQL({
storeClient: mysqlPoolClient, storeType: 'pool', points: 2, duration: 5, tableCreated: true,
}, () => {
resolve();
});
}).then(() => {
rateLimiter.get('testPool')
.then((rlRes) => {
expect(poolConnectionReleaseFn.calledOnce).to.equal(true);
expect(rlRes.consumedPoints).to.equal(1);
done();
});
}).catch((err) => {
done(err);
});
});
it('do not create a table if tableCreated option is true', (done) => {
const mysqlClientTableCreated = {
query: () => {},
};
sinon.spy(mysqlClientTableCreated, 'query');
const rateLimiter = new RateLimiterMySQL({ // eslint-disable-line
storeClient: mysqlClientTableCreated, storeType: 'connection', tableCreated: true,
});
setTimeout(() => {
expect(mysqlClientTableCreated.query.callCount).to.equal(0);
done();
}, 1000);
});
it('callback called even if tableCreated option is true', (done) => {
mysqlClientStub.restore();
sinon.stub(mysqlClient, 'query').callsFake((q, cb) => {
cb();
});
const rateLimiter = new RateLimiterMySQL({ // eslint-disable-line
storeClient: mysqlClient, storeType: 'connection', tableCreated: true,
}, () => {
done();
});
});
it('consume 1 point', (done) => {
const testKey = 'consume1';
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient, storeType: 'connection', points: 2, duration: 5,
}, () => {
mysqlClientStub.restore();
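// The stub mimics mysql's query(sql, values, cb): when values is an array it is a data query
// and the callback receives rows; otherwise the second argument is itself the callback (DDL queries).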
sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
const res = [
{ points: 1, expire: 5000 },
];
if (Array.isArray(data)) {
cb(null, res);
} else {
data(null);
}
});
rateLimiter.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
});
it('rejected when consume more than maximum points', (done) => {
const testKey = 'consumerej';
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 5,
}, () => {
mysqlClientStub.restore();
sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
const res = [
{ points: 2, expire: 5000 },
];
if (Array.isArray(data)) {
cb(null, res);
} else {
data(null);
}
});
rateLimiter.consume(testKey, 2)
.then(() => {
done(Error('have to reject'));
})
.catch((err) => {
expect(err.consumedPoints).to.equal(2);
done();
});
});
});
it('blocks key for block duration when consumed more than points', (done) => {
const testKey = 'block';
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1, blockDuration: 2,
}, () => {
mysqlClientStub.restore();
sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
const res = [
{ points: 2, expire: 1000 },
];
if (Array.isArray(data)) {
cb(null, res);
} else {
data(null);
}
});
rateLimiter.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext > 1000).to.equal(true);
done();
});
});
});
it('return correct data with _getRateLimiterRes', () => {
const rateLimiter = new RateLimiterMySQL({ points: 5, storeClient: mysqlClient, storeType: 'connection' });
const res = rateLimiter._getRateLimiterRes('test', 1, [
{ points: 3, expire: Date.now() + 1000 },
]);
expect(res.msBeforeNext <= 1000
&& res.consumedPoints === 3
&& res.isFirstInDuration === false
&& res.remainingPoints === 2).to.equal(true);
});
it('get points', (done) => {
const testKey = 'get';
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1,
}, () => {
mysqlClientStub.restore();
sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
const res = [
{ points: 1, expire: 1000 },
];
cb(null, res);
});
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
});
it('get points return NULL if key is not set', (done) => {
const testKey = 'getnull';
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1,
}, () => {
mysqlClientStub.restore();
sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
cb(null, []);
});
rateLimiter.get(testKey)
.then((res) => {
expect(res).to.equal(null);
done();
})
.catch((err) => {
done(err);
});
});
});
it('delete key and return true', (done) => {
const testKey = 'deletetrue';
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1,
}, () => {
mysqlClientStub.restore();
sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
cb(null, { affectedRows: 1 });
});
rateLimiter.delete(testKey)
.then((res) => {
expect(res).to.equal(true);
done();
});
});
});
it('delete returns false, if there is no key', (done) => {
const testKey = 'deletefalse';
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1,
}, () => {
mysqlClientStub.restore();
sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
cb(null, { affectedRows: 0 });
});
rateLimiter.delete(testKey)
.then((res) => {
expect(res).to.equal(false);
done();
});
});
});
it('delete rejects on error', (done) => {
const testKey = 'deleteerr';
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1,
}, () => {
mysqlClientStub.restore();
sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
cb(new Error('test'));
});
rateLimiter.delete(testKey)
.catch(() => {
done();
});
});
});
it('clearExpired method uses private _getConnection to get connection', (done) => {
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient, storeType: 'sequelize',
}, () => {
const rlStub = sinon.stub(rateLimiter, '_getConnection').callsFake(() => {
done();
return Promise.resolve(mysqlClient);
});
rateLimiter.clearExpired(1);
rlStub.restore();
});
});
it('does not expire key if duration set to 0', (done) => {
const testKey = 'neverexpire';
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient,
storeType: 'connection',
points: 2,
duration: 0,
}, () => {
mysqlClientStub.restore();
const queryStub = sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
const res = [
{ points: 1, expire: null },
];
if (Array.isArray(data)) {
cb(null, res);
} else {
data(null);
}
});
rateLimiter.consume(testKey, 1)
.then(() => {
queryStub.restore();
const queryStub2 = sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
const res = [
{ points: 2, expire: null },
];
if (Array.isArray(data)) {
cb(null, res);
} else {
data(null);
}
});
rateLimiter.consume(testKey, 1)
.then(() => {
queryStub2.restore();
sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
const res = [
{ points: 2, expire: null },
];
if (Array.isArray(data)) {
cb(null, res);
} else {
data(null);
}
});
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.msBeforeNext).to.equal(-1);
done();
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
});
it('block key forever, if secDuration is 0', (done) => {
const testKey = 'neverexpire';
const rateLimiter = new RateLimiterMySQL({
storeClient: mysqlClient,
storeType: 'connection',
points: 2,
duration: 1,
}, () => {
mysqlClientStub.restore();
const queryStub = sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
const res = [
{ points: 3, expire: null },
];
if (Array.isArray(data)) {
cb(null, res);
} else {
data(null);
}
});
rateLimiter.block(testKey, 0)
.then(() => {
setTimeout(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(3);
expect(res.msBeforeNext).to.equal(-1);
queryStub.restore();
done();
});
}, 1000);
})
.catch((err) => {
done(err);
});
});
});
});

View File

@@ -0,0 +1,617 @@
/* eslint-disable no-new */
const {
describe, it, beforeEach, afterEach,
} = require('mocha');
const { expect } = require('chai');
const sinon = require('sinon');
const RateLimiterPostgres = require('../lib/RateLimiterPostgres');
const RateLimiterMemory = require('../lib/RateLimiterMemory');
describe('RateLimiterPostgres with fixed window', function RateLimiterPostgresTest() {
this.timeout(5000);
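// These tests stub pg's client.query(); storeType identifies the kind of storeClient passed
// ('client', 'pool', 'sequelize', 'knex', 'typeorm'). When omitted it is inferred from the constructor
// name, and it mainly affects how connections are acquired and released (covered further down).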
const pgClient = {
query: () => {},
};
let pgClientStub;
beforeEach(() => {
pgClientStub = sinon.stub(pgClient, 'query').callsFake(() => Promise.resolve());
});
afterEach(() => {
pgClientStub.restore();
});
it('throw error if can not create table', (done) => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').callsFake(() => Promise.reject(Error('test')));
new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
}, (e) => {
expect(e instanceof Error).to.equal(true);
done();
});
});
it('do not create a table if tableCreated option is true', (done) => {
const pgClientTableCreated = {
query: () => {},
};
sinon.spy(pgClientTableCreated, 'query');
const rateLimiter = new RateLimiterPostgres({ // eslint-disable-line
storeClient: pgClientTableCreated, storeType: 'client', tableCreated: true,
});
setTimeout(() => {
expect(pgClientTableCreated.query.callCount).to.equal(0);
done();
}, 1000);
});
it('callback called even if tableCreated option is true', (done) => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').callsFake(() => Promise.resolve());
new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', tableCreated: true,
}, () => {
done();
});
});
it('consume 1 point', (done) => {
const testKey = 'consume1';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
}, () => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').resolves({
rows: [{ points: 1, expire: 5000 }],
});
rateLimiter.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
});
it('rejected when consume more than maximum points', (done) => {
const testKey = 'consumerej';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', points: 1, duration: 5,
}, () => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').resolves({
rows: [{ points: 2, expire: 5000 }],
});
rateLimiter.consume(testKey, 2)
.then(() => {
done(Error('have to reject'));
})
.catch((err) => {
expect(err.consumedPoints).to.equal(2);
done();
});
});
});
it('blocks key for block duration when consumed more than points', (done) => {
const testKey = 'block';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', points: 1, duration: 1, blockDuration: 2,
}, () => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').resolves({
rows: [{ points: 2, expire: 1000 }],
});
rateLimiter.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext > 1000).to.equal(true);
done();
});
});
});
it('return correct data with _getRateLimiterRes', () => {
const rateLimiter = new RateLimiterPostgres({ points: 5, storeClient: pgClient, storeType: 'client' });
const res = rateLimiter._getRateLimiterRes('test', 1, {
rows: [{ points: 3, expire: Date.now() + 1000 }],
});
expect(res.msBeforeNext <= 1000
&& res.consumedPoints === 3
&& res.isFirstInDuration === false
&& res.remainingPoints === 2).to.equal(true);
});
it('get points', (done) => {
const testKey = 'get';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
}, () => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').resolves({
rows: [{ points: 1, expire: 5000 }],
});
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch((err) => {
done(err);
});
});
});
it('get points return NULL if key is not set', (done) => {
const testKey = 'getnull';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
}, () => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').resolves({
rowCount: 0,
rows: [],
});
rateLimiter.get(testKey)
.then((res) => {
expect(res).to.equal(null);
done();
})
.catch((err) => {
done(err);
});
});
});
it('get points using insuranceLimiter on Postgres error', (done) => {
const testKey = 'geterror';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient,
storeType: 'client',
points: 1,
duration: 1,
insuranceLimiter: new RateLimiterMemory({
points: 1,
duration: 1,
}),
}, () => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').callsFake(() => Promise.reject(Error('test')));
rateLimiter.get(testKey)
.then((res) => {
expect(res).to.equal(null);
done();
})
.catch((err) => {
done(err);
});
});
});
it('block custom key using insuranceLimiter on Postgres error', (done) => {
const testKey = 'postgreserrorblock';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient,
storeType: 'client',
points: 1,
duration: 1,
insuranceLimiter: new RateLimiterMemory({
points: 1,
duration: 1,
}),
}, () => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').callsFake(() => Promise.reject(Error('test')));
rateLimiter.block(testKey, 3)
.then((res) => {
expect(res.msBeforeNext > 2000 && res.msBeforeNext <= 3000).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
});
it('delete key and return true', (done) => {
const testKey = 'deletetrue';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
}, () => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').resolves({
rowCount: 1,
});
rateLimiter.delete(testKey)
.then((res) => {
expect(res).to.equal(true);
done();
});
});
});
it('delete returns false, if there is no key', (done) => {
const testKey = 'deletefalse';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
}, () => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').resolves({
rowCount: 0,
});
rateLimiter.delete(testKey)
.then((res) => {
expect(res).to.equal(false);
done();
});
});
});
it('delete rejects on error', (done) => {
const testKey = 'deleteerr';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
}, () => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').rejects(new Error());
rateLimiter.delete(testKey)
.catch(() => {
done();
});
});
});
it('query sets unique prefix to prepared statement for every limiter table', (done) => {
let queryName1;
let rateLimiter1;
let rateLimiter2;
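// Two limiters pointed at different tables must produce distinct prepared-statement names,
// so their named queries in pg do not collide.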
Promise.all([
new Promise((resolve) => {
rateLimiter1 = new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', tableName: 'upsertqueryname1',
}, () => {
resolve();
});
}),
new Promise((resolve) => {
rateLimiter2 = new RateLimiterPostgres({
storeClient: pgClient, storeType: 'client', tableName: 'upsertqueryname2',
}, () => {
resolve();
});
}),
]).then(() => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').callsFake((q) => {
queryName1 = q.name;
return Promise.resolve({
rows: [{ points: 1, expire: 5000 }],
});
});
rateLimiter1.consume('test')
.then(() => {
pgClientStub.restore();
pgClientStub = sinon.stub(pgClient, 'query').callsFake((q) => {
expect(q.name).to.not.equal(queryName1);
done();
return Promise.resolve({
rows: [{ points: 1, expire: 5000 }],
});
});
rateLimiter2.consume('test');
});
});
});
it('set client type to "client" by constructor name for Client', (done) => {
class Client {
Client() {}
query() {}
}
const rateLimiter = new RateLimiterPostgres({
storeClient: new Client(),
}, () => {
expect(rateLimiter.clientType).to.equal('client');
done();
});
});
it('set client type to "pool" by constructor name for Pool', (done) => {
class Pool {
Pool() {}
query() {}
}
const rateLimiter = new RateLimiterPostgres({
storeClient: new Pool(),
}, () => {
expect(rateLimiter.clientType).to.equal('pool');
done();
});
});
it('set client type to "sequelize" by constructor name for Sequelize', (done) => {
class Sequelize {
Sequelize() {}
query() {}
}
const rateLimiter = new RateLimiterPostgres({
storeClient: new Sequelize(),
}, () => {
expect(rateLimiter.clientType).to.equal('sequelize');
done();
});
});
it('throw error if it is not possible to define client type', (done) => {
try {
new RateLimiterPostgres({
storeClient: {},
});
} catch (err) {
expect(err instanceof Error).to.equal(true);
done();
}
});
it('private _getConnection returns client for Pool', (done) => {
class Pool {
Pool() {}
query() {}
}
const client = new Pool();
const rateLimiter = new RateLimiterPostgres({
storeClient: client,
}, () => {
rateLimiter._getConnection()
.then((conn) => {
expect(conn).to.equal(client);
done();
});
});
});
it('private _getConnection returns connection from manager for Sequelize', (done) => {
class Sequelize {
Sequelize() {}
query() {}
}
const client = new Sequelize();
client.connectionManager = {
getConnection: () => Promise.resolve(123),
};
const rateLimiter = new RateLimiterPostgres({
storeClient: client,
}, () => {
rateLimiter._getConnection()
.then((res) => {
expect(res).to.equal(123);
done();
});
});
});
it('private _getConnection returns acquire connection from Knex', (done) => {
class Knex {
Knex() {}
query() {}
}
const client = new Knex();
client.client = {
acquireConnection: () => Promise.resolve(321),
};
const rateLimiter = new RateLimiterPostgres({
storeClient: client,
storeType: 'knex',
}, () => {
rateLimiter._getConnection()
.then((res) => {
expect(res).to.equal(321);
done();
});
});
});
it('private _getConnection returns client for TypeORM', (done) => {
class Pool {
Pool() {}
query() {}
}
const typeORMConnection = {
driver: { master: new Pool() },
};
const rateLimiter = new RateLimiterPostgres({
storeClient: typeORMConnection,
storeType: 'typeorm',
}, () => {
rateLimiter._getConnection()
.then((conn) => {
expect(conn).to.equal(typeORMConnection.driver.master);
done();
});
});
});
it('Pool does not require specific connection releasing', (done) => {
class Pool {
Pool() {}
query() {}
}
const client = new Pool();
const rateLimiter = new RateLimiterPostgres({
storeClient: client,
}, () => {
expect(rateLimiter._releaseConnection()).to.equal(true);
done();
});
});
it('Sequelize release connection from manager', (done) => {
class Sequelize {
Sequelize() {}
query() {}
}
const client = new Sequelize();
client.connectionManager = {
releaseConnection: () => 123,
};
const rateLimiter = new RateLimiterPostgres({
storeClient: client,
}, () => {
expect(rateLimiter._releaseConnection()).to.equal(123);
done();
});
});
it('Knex release connection from client', (done) => {
class Knex {
Knex() {}
query() {}
}
const client = new Knex();
client.client = {
releaseConnection: () => 321,
};
const rateLimiter = new RateLimiterPostgres({
storeClient: client,
storeType: 'knex',
}, () => {
expect(rateLimiter._releaseConnection()).to.equal(321);
done();
});
});
it('TypeORM does not require specific connection releasing', (done) => {
class Pool {
Pool() {}
query() {}
}
const typeORMConnection = {
driver: { master: new Pool() },
};
const rateLimiter = new RateLimiterPostgres({
storeClient: typeORMConnection,
storeType: 'typeorm',
}, () => {
expect(rateLimiter._releaseConnection()).to.equal(true);
done();
});
});
it('does not expire key if duration set to 0', (done) => {
const testKey = 'neverexpire';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient,
storeType: 'connection',
points: 2,
duration: 0,
}, () => {
pgClientStub.restore();
const queryStub = sinon.stub(pgClient, 'query').resolves({
rows: [{ points: 1, expire: null }],
});
rateLimiter.consume(testKey, 1)
.then(() => {
queryStub.restore();
sinon.stub(pgClient, 'query').resolves({
rows: [{ points: 2, expire: null }],
});
rateLimiter.consume(testKey, 1)
.then(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.msBeforeNext).to.equal(-1);
done();
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
});
it('block key forever, if secDuration is 0', (done) => {
const testKey = 'neverexpire';
const rateLimiter = new RateLimiterPostgres({
storeClient: pgClient,
storeType: 'connection',
points: 2,
duration: 1,
}, () => {
pgClientStub.restore();
const queryStub = sinon.stub(pgClient, 'query').resolves({
rows: [{ points: 3, expire: null }],
});
rateLimiter.block(testKey, 0)
.then(() => {
setTimeout(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(3);
expect(res.msBeforeNext).to.equal(-1);
queryStub.restore();
done();
});
}, 1000);
})
.catch((err) => {
done(err);
});
});
});
});

View File

@@ -0,0 +1,183 @@
const { describe, it } = require('mocha');
const { expect } = require('chai');
const RateLimiterMemory = require('../lib/RateLimiterMemory');
const BurstyLimiter = require('../lib/BurstyRateLimiter');
const RateLimiterQueue = require('../lib/RateLimiterQueue');
const RateLimiterQueueError = require('../lib/component/RateLimiterQueueError');
describe('RateLimiterQueue with FIFO queue', function RateLimiterQueueTest() {
this.timeout(5000);
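// Rough usage sketch (assumption, not part of these tests): RateLimiterQueue wraps any limiter
// and queues removeTokens() calls until points free up, e.g.
//   const queue = new RateLimiterQueue(new RateLimiterMemory({ points: 1, duration: 1 }));
//   queue.removeTokens(1).then((remaining) => { /* resolves immediately or after the limiter refills */ });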
it('remove 1 token works and 1 remaining', (done) => {
const rlMemory = new RateLimiterMemory({ points: 2, duration: 1 });
const rlQueue = new RateLimiterQueue(rlMemory);
rlQueue.removeTokens(1)
.then((remainingTokens) => {
expect(remainingTokens).to.equal(1);
done();
});
});
it('remove 2 tokens from bursty limiter and returns correct remainingTokens 0', (done) => {
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
const blMemory = new RateLimiterMemory({ points: 1, duration: 3 });
const burstyLimiter = new BurstyLimiter(rlMemory, blMemory);
const rlQueue = new RateLimiterQueue(burstyLimiter);
const startTime = Date.now();
rlQueue.removeTokens(1)
.then((remainingTokens1) => {
expect(remainingTokens1).to.equal(0);
rlQueue.removeTokens(1)
.then((remainingTokens2) => {
expect(remainingTokens2).to.equal(0);
expect(Date.now() - startTime < 1000).to.equal(true);
done();
});
});
});
it('remove 2 tokens from bursty limiter and wait 1 more', (done) => {
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
const blMemory = new RateLimiterMemory({ points: 1, duration: 3 });
const burstyLimiter = new BurstyLimiter(rlMemory, blMemory);
const rlQueue = new RateLimiterQueue(burstyLimiter);
const startTime = Date.now();
rlQueue.removeTokens(1)
.then(() => {
rlQueue.removeTokens(1)
.then(() => {
rlQueue.removeTokens(1)
.then((remainingTokens) => {
expect(remainingTokens).to.equal(0);
expect(Date.now() - startTime > 1000).to.equal(true);
done();
});
});
});
});
it('remove all tokens works and 0 remaining', (done) => {
const rlMemory = new RateLimiterMemory({ points: 2, duration: 1 });
const rlQueue = new RateLimiterQueue(rlMemory);
rlQueue.removeTokens(2)
.then((remainingTokens) => {
expect(remainingTokens).to.equal(0);
done();
});
});
it('return error if try to remove more tokens than allowed', (done) => {
const rlMemory = new RateLimiterMemory({ points: 2, duration: 1 });
const rlQueue = new RateLimiterQueue(rlMemory);
rlQueue.removeTokens(3)
.then(() => {
})
.catch((err) => {
expect(err instanceof RateLimiterQueueError).to.equal(true);
done();
});
});
it('queues 1 request and fire it after 1 second', (done) => {
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
const rlQueue = new RateLimiterQueue(rlMemory);
const time = Date.now();
rlQueue.removeTokens(1).then(() => {
});
rlQueue.removeTokens(1).then((remainingTokens) => {
expect(remainingTokens).to.equal(0);
expect(Date.now() - time >= 1000).to.equal(true);
done();
});
});
it('respects order of queued callbacks', (done) => {
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
const rlQueue = new RateLimiterQueue(rlMemory);
let index;
rlQueue.removeTokens(1).then(() => {
index = 0;
});
rlQueue.removeTokens(1).then(() => {
expect(index).to.equal(0);
index = 1;
});
rlQueue.removeTokens(1).then(() => {
expect(index).to.equal(1);
index = 2;
});
rlQueue.removeTokens(1).then(() => {
expect(index).to.equal(2);
done();
});
});
it('return error if queue length reaches maximum', (done) => {
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
const rlQueue = new RateLimiterQueue(rlMemory, { maxQueueSize: 1 });
rlQueue.removeTokens(1).then(() => {
});
rlQueue.removeTokens(1).then(() => {
done();
});
rlQueue.removeTokens(1)
.then(() => {
done(new Error('must not allow to queue'));
})
.catch((err) => {
expect(err instanceof RateLimiterQueueError).to.equal(true);
});
});
it('getTokensRemaining works', (done) => {
const rlMemory = new RateLimiterMemory({ points: 2, duration: 1 });
const rlQueue = new RateLimiterQueue(rlMemory);
rlQueue.removeTokens(1)
.then(() => {
rlQueue.getTokensRemaining()
.then((tokensRemaining) => {
expect(tokensRemaining).to.equal(1);
done();
});
});
});
it('getTokensRemaining returns maximum if internal limiter by key does not exist', (done) => {
const rlMemory = new RateLimiterMemory({ points: 23, duration: 1 });
const rlQueue = new RateLimiterQueue(rlMemory);
rlQueue.getTokensRemaining('test')
.then((tokensRemaining) => {
expect(tokensRemaining).to.equal(23);
done();
});
});
it('creates internal instance by key and removes tokens from it', (done) => {
const rlMemory = new RateLimiterMemory({ points: 2, duration: 1 });
const rlQueue = new RateLimiterQueue(rlMemory);
rlQueue.removeTokens(1, 'customkey')
.then((remainingTokens) => {
expect(remainingTokens).to.equal(1);
rlQueue.getTokensRemaining()
.then((defaultTokensRemaining) => {
expect(defaultTokensRemaining).to.equal(2);
done();
});
});
});
it('getTokensRemaining returns maximum if internal limiter does not have data', (done) => {
const rlMemory = new RateLimiterMemory({ points: 23, duration: 1 });
const rlQueue = new RateLimiterQueue(rlMemory);
rlQueue.removeTokens(1, 'nodata')
.then(() => {
setTimeout(() => {
rlQueue.getTokensRemaining('nodata')
.then((tokensRemaining) => {
expect(tokensRemaining).to.equal(23);
done();
});
}, 1001);
});
});
});

View File

@@ -0,0 +1,902 @@
/* eslint-disable new-cap */
/* eslint-disable no-unused-expressions */
const { describe, it, beforeEach } = require('mocha');
const { expect } = require('chai');
const sinon = require('sinon');
const RateLimiterRedis = require('../lib/RateLimiterRedis');
const redisMock = require('redis-mock');
const { redisEvalMock, getRedisClientClosed } = require('./helper');
describe('RateLimiterRedis with fixed window', function RateLimiterRedisTest() {
this.timeout(5000);
const redisMockClient = redisMock.createClient();
redisMockClient.eval = redisEvalMock(redisMockClient);
const redisClientClosed = getRedisClientClosed(redisMockClient);
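// redisMockClient emulates a working Redis connection; redisClientClosed (built by the test helper)
// rejects every command with an error, which is how the insuranceLimiter fallback paths below are exercised.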
beforeEach((done) => {
redisMockClient.flushall(done);
});
it('consume 1 point', (done) => {
const testKey = 'consume1';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 2,
duration: 5,
});
rateLimiter
.consume(testKey)
.then(() => {
redisMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
if (!err) {
expect(consumedPoints).to.equal('1');
done();
}
});
})
.catch((err) => {
done(err);
});
});
it('rejected when consume more than maximum points', (done) => {
const testKey = 'consume2';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 5,
});
rateLimiter
.consume(testKey, 2)
.then(() => {})
.catch((rejRes) => {
expect(rejRes.msBeforeNext >= 0).to.equal(true);
done();
});
});
it('execute evenly over duration', (done) => {
const testKey = 'consumeEvenly';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 2,
duration: 5,
execEvenly: true,
});
rateLimiter
.consume(testKey)
.then(() => {
const timeFirstConsume = Date.now();
rateLimiter
.consume(testKey)
.then(() => {
/* The second consume should be delayed by roughly 2.5 seconds.
Explanation:
1) first consume at 0ms, remaining duration = 5000ms
2) the delayed consume waits remainingMs / (remainingPoints + 2) = 4999 / (0 + 2) ~= 2500ms;
the fixed "+ 2" keeps the delay away from both the start and the end of the duration
3) the second consume fires after ~2500ms via a timeout
*/
const diff = Date.now() - timeFirstConsume;
expect(diff > 2400 && diff < 2600).to.equal(true);
done();
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('execute evenly over duration with minimum delay 20 ms', (done) => {
const testKey = 'consumeEvenlyMinDelay';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 100,
duration: 1,
execEvenly: true,
execEvenlyMinDelayMs: 20,
});
rateLimiter
.consume(testKey)
.then(() => {
const timeFirstConsume = Date.now();
rateLimiter
.consume(testKey)
.then(() => {
expect(Date.now() - timeFirstConsume >= 20).to.equal(true);
done();
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('makes penalty', (done) => {
const testKey = 'penalty1';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 3,
duration: 5,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter
.penalty(testKey)
.then(() => {
redisMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
if (!err) {
expect(consumedPoints).to.equal('2');
done();
}
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('reward points', (done) => {
const testKey = 'reward';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 5,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter
.reward(testKey)
.then(() => {
redisMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
if (!err) {
expect(consumedPoints).to.equal('0');
done();
}
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('block key in memory when inMemory block options set up', (done) => {
const testKey = 'blockmem';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 5,
inMemoryBlockOnConsumed: 2,
inMemoryBlockDuration: 10,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter
.consume(testKey)
.then(() => {})
.catch((rejRes) => {
// msBeforeNext more than 5000, so key was blocked
expect(rejRes.msBeforeNext > 5000 && rejRes.remainingPoints === 0).to.equal(true);
done();
});
})
.catch((rejRes) => {
done(rejRes);
});
});
it('block key in memory for msBeforeNext milliseconds', (done) => {
const testKey = 'blockmempoints';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 5,
inMemoryBlockOnConsumed: 1,
});
rateLimiter
.consume(testKey)
.then(() => {
expect(rateLimiter._inMemoryBlockedKeys.msBeforeExpire(rateLimiter.getKey(testKey)) > 0).to.equal(true);
done();
})
.catch((rejRes) => {
done(rejRes);
});
});
it('reject after block key in memory for msBeforeNext, if consumed more than points', (done) => {
const testKey = 'blockmempointsreject';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 5,
inMemoryBlockOnConsumed: 1,
});
rateLimiter
.consume(testKey, 2)
.then(() => {
done(new Error('must not'));
})
.catch(() => {
expect(rateLimiter._inMemoryBlockedKeys.msBeforeExpire(rateLimiter.getKey(testKey)) > 0).to.equal(true);
done();
});
});
it('expire inMemory blocked key', (done) => {
const testKey = 'blockmem2';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 1,
inMemoryBlockOnConsumed: 2,
inmemoryBlockDuration: 2, // @deprecated Kept to test backward compatibility
});
// It blocks on the first consume as consumed points more than available
rateLimiter
.consume(testKey, 2)
.then(() => {})
.catch(() => {
setTimeout(() => {
rateLimiter
.consume(testKey)
.then((res) => {
// Block expired
expect(res.msBeforeNext <= 1000 && res.remainingPoints === 0).to.equal(true);
done();
})
.catch((rejRes) => {
done(rejRes);
});
}, 2001);
});
});
it('throws error when inMemoryBlockOnConsumed is not set, but inMemoryBlockDuration is set', (done) => {
try {
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
inMemoryBlockDuration: 2,
});
rateLimiter.reward('test');
} catch (err) {
expect(err instanceof Error).to.equal(true);
done();
}
});
it('throws error when inMemoryBlockOnConsumed less than points', (done) => {
try {
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 2,
inmemoryBlockOnConsumed: 1, // @deprecated Kept to test backward compatibility
});
rateLimiter.reward('test');
} catch (err) {
expect(err instanceof Error).to.equal(true);
done();
}
});
it('throws error on RedisClient error', (done) => {
const testKey = 'rediserror';
const rateLimiter = new RateLimiterRedis({
storeClient: redisClientClosed,
});
rateLimiter
.consume(testKey)
.then(() => {})
.catch((rejRes) => {
expect(rejRes instanceof Error).to.equal(true);
done();
});
});
it('consume using insuranceLimiter when RedisClient error', (done) => {
const testKey = 'rediserror2';
const rateLimiter = new RateLimiterRedis({
storeClient: redisClientClosed,
points: 1,
duration: 1,
insuranceLimiter: new RateLimiterRedis({
points: 2,
duration: 2,
storeClient: redisMockClient,
}),
});
// Consume from insurance limiter with different options
rateLimiter
.consume(testKey)
.then((res) => {
expect(res.remainingPoints === 1 && res.msBeforeNext > 1000).to.equal(true);
done();
})
.catch((rejRes) => {
done(rejRes);
});
});
it('penalty using insuranceLimiter when RedisClient error', (done) => {
const testKey = 'rediserror3';
const rateLimiter = new RateLimiterRedis({
storeClient: redisClientClosed,
points: 1,
duration: 1,
insuranceLimiter: new RateLimiterRedis({
points: 2,
duration: 2,
storeClient: redisMockClient,
}),
});
rateLimiter
.penalty(testKey)
.then(() => {
redisMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
if (!err) {
expect(consumedPoints).to.equal('1');
done();
}
});
})
.catch((rejRes) => {
done(rejRes);
});
});
it('reward using insuranceLimiter when RedisClient error', (done) => {
const testKey = 'rediserror4';
const rateLimiter = new RateLimiterRedis({
storeClient: redisClientClosed,
points: 1,
duration: 1,
insuranceLimiter: new RateLimiterRedis({
points: 2,
duration: 2,
storeClient: redisMockClient,
}),
});
rateLimiter
.consume(testKey, 2)
.then(() => {
rateLimiter
.reward(testKey)
.then(() => {
redisMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
if (!err) {
expect(consumedPoints).to.equal('1');
done();
}
});
})
.catch((rejRes) => {
done(rejRes);
});
})
.catch((rejRes) => {
done(rejRes);
});
});
it('block using insuranceLimiter when RedisClient error', (done) => {
const testKey = 'rediserrorblock';
const rateLimiter = new RateLimiterRedis({
storeClient: redisClientClosed,
points: 1,
duration: 1,
insuranceLimiter: new RateLimiterRedis({
points: 1,
duration: 1,
storeClient: redisMockClient,
}),
});
rateLimiter
.block(testKey, 3)
.then((res) => {
expect(res.msBeforeNext > 2000 && res.msBeforeNext <= 3000).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('use keyPrefix from options', () => {
const testKey = 'key';
const keyPrefix = 'test';
const rateLimiter = new RateLimiterRedis({ keyPrefix, storeClient: redisClientClosed });
expect(rateLimiter.getKey(testKey)).to.equal('test:key');
});
it('blocks key for block duration when consumed more than points', (done) => {
const testKey = 'block';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 1,
blockDuration: 2,
});
rateLimiter
.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext > 1000).to.equal(true);
done();
});
});
it('reject with error, if internal block by blockDuration failed', (done) => {
const testKey = 'blockdurationfailed';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 1,
blockDuration: 2,
});
sinon.stub(rateLimiter, '_block').callsFake(() => Promise.reject(new Error()));
rateLimiter
.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej instanceof Error).to.equal(true);
done();
});
});
it('block expires in blockDuration seconds', (done) => {
const testKey = 'blockexpires';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 1,
blockDuration: 2,
});
rateLimiter
.consume(testKey, 2)
.then(() => {
done(Error('must not resolve'));
})
.catch(() => {
setTimeout(() => {
rateLimiter
.consume(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch(() => {
done(Error('must resolve'));
});
}, 2000);
});
});
it('block custom key', (done) => {
const testKey = 'blockcustom';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 1,
});
rateLimiter.block(testKey, 2).then(() => {
rateLimiter
.consume(testKey)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej.msBeforeNext > 1000).to.equal(true);
done();
});
});
});
it('get points', (done) => {
const testKey = 'get';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 2,
duration: 1,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter
.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(1);
done();
})
.catch(() => {
done(Error('get must not reject'));
});
})
.catch(() => {
done(Error('consume must not reject'));
});
});
describe('disconnected redis client', () => {
it('attempt to invoke redis if rejectIfRedisNotReady is not set', (done) => {
const testKey = 'get';
const rateLimiter = new RateLimiterRedis({
storeClient: redisClientClosed,
points: 2,
duration: 1,
});
rateLimiter
.consume(testKey)
.catch((error) => {
expect(error.message).to.equal('closed');
done();
});
});
it('get throws error with mock redis', (done) => {
const testKey = 'get';
const rateLimiter = new RateLimiterRedis({
storeClient: redisClientClosed,
points: 2,
duration: 1,
rejectIfRedisNotReady: true,
});
rateLimiter
.consume(testKey)
.catch((error) => {
expect(error.message).to.equal('Redis connection is not ready');
done();
});
});
it('get throws error with disconnected ioredis', (done) => {
const testKey = 'get';
const disconnectedIoRedis = {
status: 'closed',
};
const rateLimiter = new RateLimiterRedis({
storeClient: disconnectedIoRedis,
points: 2,
duration: 1,
rejectIfRedisNotReady: true,
});
rateLimiter
.consume(testKey)
.catch((error) => {
expect(error.message).to.equal('Redis connection is not ready');
done();
});
});
it('get throws error with disconnected node-redis', (done) => {
const testKey = 'get';
const disconnectedNodeRedis = {
isReady: () => false,
};
const rateLimiter = new RateLimiterRedis({
storeClient: disconnectedNodeRedis,
points: 2,
duration: 1,
rejectIfRedisNotReady: true,
});
rateLimiter
.consume(testKey)
.catch((error) => {
expect(error.message).to.equal('Redis connection is not ready');
done();
});
});
});
it('get returns NULL if key is not set', (done) => {
const testKey = 'getnull';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 2,
duration: 1,
});
rateLimiter
.get(testKey)
.then((res) => {
expect(res).to.equal(null);
done();
})
.catch(() => {
done(Error('get must not reject'));
});
});
it('get supports ioredis format', (done) => {
const testKey = 'getioredis';
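// ioredis resolves multi().exec() with an array of [err, result] pairs; this stub mimics that reply shape.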
class multiStubIoRedisClient {
multi() {
const multi = redisMockClient.multi();
multi.exec = (cb) => {
cb(null, [[null, '2'], [null, 4993]]);
};
return multi;
}
}
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 3,
duration: 5,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter.client = new multiStubIoRedisClient();
rateLimiter
.get(testKey)
.then((res) => {
expect(res.remainingPoints).to.equal(1);
done();
})
.catch(() => {
done(Error('get must not reject'));
});
})
.catch(() => {
done(Error('consume must not reject'));
});
});
it('delete key and return true', (done) => {
const testKey = 'deletetrue';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 2,
duration: 1,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter.delete(testKey)
.then((resDel) => {
expect(resDel).to.equal(true);
done();
});
});
});
it('delete returns false, if there is no key', (done) => {
const testKey = 'deletefalse';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 2,
duration: 1,
});
rateLimiter.delete(testKey)
.then((resDel) => {
expect(resDel).to.equal(false);
done();
});
});
it('delete rejects on error', (done) => {
const testKey = 'deleteerr';
const rateLimiter = new RateLimiterRedis({
storeClient: redisClientClosed,
points: 2,
duration: 1,
});
rateLimiter.delete(testKey)
.catch(() => done());
});
it('consume applies options.customDuration to set expire', (done) => {
const testKey = 'consume.customDuration';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 2,
duration: 5,
});
rateLimiter
.consume(testKey, 1, { customDuration: 1 })
.then((res) => {
expect(res.msBeforeNext <= 1000).to.be.true;
done();
})
.catch((err) => {
done(err);
});
});
it('insurance limiter on error consume applies options.customDuration to set expire', (done) => {
const testKey = 'consume.customDuration.onerror';
const rateLimiter = new RateLimiterRedis({
storeClient: redisClientClosed,
points: 1,
duration: 2,
insuranceLimiter: new RateLimiterRedis({
points: 2,
duration: 3,
storeClient: redisMockClient,
}),
});
// Consume from insurance limiter with different options
rateLimiter
.consume(testKey, 1, { customDuration: 1 })
.then((res) => {
expect(res.remainingPoints === 1 && res.msBeforeNext <= 1000).to.equal(true);
done();
})
.catch((rejRes) => {
done(rejRes);
});
});
it('block key in memory works with blockDuration on store', (done) => {
const testKey = 'blockmem+blockduration';
const rateLimiter = new RateLimiterRedis({
storeClient: redisMockClient,
points: 1,
duration: 5,
blockDuration: 10,
inMemoryBlockOnConsumed: 2,
inMemoryBlockDuration: 10,
});
rateLimiter
.consume(testKey)
.then(() => {
rateLimiter
.consume(testKey)
.then(() => {})
.catch((rejRes) => {
rateLimiter.get(testKey)
.then((getRes) => {
expect(getRes.msBeforeNext > 5000 && rejRes.remainingPoints === 0).to.equal(true);
// msBeforeNext more than 5000, so key was blocked in memory
expect(rejRes.msBeforeNext > 5000 && rejRes.remainingPoints === 0).to.equal(true);
done();
});
});
})
.catch((rejRes) => {
done(rejRes);
});
});
it('does not expire key if duration set to 0', (done) => {
const testKey = 'neverexpire';
const rateLimiter = new RateLimiterRedis({ storeClient: redisMockClient, points: 2, duration: 0 });
rateLimiter.consume(testKey, 1)
.then(() => {
rateLimiter.consume(testKey, 1)
.then(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.msBeforeNext).to.equal(-1);
done();
});
})
.catch((err) => {
done(err);
});
})
.catch((err) => {
done(err);
});
});
it('block key forever, if secDuration is 0', (done) => {
const testKey = 'neverexpire';
const rateLimiter = new RateLimiterRedis({ storeClient: redisMockClient, points: 1, duration: 1 });
rateLimiter.block(testKey, 0)
.then(() => {
setTimeout(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(2);
expect(res.msBeforeNext).to.equal(-1);
done();
});
}, 2000);
})
.catch((err) => {
done(err);
});
});
it('set points by key', (done) => {
const testKey = 'set';
const rateLimiter = new RateLimiterRedis({ storeClient: redisMockClient, points: 1, duration: 1 });
rateLimiter.set(testKey, 12)
.then(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(12);
done();
});
})
.catch((err) => {
done(err);
});
});
it('set points by key forever', (done) => {
const testKey = 'setforever';
const rateLimiter = new RateLimiterRedis({ storeClient: redisMockClient, points: 1, duration: 1 });
rateLimiter.set(testKey, 12, 0)
.then(() => {
setTimeout(() => {
rateLimiter.get(testKey)
.then((res) => {
expect(res.consumedPoints).to.equal(12);
expect(res.msBeforeNext).to.equal(-1);
done();
});
}, 1100);
})
.catch((err) => {
done(err);
});
});
});

View File

@@ -0,0 +1,58 @@
const { describe, it, beforeEach } = require('mocha');
const { expect } = require('chai');
const RateLimiterRes = require('../lib/RateLimiterRes');
describe('RateLimiterRes response object', () => {
let rateLimiterRes;
beforeEach(() => {
rateLimiterRes = new RateLimiterRes();
});
it('sets up defaults on construct', () => {
expect(rateLimiterRes.msBeforeNext === 0 && rateLimiterRes.remainingPoints === 0)
.to.be.equal(true);
});
it('msBeforeNext set and get', () => {
rateLimiterRes.msBeforeNext = 123;
expect(rateLimiterRes.msBeforeNext).to.equal(123);
});
it('points set and get', () => {
rateLimiterRes.remainingPoints = 4;
expect(rateLimiterRes.remainingPoints).to.equal(4);
});
it('consumed points set and get', () => {
rateLimiterRes.consumedPoints = 5;
expect(rateLimiterRes.consumedPoints).to.equal(5);
});
it('isFirstInDuration set and get with cast', () => {
rateLimiterRes.isFirstInDuration = 1;
expect(rateLimiterRes.isFirstInDuration).to.equal(true);
});
it('returns object on toJSON call', () => {
rateLimiterRes.msBeforeNext = 12;
rateLimiterRes.remainingPoints = 3;
rateLimiterRes.consumedPoints = 2;
rateLimiterRes.isFirstInDuration = true;
expect(rateLimiterRes.toJSON()).to.deep.equal({
remainingPoints: 3,
msBeforeNext: 12,
consumedPoints: 2,
isFirstInDuration: true,
});
});
it('returns JSON string on toString call', () => {
rateLimiterRes.msBeforeNext = 2;
rateLimiterRes.remainingPoints = 0;
rateLimiterRes.consumedPoints = 5;
rateLimiterRes.isFirstInDuration = false;
expect(rateLimiterRes.toString()).to.equal('{"remainingPoints":0,"msBeforeNext":2,"consumedPoints":5,"isFirstInDuration":false}');
});
});

View File

@@ -0,0 +1,122 @@
/* eslint-disable security/detect-object-injection */
const { describe, it } = require('mocha');
const { expect } = require('chai');
const RateLimiterStoreAbstract = require('../lib/RateLimiterStoreAbstract');
const RateLimiterRes = require('../lib/RateLimiterRes');
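// Minimal in-memory store used only by these tests: it implements the hooks
// RateLimiterStoreAbstract relies on (_getRateLimiterRes, _get, _upsert, _delete)
// on top of a plain object, so the abstract logic can be exercised without a real store.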
class RateLimiterStoreMemory extends RateLimiterStoreAbstract {
constructor(opts) {
super(opts);
this._inMemoryDataAsStorage = {};
}
_getRateLimiterRes(rlKey, changedPoints, storeResult) {
const res = new RateLimiterRes();
res.consumedPoints = storeResult.points;
res.isFirstInDuration = res.consumedPoints === changedPoints;
res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
res.msBeforeNext = storeResult.msBeforeNext;
return res;
}
_get(rlKey) {
const result = this._inMemoryDataAsStorage[rlKey];
return Promise.resolve(typeof result === 'undefined' ? null : result);
}
_delete(rlKey) {
const value = this._inMemoryDataAsStorage[rlKey];
if (typeof value === 'undefined') {
return Promise.resolve(false);
}
delete this._inMemoryDataAsStorage[rlKey];
return Promise.resolve(true);
}
_upsert(rlKey, points, msDuration) {
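// Fixed-window upsert: create the record with a fresh expiry when the key is
// missing or expired, otherwise add the points and report how much of the
// current window is left.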
const now = Date.now();
const result = {
points,
msBeforeNext: msDuration,
};
if (typeof this._inMemoryDataAsStorage[rlKey] === 'undefined') {
this._inMemoryDataAsStorage[rlKey] = {
points,
expired: now + msDuration,
};
} else {
const value = this._inMemoryDataAsStorage[rlKey];
if (value.expired > now) {
value.points += points;
result.points = value.points;
result.msBeforeNext = value.expired - now;
} else {
value.points = points;
value.expired = now + msDuration;
}
}
return Promise.resolve(result);
}
}
describe('RateLimiterStoreAbstract with fixed window', () => {
it('delete all in-memory blocked keys', (done) => {
const rateLimiter = new RateLimiterStoreMemory({
points: 1,
duration: 1,
// avoid firing the block method
blockDuration: 0,
inMemoryBlockOnConsumed: 1,
inMemoryBlockDuration: 1,
keyPrefix: '',
});
// should start blocking
Promise.allSettled([
rateLimiter.consume('key1', 2),
rateLimiter.consume('key2', 2),
])
.then(() => {
expect(rateLimiter._inMemoryBlockedKeys._keys.key1).not.eq(undefined);
expect(rateLimiter._inMemoryBlockedKeys._keys.key2).not.eq(undefined);
rateLimiter.deleteInMemoryBlockedAll();
expect(rateLimiter._inMemoryBlockedKeys._keys.key1).eq(undefined);
expect(rateLimiter._inMemoryBlockedKeys._keys.key2).eq(undefined);
done();
})
.catch((err) => {
done(err);
});
});
it('delete specific key should also delete in-memory data', (done) => {
const rateLimiter = new RateLimiterStoreMemory({
points: 1,
duration: 1,
// avoid firing the block method
blockDuration: 0,
inMemoryBlockOnConsumed: 1,
inMemoryBlockDuration: 1,
keyPrefix: '',
});
// should start blocking
rateLimiter.consume('key', 2).catch(() => {
expect(rateLimiter._inMemoryBlockedKeys._keys.key).not.eq(undefined);
rateLimiter.delete('key').then((isExist) => {
expect(rateLimiter._inMemoryBlockedKeys._keys.key).eq(undefined);
expect(isExist).eq(true);
done();
});
});
});
});

View File

@@ -0,0 +1,83 @@
/* eslint-disable no-unused-expressions */
const { describe, it, beforeEach } = require('mocha');
const { expect } = require('chai');
const RateLimiterUnion = require('../lib/RateLimiterUnion');
const RateLimiterMemory = require('../lib/RateLimiterMemory');
describe('RateLimiterUnion with fixed window', () => {
const keyPrefix1 = 'limit1';
const keyPrefix2 = 'limit2';
let rateLimiter;
beforeEach(() => {
const limiter1 = new RateLimiterMemory({
keyPrefix: keyPrefix1,
points: 1,
duration: 1,
});
const limiter2 = new RateLimiterMemory({
keyPrefix: keyPrefix2,
points: 2,
duration: 5,
});
rateLimiter = new RateLimiterUnion(limiter1, limiter2);
});
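// RateLimiterUnion consumes the same key from every limiter and resolves (or
// rejects) with an object keyed by each limiter's keyPrefix.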
it('does not allow creating a union with fewer than 2 limiters', () => {
try {
new RateLimiterUnion(new RateLimiterMemory({ // eslint-disable-line no-new
keyPrefix: keyPrefix1,
points: 1,
duration: 1,
}));
} catch (err) {
expect(err instanceof Error).to.equal(true);
}
});
it('all limiters have to be instances of RateLimiterAbstract', () => {
try {
new RateLimiterUnion(new RateLimiterMemory({ // eslint-disable-line no-new
keyPrefix: keyPrefix1,
points: 1,
duration: 1,
}), {});
} catch (err) {
expect(err instanceof Error).to.equal(true);
}
});
it('consume from all limiters', (done) => {
rateLimiter.consume('test')
.then((res) => {
expect(res[keyPrefix1].remainingPoints === 0 && res[keyPrefix2].remainingPoints === 1).to.equal(true);
done();
})
.catch(() => {
done(Error('must not reject'));
});
});
it('reject consume when "limit1" does not have enough points', (done) => {
rateLimiter.consume('test', 2)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej[keyPrefix1].remainingPoints === 0).to.equal(true);
done();
});
});
it('reject when both limiters do not have enough points', (done) => {
rateLimiter.consume('test', 3)
.then(() => {
done(Error('must not resolve'));
})
.catch((rej) => {
expect(rej[keyPrefix1].remainingPoints === 0 && rej[keyPrefix2].remainingPoints === 0).to.equal(true);
done();
});
});
});

View File

@@ -0,0 +1,82 @@
const { describe, it, beforeEach } = require('mocha');
const { expect } = require('chai');
const BlockedKeys = require('../../../lib/component/BlockedKeys/BlockedKeys');
describe('BlockedKeys', () => {
let blockedKeys;
beforeEach(() => {
blockedKeys = new BlockedKeys();
});
it('add blocked key', () => {
blockedKeys.add('key', 5);
blockedKeys.collectExpired();
expect(blockedKeys.msBeforeExpire('key') > 0).to.equal(true);
});
it('expire blocked key', (done) => {
blockedKeys.add('key', 1);
setTimeout(() => {
expect(blockedKeys.msBeforeExpire('key')).to.equal(0);
done();
}, 1001);
});
it('check not blocked key', () => {
blockedKeys.add('key', 1);
expect(blockedKeys.msBeforeExpire('key1')).to.equal(0);
});
it('do not collect expired on add', (done) => {
blockedKeys.add('key', 1);
blockedKeys.add('key1', 1);
setTimeout(() => {
blockedKeys.add('key2', 1);
expect(Object.keys(blockedKeys._keys).length).to.equal(3);
done();
}, 1001);
});
it('collect expired on add if there are more than 999 blocked keys', (done) => {
for (let i = 0; i < 1000; i++) {
blockedKeys.add(`key${i}`, 1);
}
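// collectExpired appears to run inside add() only after more than 999 keys have
// been added since the last collection, so the add below should purge the
// expired entries and leave just the fresh key.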
setTimeout(() => {
blockedKeys.add('key1', 1);
expect(Object.keys(blockedKeys._keys).length === 1 && blockedKeys._addedKeysAmount === 1)
.to.equal(true);
done();
}, 1001);
});
it('do not collect expired when key is not blocked', (done) => {
blockedKeys.add('key', 1);
setTimeout(() => {
blockedKeys.msBeforeExpire('key');
expect(Object.keys(blockedKeys._keys).length === 1 && blockedKeys._addedKeysAmount === 1)
.to.equal(true);
done();
}, 1001);
});
it('collect expired when key is blocked', (done) => {
blockedKeys.add('key', 1);
blockedKeys.add('blocked', 2);
setTimeout(() => {
blockedKeys.msBeforeExpire('blocked');
expect(Object.keys(blockedKeys._keys).length).to.equal(1);
done();
}, 1001);
});
it('duplicated keys do not break collectExpired and msBeforeExpire', (done) => {
blockedKeys.add('key', 1);
blockedKeys.add('key', 2);
setTimeout(() => {
blockedKeys.add('key', 3);
expect(blockedKeys.msBeforeExpire('key') > 2000).to.equal(true);
done();
}, 1001);
});
});

View File

@@ -0,0 +1,79 @@
const { describe, it, beforeEach } = require('mocha');
const { expect } = require('chai');
const MemoryStorage = require('../../../lib/component/MemoryStorage/MemoryStorage');
describe('MemoryStorage', function MemoryStorageTest() {
const testKey = 'test';
const val = 34;
let storage;
this.timeout(5000);
beforeEach(() => {
storage = new MemoryStorage();
});
it('should set and get', (done) => {
storage.set(testKey, val, 5);
expect(storage.get(testKey).consumedPoints).to.equal(val);
done();
});
it('should delete record on expire', (done) => {
storage.set(testKey, val, 1);
setTimeout(() => {
expect(storage.get(testKey)).to.equal(null);
done();
}, 2000);
});
it('should incrby', (done) => {
storage.set(testKey, val, 5);
storage.incrby(testKey, 2);
expect(storage.get(testKey).consumedPoints).to.equal(val + 2);
done();
});
it('incrby should create record if it is not set', (done) => {
storage.incrby(testKey, val, 5);
expect(storage.get(testKey).consumedPoints).to.equal(val);
done();
});
it('incrby should set expire if expiresAt is not set', (done) => {
storage.set(testKey, val);
expect(storage.get(testKey).expiresAt).to.equal(undefined);
storage.incrby(testKey, val, 5);
expect(storage.get(testKey).expiresAt !== null).to.equal(true);
done();
});
it('should delete record and return true, if it was there', () => {
storage.set(testKey, val, 10);
expect(storage.delete(testKey)).to.equal(true);
expect(storage.get(testKey)).to.equal(null);
});
it('return false, if there is no record to delete', () => {
expect(storage.delete(testKey)).to.equal(false);
});
it('should not fail in the absence of Timeout::unref', (done) => {
// Node (where these tests most likely run) provides `Timeout.prototype.unref`, but
// MemoryStorage should also work in environments where it is not available
// (e.g. browsers). Remove `unref` from `Timeout.prototype` for the duration of
// this test only, to verify that MemoryStorage.prototype.set won't throw.
const handle = setTimeout(() => {}, 0);
const isHandleObject = typeof handle === 'object' && !!handle.constructor;
let timeoutUnref;
if (isHandleObject) {
timeoutUnref = handle.constructor.prototype.unref;
delete handle.constructor.prototype.unref;
}
expect(() => new MemoryStorage().set('key', 0, 0.001)).to.not.throw();
setTimeout(done, 250);
if (isHandleObject) {
handle.constructor.prototype.unref = timeoutUnref;
}
});
});

View File

@@ -0,0 +1,21 @@
const { describe, it, beforeEach } = require('mocha');
const { expect } = require('chai');
const Record = require('../../../lib/component/MemoryStorage/Record');
describe('MemoryStorage Record', () => {
let record;
beforeEach(() => {
record = new Record();
});
it('value set with cast to int and get', () => {
record.value = '123';
expect(record.value).to.equal(123);
});
it('expiresAt set unix time and get Date', () => {
const now = Date.now();
record.expiresAt = now;
expect(record.expiresAt.getTime()).to.equal(now);
});
});

View File

@@ -0,0 +1,11 @@
const { describe, it } = require('mocha');
const { expect } = require('chai');
const RateLimiterQueueError = require('../../lib/component/RateLimiterQueueError');
describe('RateLimiterQueueError', () => {
it('supports extra argument in constructor', (done) => {
const err = new RateLimiterQueueError('test', 'extra');
expect(err.extra).to.equal('extra');
done();
});
});

View File

@@ -0,0 +1,57 @@
// Mock eval function with almost the same behaviour as the Lua script.
// It gives reasonable confidence that everything works as expected.
function redisEvalMock(redisMockClient) {
return (script, numberOfKeys, rlKey, points, secDuration, callback) => {
const multi = redisMockClient.multi();
if (secDuration > 0) {
multi.set(rlKey, 0, 'EX', secDuration, 'NX');
}
multi.incrby(rlKey, points)
.pttl(rlKey)
.exec((err, res) => {
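// When the SET ... EX NX step was queued, exec returns [setRes, incrRes, pttlRes];
// drop the SET reply so callers always get [consumedPoints, pttl], which is the
// shape the limiter expects from the real Lua script.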
const finalRes = secDuration > 0
? [res[1], res[2]]
: res;
callback(err, finalRes);
});
};
}
// Emulates a closed RedisClient whose multi().exec always fails with a 'closed' error
class RedisClient {
constructor(redisMockClient) {
this._redisMockClient = redisMockClient;
}
multi() {
const multi = this._redisMockClient.multi();
multi.exec = (cb) => {
cb(new Error('closed'), []);
};
return multi;
}
}
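// Wraps a RedisClient in a Proxy that behaves like a closed connection:
// defineCommand is reported as undefined, own members such as multi() are passed
// through, and any other command invokes its callback with a "closed" Error.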
function getRedisClientClosed(redisClient) {
const redisClientClosedRaw = new RedisClient(redisClient);
return new Proxy(redisClientClosedRaw, {
get: (func, name) => {
if (name === 'defineCommand') {
return undefined;
}
if (name in redisClientClosedRaw) {
return redisClientClosedRaw[name];
}
return function (...args) {
const cb = args.pop();
cb(Error('closed'));
};
},
});
}
module.exports = {
redisEvalMock,
getRedisClientClosed,
};