First commit
13
framework/node_modules/node-rate-limiter-flexible/.editorconfig
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
indent_size = 2
|
||||
indent_style = space
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
|
||||
[*.md]
|
||||
insert_final_newline = false
|
||||
trim_trailing_whitespace = false
|
||||
28
framework/node_modules/node-rate-limiter-flexible/.eslintrc.json
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"plugins": [
|
||||
"node",
|
||||
"security"
|
||||
],
|
||||
"extends": [
|
||||
"plugin:node/recommended",
|
||||
"plugin:security/recommended",
|
||||
"eslint:recommended",
|
||||
"airbnb-base"
|
||||
],
|
||||
"env": {
|
||||
"node": true
|
||||
},
|
||||
"rules": {
|
||||
"no-underscore-dangle": "off",
|
||||
"no-param-reassign": "off",
|
||||
"no-plusplus": "off",
|
||||
"radix": ["error", "as-needed"],
|
||||
"consistent-return": "off",
|
||||
"class-methods-use-this": "off",
|
||||
"max-len": ["error", { "code": 140 }],
|
||||
"node/no-unpublished-require": ["error", {
|
||||
"allowModules": ["mocha", "chai", "redis-mock"]
|
||||
}],
|
||||
"node/no-unsupported-features": "off"
|
||||
}
|
||||
}
|
||||
1
framework/node_modules/node-rate-limiter-flexible/.github/FUNDING.yml
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
ko_fi: animir
|
||||
4
framework/node_modules/node-rate-limiter-flexible/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
.idea
|
||||
node_modules
|
||||
coverage
|
||||
package-lock.json
|
||||
9
framework/node_modules/node-rate-limiter-flexible/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
.idea
|
||||
.gitignore
|
||||
.npmignore
|
||||
.travis.yml
|
||||
.eslintrc.json
|
||||
test
|
||||
coverage
|
||||
img
|
||||
.github
|
||||
10
framework/node_modules/node-rate-limiter-flexible/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- "8"
|
||||
- "10"
|
||||
- "12"
|
||||
- "14"
|
||||
script:
|
||||
- npm run eslint
|
||||
- npm run test
|
||||
after_success: 'npm run coveralls'
|
||||
7
framework/node_modules/node-rate-limiter-flexible/LICENSE.md
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
## ISC License (ISC)
|
||||
|
||||
Copyright 2019 Roman Voloboev
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
239
framework/node_modules/node-rate-limiter-flexible/README.md
generated
vendored
Normal file
@@ -0,0 +1,239 @@
|
||||
[](https://coveralls.io/r/animir/node-rate-limiter-flexible?branch=master)
|
||||
[](https://www.npmjs.com/package/rate-limiter-flexible)
|
||||

|
||||
[![node version][node-image]][node-url]
|
||||
[](https://github.com/denoland/deno)
|
||||
|
||||
[node-image]: https://img.shields.io/badge/node.js-%3E=_6.0-green.svg?style=flat-square
|
||||
[node-url]: http://nodejs.org/download/
|
||||
|
||||
<img src="img/rlflx-logo-small.png" width="50" alt="Logo"/>
|
||||
|
||||
## node-rate-limiter-flexible
|
||||
|
||||
**rate-limiter-flexible** counts and limits number of actions by key and protects from DDoS and brute force attacks at any scale.
|
||||
|
||||
It works with _Redis_, process _Memory_, _Cluster_ or _PM2_, _Memcached_, _MongoDB_, _MySQL_, _PostgreSQL_ and allows to control requests rate in single process or distributed environment.
|
||||
|
||||
Memory limiter also works in browser.
|
||||
|
||||
**Atomic increments.** All operations in memory or distributed environment use atomic increments against race conditions.
|
||||
|
||||
Allow **traffic bursts** with [BurstyRateLimiter](https://github.com/animir/node-rate-limiter-flexible/wiki/BurstyRateLimiter).
|
||||
|
||||
**Fast.** Average request takes `0.7ms` in Cluster and `2.5ms` in Distributed application. See [benchmarks](https://github.com/animir/node-rate-limiter-flexible#benchmark).
|
||||
|
||||
**Flexible.** Combine limiters, block key for some duration, delay actions, manage failover with insurance options, configure smart key blocking in memory and many others.
|
||||
|
||||
**Ready for growth.** It provides unified API for all limiters. Whenever your application grows, it is ready. Prepare your limiters in minutes.
|
||||
|
||||
**Friendly.** No matter which node package you prefer: `redis` or `ioredis`, `sequelize`/`typeorm` or `knex`, `memcached`, native driver or `mongoose`. It works with all of them.
|
||||
|
||||
**In memory blocks.** Avoid extra requests to store with [inMemoryBlockOnConsumed](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#inmemoryblockonconsumed).
|
||||
|
||||
**Deno compatible** See [this example](https://gist.github.com/animir/d06ca92931677f330d3f2d4c6c3108e4)
|
||||
|
||||
It uses **fixed window** as it is much faster than rolling window.
|
||||
[See comparative benchmarks with other libraries here](https://github.com/animir/node-rate-limiter-flexible/wiki/Comparative-benchmarks)
|
||||
|
||||
## Installation
|
||||
|
||||
`npm i --save rate-limiter-flexible`
|
||||
|
||||
`yarn add rate-limiter-flexible`
|
||||
|
||||
## Basic Example
|
||||
|
||||
Points can be consumed by IP address, user ID, authorisation token, API route or any other string.
|
||||
|
||||
```javascript
|
||||
const opts = {
|
||||
points: 6, // 6 points
|
||||
duration: 1, // Per second
|
||||
};
|
||||
|
||||
const rateLimiter = new RateLimiterMemory(opts);
|
||||
|
||||
rateLimiter.consume(remoteAddress, 2) // consume 2 points
|
||||
.then((rateLimiterRes) => {
|
||||
// 2 points consumed
|
||||
})
|
||||
.catch((rateLimiterRes) => {
|
||||
// Not enough points to consume
|
||||
});
|
||||
```
|
||||
|
||||
#### RateLimiterRes object
|
||||
|
||||
Both Promise resolve and reject return object of `RateLimiterRes` class if there is no any error.
|
||||
Object attributes:
|
||||
```javascript
|
||||
RateLimiterRes = {
|
||||
msBeforeNext: 250, // Number of milliseconds before next action can be done
|
||||
remainingPoints: 0, // Number of remaining points in current duration
|
||||
consumedPoints: 5, // Number of consumed points in current duration
|
||||
isFirstInDuration: false, // action is first in current duration
|
||||
}
|
||||
```
|
||||
|
||||
You may want to set next HTTP headers to response:
|
||||
```javascript
|
||||
const headers = {
|
||||
"Retry-After": rateLimiterRes.msBeforeNext / 1000,
|
||||
"X-RateLimit-Limit": opts.points,
|
||||
"X-RateLimit-Remaining": rateLimiterRes.remainingPoints,
|
||||
"X-RateLimit-Reset": new Date(Date.now() + rateLimiterRes.msBeforeNext)
|
||||
}
|
||||
```
|
||||
|
||||
### Advantages:
|
||||
* no race conditions
|
||||
* no production dependencies
|
||||
* TypeScript declaration bundled
|
||||
* allow traffic burst with [BurstyRateLimiter](https://github.com/animir/node-rate-limiter-flexible/wiki/BurstyRateLimiter)
|
||||
* Block Strategy against really powerful DDoS attacks (like 100k requests per sec) [Read about it and benchmarking here](https://github.com/animir/node-rate-limiter-flexible/wiki/In-memory-Block-Strategy)
|
||||
* Insurance Strategy as emergency solution if database / store is down [Read about Insurance Strategy here](https://github.com/animir/node-rate-limiter-flexible/wiki/Insurance-Strategy)
|
||||
* works in Cluster or PM2 without additional software [See RateLimiterCluster benchmark and detailed description here](https://github.com/animir/node-rate-limiter-flexible/wiki/Cluster)
|
||||
* useful `get`, `set`, `block`, `delete`, `penalty` and `reward` methods
|
||||
|
||||
### Middlewares, plugins and other packages
|
||||
* [Express middleware](https://github.com/animir/node-rate-limiter-flexible/wiki/Express-Middleware)
|
||||
* [Koa middleware](https://github.com/animir/node-rate-limiter-flexible/wiki/Koa-Middleware)
|
||||
* [Hapi plugin](https://github.com/animir/node-rate-limiter-flexible/wiki/Hapi-plugin)
|
||||
* GraphQL [graphql-rate-limit-directive](https://www.npmjs.com/package/graphql-rate-limit-directive)
|
||||
* NestJS try [nestjs-rate-limiter](https://www.npmjs.com/package/nestjs-rate-limiter)
|
||||
* Fastify based NestJS app try [nestjs-fastify-rate-limiter](https://www.npmjs.com/package/nestjs-fastify-rate-limiter)
|
||||
|
||||
Some copy/paste examples on Wiki:
|
||||
* [Minimal protection against password brute-force](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#minimal-protection-against-password-brute-force)
|
||||
* [Login endpoint protection](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#login-endpoint-protection)
|
||||
* [Websocket connection prevent flooding](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#websocket-single-connection-prevent-flooding)
|
||||
* [Dynamic block duration](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#dynamic-block-duration)
|
||||
* [Authorized users specific limits](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#authorized-and-not-authorized-users)
|
||||
* [Different limits for different parts of application](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#different-limits-for-different-parts-of-application)
|
||||
* [Apply Block Strategy](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#apply-in-memory-block-strategy-to-avoid-extra-requests-to-store)
|
||||
* [Setup Insurance Strategy](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#setup-insurance-strategy-for-store-limiters)
|
||||
* [Third-party API, crawler, bot rate limiting](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#third-party-api-crawler-bot-rate-limiting)
|
||||
|
||||
### Migration from other packages
|
||||
* [express-brute](https://github.com/animir/node-rate-limiter-flexible/wiki/ExpressBrute-migration) Bonus: race conditions fixed, prod deps removed
|
||||
* [limiter](https://github.com/animir/node-rate-limiter-flexible/wiki/RateLimiterQueue#migration-from-limiter) Bonus: multi-server support, respects queue order, native promises
|
||||
|
||||
### Docs and Examples
|
||||
|
||||
* [Options](https://github.com/animir/node-rate-limiter-flexible/wiki/Options)
|
||||
* [API methods](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods)
|
||||
* [BurstyRateLimiter](https://github.com/animir/node-rate-limiter-flexible/wiki/BurstyRateLimiter) Traffic burst support
|
||||
* [RateLimiterRedis](https://github.com/animir/node-rate-limiter-flexible/wiki/Redis)
|
||||
* [RateLimiterMemcache](https://github.com/animir/node-rate-limiter-flexible/wiki/Memcache)
|
||||
* [RateLimiterMongo](https://github.com/animir/node-rate-limiter-flexible/wiki/Mongo) (with [sharding support](https://github.com/animir/node-rate-limiter-flexible/wiki/Mongo#mongodb-sharding-options))
|
||||
* [RateLimiterMySQL](https://github.com/animir/node-rate-limiter-flexible/wiki/MySQL) (support Sequelize and Knex)
|
||||
* [RateLimiterPostgres](https://github.com/animir/node-rate-limiter-flexible/wiki/PostgreSQL) (support Sequelize, TypeORM and Knex)
|
||||
* [RateLimiterCluster](https://github.com/animir/node-rate-limiter-flexible/wiki/Cluster) ([PM2 cluster docs read here](https://github.com/animir/node-rate-limiter-flexible/wiki/PM2-cluster))
|
||||
* [RateLimiterMemory](https://github.com/animir/node-rate-limiter-flexible/wiki/Memory)
|
||||
* [RateLimiterUnion](https://github.com/animir/node-rate-limiter-flexible/wiki/RateLimiterUnion) Combine 2 or more limiters to act as single
|
||||
* [RLWrapperBlackAndWhite](https://github.com/animir/node-rate-limiter-flexible/wiki/Black-and-White-lists) Black and White lists
|
||||
* [RateLimiterQueue](https://github.com/animir/node-rate-limiter-flexible/wiki/RateLimiterQueue) Rate limiter with FIFO queue
|
||||
|
||||
### Changelog
|
||||
|
||||
See [releases](https://github.com/animir/node-rate-limiter-flexible/releases) for detailed changelog.
|
||||
|
||||
## Basic Options
|
||||
|
||||
* **points**
|
||||
|
||||
`Default: 4`
|
||||
|
||||
Maximum number of points can be consumed over duration
|
||||
|
||||
* **duration**
|
||||
|
||||
`Default: 1`
|
||||
|
||||
Number of seconds before consumed points are reset.
|
||||
|
||||
Never reset points, if `duration` is set to 0.
|
||||
|
||||
* **storeClient**
|
||||
|
||||
`Required for store limiters`
|
||||
|
||||
Have to be `redis`, `ioredis`, `memcached`, `mongodb`, `pg`, `mysql2`, `mysql` or any other related pool or connection.
|
||||
|
||||
### Other options on Wiki:
|
||||
* [keyPrefix](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#keyprefix) Make keys unique among different limiters.
|
||||
* [blockDuration](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#blockduration) Block for N seconds, if consumed more than points.
|
||||
* [inMemoryBlockOnConsumed](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#inmemoryblockonconsumed) Avoid extra requests to store.
|
||||
* [inMemoryBlockDuration](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#inmemoryblockduration)
|
||||
* [insuranceLimiter](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#insurancelimiter) Make it more stable with less efforts.
|
||||
* [storeType](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#storetype) Have to be set to `knex`, if you use it.
|
||||
* [dbName](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#dbname) Where to store points.
|
||||
* [tableName](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#tablename) Table/collection.
|
||||
* [tableCreated](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#tablecreated) Is table already created in MySQL or PostgreSQL.
|
||||
* [clearExpiredByTimeout](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#clearexpiredbytimeout) For MySQL and PostgreSQL.
|
||||
|
||||
Smooth out traffic picks:
|
||||
* [execEvenly](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#execevenly)
|
||||
* [execEvenlyMinDelayMs](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#execevenlymindelayms)
|
||||
|
||||
Specific:
|
||||
* [indexKeyPrefix](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#indexkeyprefix) Combined indexes of MongoDB.
|
||||
* [timeoutMs](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#timeoutms) For Cluster.
|
||||
* [rejectIfRedisNotReady](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#rejectifredisnotready)
|
||||
|
||||
## API
|
||||
|
||||
Read detailed description on Wiki.
|
||||
|
||||
* [consume(key, points = 1)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterconsumekey-points--1) Consume points by key.
|
||||
* [get(key)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimitergetkey) Get `RateLimiterRes` or `null`.
|
||||
* [set(key, points, secDuration)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimitersetkey-points-secduration) Set points by key.
|
||||
* [block(key, secDuration)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterblockkey-secduration) Block key for `secDuration` seconds.
|
||||
* [delete(key)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterdeletekey) Reset consumed points.
|
||||
* [deleteInMemoryBlockedAll](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterdeleteinmemoryblockedall)
|
||||
* [penalty(key, points = 1)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterpenaltykey-points--1) Increase number of consumed points in current duration.
|
||||
* [reward(key, points = 1)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterrewardkey-points--1) Decrease number of consumed points in current duration.
|
||||
* [getKey(key)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimitergetkeykey) Get internal prefixed key.
|
||||
|
||||
## Benchmark
|
||||
|
||||
Average latency during test pure NodeJS endpoint in cluster of 4 workers with everything set up on one server.
|
||||
|
||||
1000 concurrent clients with maximum 2000 requests per sec during 30 seconds.
|
||||
|
||||
```text
|
||||
1. Memory 0.34 ms
|
||||
2. Cluster 0.69 ms
|
||||
3. Redis 2.45 ms
|
||||
4. Memcached 3.89 ms
|
||||
5. Mongo 4.75 ms
|
||||
```
|
||||
|
||||
500 concurrent clients with maximum 1000 req per sec during 30 seconds
|
||||
```text
|
||||
6. PostgreSQL 7.48 ms (with connection pool max 100)
|
||||
7. MySQL 14.59 ms (with connection pool 100)
|
||||
```
|
||||
|
||||
Note, you can speed up limiters with [inMemoryBlockOnConsumed](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#inmemoryblockonconsumed) option.
|
||||
|
||||
## Contribution
|
||||
|
||||
Appreciated, feel free!
|
||||
|
||||
Make sure you've launched `npm run eslint` before creating PR, all errors have to be fixed.
|
||||
|
||||
You can try to run `npm run eslint-fix` to fix some issues.
|
||||
|
||||
Any new limiter with storage have to be extended from `RateLimiterStoreAbstract`.
|
||||
It has to implement 4 methods:
|
||||
* `_getRateLimiterRes` parses raw data from store to `RateLimiterRes` object.
|
||||
* `_upsert` must be atomic. it inserts or updates value by key and returns raw data. it must support `forceExpire` mode
|
||||
to overwrite key expiration time.
|
||||
* `_get` returns raw data by key or `null` if there is no key.
|
||||
* `_delete` deletes all key related data and returns `true` on deleted, `false` if key is not found.
|
||||
|
||||
All other methods depends on store. See `RateLimiterRedis` or `RateLimiterPostgres` for example.
|
||||
|
||||
Note: all changes should be covered by tests.
|
||||
BIN
framework/node_modules/node-rate-limiter-flexible/img/chart-exec-evenly-10r-end.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 12 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/chart-exec-evenly-10r-start.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 13 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/express-brute-example.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 153 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-cluster-master.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-cluster-worker.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-memcache.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-memory.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 51 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-mongo.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-mysql.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-postgres.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-redis.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/rlflx-logo-small.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 9.1 KiB |
29
framework/node_modules/node-rate-limiter-flexible/index.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
const RateLimiterRedis = require('./lib/RateLimiterRedis');
|
||||
const RateLimiterMongo = require('./lib/RateLimiterMongo');
|
||||
const RateLimiterMySQL = require('./lib/RateLimiterMySQL');
|
||||
const RateLimiterPostgres = require('./lib/RateLimiterPostgres');
|
||||
const {RateLimiterClusterMaster, RateLimiterClusterMasterPM2, RateLimiterCluster} = require('./lib/RateLimiterCluster');
|
||||
const RateLimiterMemory = require('./lib/RateLimiterMemory');
|
||||
const RateLimiterMemcache = require('./lib/RateLimiterMemcache');
|
||||
const RLWrapperBlackAndWhite = require('./lib/RLWrapperBlackAndWhite');
|
||||
const RateLimiterUnion = require('./lib/RateLimiterUnion');
|
||||
const RateLimiterQueue = require('./lib/RateLimiterQueue');
|
||||
const BurstyRateLimiter = require('./lib/BurstyRateLimiter');
|
||||
const RateLimiterRes = require('./lib/RateLimiterRes');
|
||||
|
||||
module.exports = {
|
||||
RateLimiterRedis,
|
||||
RateLimiterMongo,
|
||||
RateLimiterMySQL,
|
||||
RateLimiterPostgres,
|
||||
RateLimiterMemory,
|
||||
RateLimiterMemcache,
|
||||
RateLimiterClusterMaster,
|
||||
RateLimiterClusterMasterPM2,
|
||||
RateLimiterCluster,
|
||||
RLWrapperBlackAndWhite,
|
||||
RateLimiterUnion,
|
||||
RateLimiterQueue,
|
||||
BurstyRateLimiter,
|
||||
RateLimiterRes,
|
||||
};
|
||||
74
framework/node_modules/node-rate-limiter-flexible/lib/BurstyRateLimiter.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
const RateLimiterRes = require("./RateLimiterRes");
|
||||
|
||||
/**
|
||||
* Bursty rate limiter exposes only msBeforeNext time and doesn't expose points from bursty limiter by default
|
||||
* @type {BurstyRateLimiter}
|
||||
*/
|
||||
module.exports = class BurstyRateLimiter {
|
||||
constructor(rateLimiter, burstLimiter) {
|
||||
this._rateLimiter = rateLimiter;
|
||||
this._burstLimiter = burstLimiter
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge rate limiter response objects. Responses can be null
|
||||
*
|
||||
* @param {RateLimiterRes} [rlRes] Rate limiter response
|
||||
* @param {RateLimiterRes} [blRes] Bursty limiter response
|
||||
*/
|
||||
_combineRes(rlRes, blRes) {
|
||||
return new RateLimiterRes(
|
||||
rlRes.remainingPoints,
|
||||
Math.min(rlRes.msBeforeNext, blRes.msBeforeNext),
|
||||
rlRes.consumedPoints,
|
||||
rlRes.isFirstInDuration
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param key
|
||||
* @param pointsToConsume
|
||||
* @param options
|
||||
* @returns {Promise<any>}
|
||||
*/
|
||||
consume(key, pointsToConsume = 1, options = {}) {
|
||||
return this._rateLimiter.consume(key, pointsToConsume, options)
|
||||
.catch((rlRej) => {
|
||||
if (rlRej instanceof RateLimiterRes) {
|
||||
return this._burstLimiter.consume(key, pointsToConsume, options)
|
||||
.then((blRes) => {
|
||||
return Promise.resolve(this._combineRes(rlRej, blRes))
|
||||
})
|
||||
.catch((blRej) => {
|
||||
if (blRej instanceof RateLimiterRes) {
|
||||
return Promise.reject(this._combineRes(rlRej, blRej))
|
||||
} else {
|
||||
return Promise.reject(blRej)
|
||||
}
|
||||
}
|
||||
)
|
||||
} else {
|
||||
return Promise.reject(rlRej)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* It doesn't expose available points from burstLimiter
|
||||
*
|
||||
* @param key
|
||||
* @returns {Promise<RateLimiterRes>}
|
||||
*/
|
||||
get(key) {
|
||||
return Promise.all([
|
||||
this._rateLimiter.get(key),
|
||||
this._burstLimiter.get(key),
|
||||
]).then(([rlRes, blRes]) => {
|
||||
return this._combineRes(rlRes, blRes);
|
||||
});
|
||||
}
|
||||
|
||||
get points() {
|
||||
return this._rateLimiter.points;
|
||||
}
|
||||
};
|
||||
347
framework/node_modules/node-rate-limiter-flexible/lib/ExpressBruteFlexible.js
generated
vendored
Normal file
@@ -0,0 +1,347 @@
|
||||
const {
|
||||
LIMITER_TYPES,
|
||||
ERR_UNKNOWN_LIMITER_TYPE_MESSAGE,
|
||||
} = require('./constants');
|
||||
const crypto = require('crypto');
|
||||
const {
|
||||
RateLimiterMemory,
|
||||
RateLimiterCluster,
|
||||
RateLimiterMemcache,
|
||||
RateLimiterMongo,
|
||||
RateLimiterMySQL,
|
||||
RateLimiterPostgres,
|
||||
RateLimiterRedis,
|
||||
} = require('../index');
|
||||
|
||||
function getDelayMs(count, delays, maxWait) {
|
||||
let msDelay = maxWait;
|
||||
const delayIndex = count - 1;
|
||||
if (delayIndex >= 0 && delayIndex < delays.length) {
|
||||
msDelay = delays[delayIndex];
|
||||
}
|
||||
|
||||
return msDelay;
|
||||
}
|
||||
|
||||
const ExpressBruteFlexible = function (limiterType, options) {
|
||||
ExpressBruteFlexible.instanceCount++;
|
||||
this.name = `brute${ExpressBruteFlexible.instanceCount}`;
|
||||
|
||||
this.options = Object.assign({}, ExpressBruteFlexible.defaults, options);
|
||||
if (this.options.minWait < 1) {
|
||||
this.options.minWait = 1;
|
||||
}
|
||||
|
||||
const validLimiterTypes = Object.keys(ExpressBruteFlexible.LIMITER_TYPES).map(k => ExpressBruteFlexible.LIMITER_TYPES[k]);
|
||||
if (!validLimiterTypes.includes(limiterType)) {
|
||||
throw new Error(ERR_UNKNOWN_LIMITER_TYPE_MESSAGE);
|
||||
}
|
||||
this.limiterType = limiterType;
|
||||
|
||||
this.delays = [this.options.minWait];
|
||||
while (this.delays[this.delays.length - 1] < this.options.maxWait) {
|
||||
const nextNum = this.delays[this.delays.length - 1] + (this.delays.length > 1 ? this.delays[this.delays.length - 2] : 0);
|
||||
this.delays.push(nextNum);
|
||||
}
|
||||
this.delays[this.delays.length - 1] = this.options.maxWait;
|
||||
|
||||
// set default lifetime
|
||||
if (typeof this.options.lifetime === 'undefined') {
|
||||
this.options.lifetime = Math.ceil((this.options.maxWait / 1000) * (this.delays.length + this.options.freeRetries));
|
||||
}
|
||||
|
||||
this.prevent = this.getMiddleware({
|
||||
prefix: this.options.prefix,
|
||||
});
|
||||
};
|
||||
|
||||
ExpressBruteFlexible.prototype.getMiddleware = function (options) {
|
||||
const opts = Object.assign({}, options);
|
||||
const commonKeyPrefix = opts.prefix || '';
|
||||
const freeLimiterOptions = {
|
||||
storeClient: this.options.storeClient,
|
||||
storeType: this.options.storeType,
|
||||
keyPrefix: `${commonKeyPrefix}free`,
|
||||
dbName: this.options.dbName,
|
||||
tableName: this.options.tableName,
|
||||
points: this.options.freeRetries > 0 ? this.options.freeRetries - 1 : 0,
|
||||
duration: this.options.lifetime,
|
||||
};
|
||||
|
||||
const blockLimiterOptions = {
|
||||
storeClient: this.options.storeClient,
|
||||
storeType: this.options.storeType,
|
||||
keyPrefix: `${commonKeyPrefix}block`,
|
||||
dbName: this.options.dbName,
|
||||
tableName: this.options.tableName,
|
||||
points: 1,
|
||||
duration: Math.min(this.options.lifetime, Math.ceil((this.options.maxWait / 1000))),
|
||||
};
|
||||
|
||||
const counterLimiterOptions = {
|
||||
storeClient: this.options.storeClient,
|
||||
storeType: this.options.storeType,
|
||||
keyPrefix: `${commonKeyPrefix}counter`,
|
||||
dbName: this.options.dbName,
|
||||
tableName: this.options.tableName,
|
||||
points: 1,
|
||||
duration: this.options.lifetime,
|
||||
};
|
||||
|
||||
switch (this.limiterType) {
|
||||
case 'memory':
|
||||
this.freeLimiter = new RateLimiterMemory(freeLimiterOptions);
|
||||
this.blockLimiter = new RateLimiterMemory(blockLimiterOptions);
|
||||
this.counterLimiter = new RateLimiterMemory(counterLimiterOptions);
|
||||
break;
|
||||
case 'cluster':
|
||||
this.freeLimiter = new RateLimiterCluster(freeLimiterOptions);
|
||||
this.blockLimiter = new RateLimiterCluster(blockLimiterOptions);
|
||||
this.counterLimiter = new RateLimiterCluster(counterLimiterOptions);
|
||||
break;
|
||||
case 'memcache':
|
||||
this.freeLimiter = new RateLimiterMemcache(freeLimiterOptions);
|
||||
this.blockLimiter = new RateLimiterMemcache(blockLimiterOptions);
|
||||
this.counterLimiter = new RateLimiterMemcache(counterLimiterOptions);
|
||||
break;
|
||||
case 'mongo':
|
||||
this.freeLimiter = new RateLimiterMongo(freeLimiterOptions);
|
||||
this.blockLimiter = new RateLimiterMongo(blockLimiterOptions);
|
||||
this.counterLimiter = new RateLimiterMongo(counterLimiterOptions);
|
||||
break;
|
||||
case 'mysql':
|
||||
this.freeLimiter = new RateLimiterMySQL(freeLimiterOptions);
|
||||
this.blockLimiter = new RateLimiterMySQL(blockLimiterOptions);
|
||||
this.counterLimiter = new RateLimiterMySQL(counterLimiterOptions);
|
||||
break;
|
||||
case 'postgres':
|
||||
this.freeLimiter = new RateLimiterPostgres(freeLimiterOptions);
|
||||
this.blockLimiter = new RateLimiterPostgres(blockLimiterOptions);
|
||||
this.counterLimiter = new RateLimiterPostgres(counterLimiterOptions);
|
||||
break;
|
||||
case 'redis':
|
||||
this.freeLimiter = new RateLimiterRedis(freeLimiterOptions);
|
||||
this.blockLimiter = new RateLimiterRedis(blockLimiterOptions);
|
||||
this.counterLimiter = new RateLimiterRedis(counterLimiterOptions);
|
||||
break;
|
||||
default:
|
||||
throw new Error(ERR_UNKNOWN_LIMITER_TYPE_MESSAGE);
|
||||
}
|
||||
|
||||
let keyFunc = opts.key;
|
||||
if (typeof keyFunc !== 'function') {
|
||||
keyFunc = function (req, res, next) {
|
||||
next(opts.key);
|
||||
};
|
||||
}
|
||||
|
||||
const getFailCallback = (() => (typeof opts.failCallback === 'undefined' ? this.options.failCallback : opts.failCallback));
|
||||
|
||||
return (req, res, next) => {
|
||||
const cannotIncrementErrorObjectBase = {
|
||||
req,
|
||||
res,
|
||||
next,
|
||||
message: 'Cannot increment request count',
|
||||
};
|
||||
|
||||
keyFunc(req, res, (key) => {
|
||||
if (!opts.ignoreIP) {
|
||||
key = ExpressBruteFlexible._getKey([req.ip, this.name, key]);
|
||||
} else {
|
||||
key = ExpressBruteFlexible._getKey([this.name, key]);
|
||||
}
|
||||
|
||||
// attach a simpler "reset" function to req.brute.reset
|
||||
if (this.options.attachResetToRequest) {
|
||||
let reset = ((callback) => {
|
||||
Promise.all([
|
||||
this.freeLimiter.delete(key),
|
||||
this.blockLimiter.delete(key),
|
||||
this.counterLimiter.delete(key),
|
||||
]).then(() => {
|
||||
if (typeof callback === 'function') {
|
||||
process.nextTick(() => {
|
||||
callback();
|
||||
});
|
||||
}
|
||||
}).catch((err) => {
|
||||
if (typeof callback === 'function') {
|
||||
process.nextTick(() => {
|
||||
callback(err);
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
if (req.brute && req.brute.reset) {
|
||||
// wrap existing reset if one exists
|
||||
const oldReset = req.brute.reset;
|
||||
const newReset = reset;
|
||||
reset = function (callback) {
|
||||
oldReset(() => {
|
||||
newReset(callback);
|
||||
});
|
||||
};
|
||||
}
|
||||
req.brute = {
|
||||
reset,
|
||||
};
|
||||
}
|
||||
|
||||
this.freeLimiter.consume(key)
|
||||
.then(() => {
|
||||
if (typeof next === 'function') {
|
||||
next();
|
||||
}
|
||||
})
|
||||
.catch(() => {
|
||||
Promise.all([
|
||||
this.blockLimiter.get(key),
|
||||
this.counterLimiter.get(key),
|
||||
])
|
||||
.then((allRes) => {
|
||||
const [blockRes, counterRes] = allRes;
|
||||
|
||||
if (blockRes === null) {
|
||||
const msDelay = getDelayMs(
|
||||
counterRes ? counterRes.consumedPoints + 1 : 1,
|
||||
this.delays,
|
||||
// eslint-disable-next-line
|
||||
this.options.maxWait
|
||||
);
|
||||
|
||||
this.blockLimiter.penalty(key, 1, { customDuration: Math.ceil(msDelay / 1000) })
|
||||
.then((blockPenaltyRes) => {
|
||||
if (blockPenaltyRes.consumedPoints === 1) {
|
||||
this.counterLimiter.penalty(key)
|
||||
.then(() => {
|
||||
if (typeof next === 'function') {
|
||||
next();
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
this.options.handleStoreError(Object.assign({}, cannotIncrementErrorObjectBase, { parent: err }));
|
||||
});
|
||||
} else {
|
||||
const nextValidDate = new Date(Date.now() + blockPenaltyRes.msBeforeNext);
|
||||
|
||||
const failCallback = getFailCallback();
|
||||
if (typeof failCallback === 'function') {
|
||||
failCallback(req, res, next, nextValidDate);
|
||||
}
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
this.options.handleStoreError(Object.assign({}, cannotIncrementErrorObjectBase, { parent: err }));
|
||||
});
|
||||
} else {
|
||||
const nextValidDate = new Date(Date.now() + blockRes.msBeforeNext);
|
||||
|
||||
const failCallback = getFailCallback();
|
||||
if (typeof failCallback === 'function') {
|
||||
failCallback(req, res, next, nextValidDate);
|
||||
}
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
this.options.handleStoreError(Object.assign({}, cannotIncrementErrorObjectBase, { parent: err }));
|
||||
});
|
||||
});
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
/**
 * Clear all limiter state (free, block and counter limiters) for a request.
 *
 * @param {string} ip - Client IP; included in the storage key when truthy.
 * @param {string} key - Request-specific key, combined with `ip` and this.name.
 * @param {Function} [callback] - Invoked with no arguments on success; on
 *   failure the error is routed to options.handleStoreError instead.
 */
ExpressBruteFlexible.prototype.reset = function (ip, key, callback) {
  // Build the hashed storage key from the non-empty parts, in the same
  // order the middleware uses: ip (if any), limiter name, request key.
  const keyArgs = [];
  if (ip) {
    keyArgs.push(ip);
  }
  keyArgs.push(this.name);
  keyArgs.push(key);
  const ebKey = ExpressBruteFlexible._getKey(keyArgs);

  Promise.all([
    this.freeLimiter.delete(ebKey),
    this.blockLimiter.delete(ebKey),
    this.counterLimiter.delete(ebKey),
  ]).then(() => {
    if (typeof callback === 'function') {
      process.nextTick(() => {
        callback();
      });
    }
  }).catch((err) => {
    this.options.handleStoreError({
      message: 'Cannot reset request count',
      parent: err,
      key,
      ip,
    });
  });
};
|
||||
|
||||
/**
 * Derive an opaque storage key from an array of key parts.
 * Each truthy part is hashed individually (sha256 → base64) and the
 * concatenation of those digests is hashed once more.
 */
ExpressBruteFlexible._getKey = function (arr) {
  const combined = arr
    .filter(part => part)
    .map(part => crypto.createHash('sha256').update(part).digest('base64'))
    .join('');

  return crypto.createHash('sha256').update(combined).digest('base64');
};
|
||||
|
||||
/**
 * Set the Retry-After response header to the number of whole seconds
 * (rounded up) until the next request will be accepted.
 */
const setRetryAfter = function (res, nextValidRequestDate) {
  const msRemaining = nextValidRequestDate.getTime() - Date.now();
  res.header('Retry-After', Math.ceil(msRemaining / 1000));
};
|
||||
/**
 * Default fail callback: respond 429 with a Retry-After header and a JSON
 * error body carrying the date the next request will be accepted.
 */
ExpressBruteFlexible.FailTooManyRequests = function (req, res, next, nextValidRequestDate) {
  setRetryAfter(res, nextValidRequestDate);
  res.status(429);
  const error = {
    text: 'Too many requests in this time frame.',
    nextValidRequestDate,
  };
  res.send({ error });
};
|
||||
/**
 * Fail callback variant that answers 403 Forbidden instead of 429.
 * NOTE(review): the body text intentionally matches FailTooManyRequests
 * and is kept byte-identical for compatibility.
 */
ExpressBruteFlexible.FailForbidden = function (req, res, next, nextValidRequestDate) {
  setRetryAfter(res, nextValidRequestDate);
  res.status(403);
  const error = {
    text: 'Too many requests in this time frame.',
    nextValidRequestDate,
  };
  res.send({ error });
};
|
||||
// Fail callback that marks the request instead of terminating it: sets the
// 429 status and Retry-After header, stashes the next valid date on the
// response object for downstream handlers, and passes control along.
ExpressBruteFlexible.FailMark = function (req, res, next, nextValidRequestDate) {
  res.status(429);
  setRetryAfter(res, nextValidRequestDate);
  res.nextValidRequestDate = nextValidRequestDate;
  next();
};
|
||||
|
||||
// Default options; any user-supplied option with the same key overrides these.
ExpressBruteFlexible.defaults = {
  freeRetries: 2,
  attachResetToRequest: true, // expose req.brute.reset in the middleware
  minWait: 500, // ms
  maxWait: 1000 * 60 * 15, // 15 minutes
  failCallback: ExpressBruteFlexible.FailTooManyRequests,
  handleStoreError(err) {
    // Default store-error handler: surface the failure loudly.
    // NOTE(review): throws a plain object (not an Error) — existing catchers
    // rely on the {message, parent} shape, so it is kept for compatibility.
    // eslint-disable-next-line
    throw {
      message: err.message,
      parent: err.parent,
    };
  },
};

// Supported limiter backend identifiers (declared earlier in this file).
ExpressBruteFlexible.LIMITER_TYPES = LIMITER_TYPES;

// Number of ExpressBruteFlexible instances created in this process.
ExpressBruteFlexible.instanceCount = 0;


module.exports = ExpressBruteFlexible;
|
||||
195
framework/node_modules/node-rate-limiter-flexible/lib/RLWrapperBlackAndWhite.js
generated
vendored
Normal file
@@ -0,0 +1,195 @@
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
module.exports = class RLWrapperBlackAndWhite {
|
||||
constructor(opts = {}) {
|
||||
this.limiter = opts.limiter;
|
||||
this.blackList = opts.blackList;
|
||||
this.whiteList = opts.whiteList;
|
||||
this.isBlackListed = opts.isBlackListed;
|
||||
this.isWhiteListed = opts.isWhiteListed;
|
||||
this.runActionAnyway = opts.runActionAnyway;
|
||||
}
|
||||
|
||||
get limiter() {
|
||||
return this._limiter;
|
||||
}
|
||||
|
||||
set limiter(value) {
|
||||
if (typeof value === 'undefined') {
|
||||
throw new Error('limiter is not set');
|
||||
}
|
||||
|
||||
this._limiter = value;
|
||||
}
|
||||
|
||||
get runActionAnyway() {
|
||||
return this._runActionAnyway;
|
||||
}
|
||||
|
||||
set runActionAnyway(value) {
|
||||
this._runActionAnyway = typeof value === 'undefined' ? false : value;
|
||||
}
|
||||
|
||||
get blackList() {
|
||||
return this._blackList;
|
||||
}
|
||||
|
||||
set blackList(value) {
|
||||
this._blackList = Array.isArray(value) ? value : [];
|
||||
}
|
||||
|
||||
get isBlackListed() {
|
||||
return this._isBlackListed;
|
||||
}
|
||||
|
||||
set isBlackListed(func) {
|
||||
if (typeof func === 'undefined') {
|
||||
func = () => false;
|
||||
}
|
||||
if (typeof func !== 'function') {
|
||||
throw new Error('isBlackListed must be function');
|
||||
}
|
||||
this._isBlackListed = func;
|
||||
}
|
||||
|
||||
get whiteList() {
|
||||
return this._whiteList;
|
||||
}
|
||||
|
||||
set whiteList(value) {
|
||||
this._whiteList = Array.isArray(value) ? value : [];
|
||||
}
|
||||
|
||||
get isWhiteListed() {
|
||||
return this._isWhiteListed;
|
||||
}
|
||||
|
||||
set isWhiteListed(func) {
|
||||
if (typeof func === 'undefined') {
|
||||
func = () => false;
|
||||
}
|
||||
if (typeof func !== 'function') {
|
||||
throw new Error('isWhiteListed must be function');
|
||||
}
|
||||
this._isWhiteListed = func;
|
||||
}
|
||||
|
||||
isBlackListedSomewhere(key) {
|
||||
return this.blackList.indexOf(key) >= 0 || this.isBlackListed(key);
|
||||
}
|
||||
|
||||
isWhiteListedSomewhere(key) {
|
||||
return this.whiteList.indexOf(key) >= 0 || this.isWhiteListed(key);
|
||||
}
|
||||
|
||||
getBlackRes() {
|
||||
return new RateLimiterRes(0, Number.MAX_SAFE_INTEGER, 0, false);
|
||||
}
|
||||
|
||||
getWhiteRes() {
|
||||
return new RateLimiterRes(Number.MAX_SAFE_INTEGER, 0, 0, false);
|
||||
}
|
||||
|
||||
rejectBlack() {
|
||||
return Promise.reject(this.getBlackRes());
|
||||
}
|
||||
|
||||
resolveBlack() {
|
||||
return Promise.resolve(this.getBlackRes());
|
||||
}
|
||||
|
||||
resolveWhite() {
|
||||
return Promise.resolve(this.getWhiteRes());
|
||||
}
|
||||
|
||||
consume(key, pointsToConsume = 1) {
|
||||
let res;
|
||||
if (this.isWhiteListedSomewhere(key)) {
|
||||
res = this.resolveWhite();
|
||||
} else if (this.isBlackListedSomewhere(key)) {
|
||||
res = this.rejectBlack();
|
||||
}
|
||||
|
||||
if (typeof res === 'undefined') {
|
||||
return this.limiter.consume(key, pointsToConsume);
|
||||
}
|
||||
|
||||
if (this.runActionAnyway) {
|
||||
this.limiter.consume(key, pointsToConsume).catch(() => {});
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
block(key, secDuration) {
|
||||
let res;
|
||||
if (this.isWhiteListedSomewhere(key)) {
|
||||
res = this.resolveWhite();
|
||||
} else if (this.isBlackListedSomewhere(key)) {
|
||||
res = this.resolveBlack();
|
||||
}
|
||||
|
||||
if (typeof res === 'undefined') {
|
||||
return this.limiter.block(key, secDuration);
|
||||
}
|
||||
|
||||
if (this.runActionAnyway) {
|
||||
this.limiter.block(key, secDuration).catch(() => {});
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
penalty(key, points) {
|
||||
let res;
|
||||
if (this.isWhiteListedSomewhere(key)) {
|
||||
res = this.resolveWhite();
|
||||
} else if (this.isBlackListedSomewhere(key)) {
|
||||
res = this.resolveBlack();
|
||||
}
|
||||
|
||||
if (typeof res === 'undefined') {
|
||||
return this.limiter.penalty(key, points);
|
||||
}
|
||||
|
||||
if (this.runActionAnyway) {
|
||||
this.limiter.penalty(key, points).catch(() => {});
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
reward(key, points) {
|
||||
let res;
|
||||
if (this.isWhiteListedSomewhere(key)) {
|
||||
res = this.resolveWhite();
|
||||
} else if (this.isBlackListedSomewhere(key)) {
|
||||
res = this.resolveBlack();
|
||||
}
|
||||
|
||||
if (typeof res === 'undefined') {
|
||||
return this.limiter.reward(key, points);
|
||||
}
|
||||
|
||||
if (this.runActionAnyway) {
|
||||
this.limiter.reward(key, points).catch(() => {});
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
get(key) {
|
||||
let res;
|
||||
if (this.isWhiteListedSomewhere(key)) {
|
||||
res = this.resolveWhite();
|
||||
} else if (this.isBlackListedSomewhere(key)) {
|
||||
res = this.resolveBlack();
|
||||
}
|
||||
|
||||
if (typeof res === 'undefined' || this.runActionAnyway) {
|
||||
return this.limiter.get(key);
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
delete(key) {
|
||||
return this.limiter.delete(key);
|
||||
}
|
||||
};
|
||||
125
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterAbstract.js
generated
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
module.exports = class RateLimiterAbstract {
|
||||
/**
|
||||
*
|
||||
* @param opts Object Defaults {
|
||||
* points: 4, // Number of points
|
||||
* duration: 1, // Per seconds
|
||||
* blockDuration: 0, // Block if consumed more than points in current duration for blockDuration seconds
|
||||
* execEvenly: false, // Execute allowed actions evenly over duration
|
||||
* execEvenlyMinDelayMs: duration * 1000 / points, // ms, works with execEvenly=true option
|
||||
* keyPrefix: 'rlflx',
|
||||
* }
|
||||
*/
|
||||
constructor(opts = {}) {
|
||||
this.points = opts.points;
|
||||
this.duration = opts.duration;
|
||||
this.blockDuration = opts.blockDuration;
|
||||
this.execEvenly = opts.execEvenly;
|
||||
this.execEvenlyMinDelayMs = opts.execEvenlyMinDelayMs;
|
||||
this.keyPrefix = opts.keyPrefix;
|
||||
}
|
||||
|
||||
get points() {
|
||||
return this._points;
|
||||
}
|
||||
|
||||
set points(value) {
|
||||
this._points = value >= 0 ? value : 4;
|
||||
}
|
||||
|
||||
get duration() {
|
||||
return this._duration;
|
||||
}
|
||||
|
||||
set duration(value) {
|
||||
this._duration = typeof value === 'undefined' ? 1 : value;
|
||||
}
|
||||
|
||||
get msDuration() {
|
||||
return this.duration * 1000;
|
||||
}
|
||||
|
||||
get blockDuration() {
|
||||
return this._blockDuration;
|
||||
}
|
||||
|
||||
set blockDuration(value) {
|
||||
this._blockDuration = typeof value === 'undefined' ? 0 : value;
|
||||
}
|
||||
|
||||
get msBlockDuration() {
|
||||
return this.blockDuration * 1000;
|
||||
}
|
||||
|
||||
get execEvenly() {
|
||||
return this._execEvenly;
|
||||
}
|
||||
|
||||
set execEvenly(value) {
|
||||
this._execEvenly = typeof value === 'undefined' ? false : Boolean(value);
|
||||
}
|
||||
|
||||
get execEvenlyMinDelayMs() {
|
||||
return this._execEvenlyMinDelayMs;
|
||||
}
|
||||
|
||||
set execEvenlyMinDelayMs(value) {
|
||||
this._execEvenlyMinDelayMs = typeof value === 'undefined' ? Math.ceil(this.msDuration / this.points) : value;
|
||||
}
|
||||
|
||||
get keyPrefix() {
|
||||
return this._keyPrefix;
|
||||
}
|
||||
|
||||
set keyPrefix(value) {
|
||||
if (typeof value === 'undefined') {
|
||||
value = 'rlflx';
|
||||
}
|
||||
if (typeof value !== 'string') {
|
||||
throw new Error('keyPrefix must be string');
|
||||
}
|
||||
this._keyPrefix = value;
|
||||
}
|
||||
|
||||
_getKeySecDuration(options = {}) {
|
||||
return options && options.customDuration >= 0
|
||||
? options.customDuration
|
||||
: this.duration;
|
||||
}
|
||||
|
||||
getKey(key) {
|
||||
return this.keyPrefix.length > 0 ? `${this.keyPrefix}:${key}` : key;
|
||||
}
|
||||
|
||||
parseKey(rlKey) {
|
||||
return rlKey.substring(this.keyPrefix.length);
|
||||
}
|
||||
|
||||
consume() {
|
||||
throw new Error("You have to implement the method 'consume'!");
|
||||
}
|
||||
|
||||
penalty() {
|
||||
throw new Error("You have to implement the method 'penalty'!");
|
||||
}
|
||||
|
||||
reward() {
|
||||
throw new Error("You have to implement the method 'reward'!");
|
||||
}
|
||||
|
||||
get() {
|
||||
throw new Error("You have to implement the method 'get'!");
|
||||
}
|
||||
|
||||
set() {
|
||||
throw new Error("You have to implement the method 'set'!");
|
||||
}
|
||||
|
||||
block() {
|
||||
throw new Error("You have to implement the method 'block'!");
|
||||
}
|
||||
|
||||
delete() {
|
||||
throw new Error("You have to implement the method 'delete'!");
|
||||
}
|
||||
};
|
||||
367
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterCluster.js
generated
vendored
Normal file
@@ -0,0 +1,367 @@
|
||||
/**
|
||||
* Implements rate limiting in cluster using built-in IPC
|
||||
*
|
||||
* Two classes are described here: master and worker
|
||||
 * The master has to be created in the master process without any options.
|
||||
* Any number of rate limiters can be created in workers, but each rate limiter must be with unique keyPrefix
|
||||
*
|
||||
* Workflow:
|
||||
* 1. master rate limiter created in master process
|
||||
* 2. worker rate limiter sends 'init' message with necessary options during creating
|
||||
* 3. master receives options and adds new rate limiter by keyPrefix if it isn't created yet
|
||||
* 4. master sends 'init' back to worker's rate limiter
|
||||
* 5. worker can process requests immediately,
|
||||
* but they will be postponed by 'workerWaitInit' until master sends 'init' to worker
|
||||
* 6. every request to worker rate limiter creates a promise
|
||||
 * 7. if the master doesn't respond within 'timeout', the promise is rejected
|
||||
* 8. master sends 'resolve' or 'reject' command to worker
|
||||
* 9. worker resolves or rejects promise depending on message from master
|
||||
*
|
||||
*/
|
||||
|
||||
const cluster = require('cluster');
|
||||
const crypto = require('crypto');
|
||||
const RateLimiterAbstract = require('./RateLimiterAbstract');
|
||||
const RateLimiterMemory = require('./RateLimiterMemory');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
const channel = 'rate_limiter_flexible';
|
||||
let masterInstance = null;
|
||||
|
||||
/**
 * Send the outcome of a limiter call from the master back to a worker.
 * Primitive results (null/true/false) are forwarded as-is; result objects
 * are flattened to a plain serialisable payload.
 */
const masterSendToWorker = function (worker, msg, type, res) {
  const isPrimitive = res === null || res === true || res === false;
  const data = isPrimitive
    ? res
    : {
      remainingPoints: res.remainingPoints,
      msBeforeNext: res.msBeforeNext,
      consumedPoints: res.consumedPoints,
      isFirstInDuration: res.isFirstInDuration,
    };

  worker.send({
    channel,
    keyPrefix: msg.keyPrefix, // routes the reply to the right limiter instance
    promiseId: msg.promiseId, // routes the reply to the right pending promise
    type,
    data,
  });
};
|
||||
|
||||
/**
 * Poll every 30ms until the master has acknowledged 'init', then send the
 * payload. Polling stops silently once the pending promise has been
 * removed (i.e. it timed out while waiting).
 */
const workerWaitInit = function (payload) {
  const retryDelayMs = 30;
  const poll = () => {
    if (this._initiated) {
      process.send(payload);
      return;
    }
    // Keep polling only while the promise is still pending; the timeout
    // handler removes it if the master never answers.
    if (this._promises[payload.promiseId] !== undefined) {
      workerWaitInit.call(this, payload);
    }
  };
  setTimeout(poll, retryDelayMs);
};
|
||||
|
||||
/**
 * Forward a limiter command from this worker to the master over IPC.
 * If the master has not yet acknowledged 'init', the send is deferred
 * via polling until it has.
 */
const workerSendToMaster = function (func, promiseId, key, arg, opts) {
  const payload = {
    channel,
    keyPrefix: this.keyPrefix,
    func,
    promiseId,
    data: { key, arg, opts },
  };

  if (this._initiated) {
    process.send(payload);
  } else {
    // Wait init before sending messages to master.
    workerWaitInit.call(this, payload);
  }
};
|
||||
|
||||
/**
 * Handle a limiter command from a worker: dispatch to the matching rate
 * limiter method and report the settled result back via masterSendToWorker.
 * Returns false for messages that are not for this channel, target an
 * unknown limiter, or carry an unknown command.
 */
const masterProcessMsg = function (worker, msg) {
  if (!msg || msg.channel !== channel || typeof this._rateLimiters[msg.keyPrefix] === 'undefined') {
    return false;
  }

  const limiter = this._rateLimiters[msg.keyPrefix];

  let promise;
  if (['consume', 'penalty', 'reward', 'block'].indexOf(msg.func) !== -1) {
    // Three-argument commands: (key, amount, options).
    promise = limiter[msg.func](msg.data.key, msg.data.arg, msg.data.opts);
  } else if (msg.func === 'get' || msg.func === 'delete') {
    // Two-argument commands: (key, options).
    promise = limiter[msg.func](msg.data.key, msg.data.opts);
  } else {
    return false;
  }

  if (promise) {
    promise
      .then((res) => {
        masterSendToWorker(worker, msg, 'resolve', res);
      })
      .catch((rejRes) => {
        masterSendToWorker(worker, msg, 'reject', rejRes);
      });
  }
};
|
||||
|
||||
/**
 * Handle a master reply in the worker: find the pending promise by id,
 * cancel its timeout, rebuild the result, and settle it.
 * Returns false for messages addressed to another channel/limiter.
 */
const workerProcessMsg = function (msg) {
  if (!msg || msg.channel !== channel || msg.keyPrefix !== this.keyPrefix) {
    return false;
  }

  const pending = this._promises[msg.promiseId];
  if (!pending) {
    return;
  }

  clearTimeout(pending.timeoutId);

  const isPrimitive = msg.data === null || msg.data === true || msg.data === false;
  const res = isPrimitive
    ? msg.data
    : new RateLimiterRes(
      msg.data.remainingPoints,
      msg.data.msBeforeNext,
      msg.data.consumedPoints,
      msg.data.isFirstInDuration // eslint-disable-line comma-dangle
    );

  if (msg.type === 'resolve') {
    pending.resolve(res);
  } else if (msg.type === 'reject') {
    pending.reject(res);
  } else {
    // Unknown type: throw before removing the pending promise, matching
    // the original dispatch behaviour.
    throw new Error(`RateLimiterCluster: no such message type '${msg.type}'`);
  }

  delete this._promises[msg.promiseId];
};
|
||||
/**
 * Snapshot this limiter's options for transfer to the master process,
 * which uses them to create the backing in-memory limiter.
 *
 * @returns {{points: *, duration: *, blockDuration: *, execEvenly: *, execEvenlyMinDelayMs: *, keyPrefix: *}}
 */
const getOpts = function () {
  const {
    points, duration, blockDuration, execEvenly, execEvenlyMinDelayMs, keyPrefix,
  } = this;
  return {
    points, duration, blockDuration, execEvenly, execEvenlyMinDelayMs, keyPrefix,
  };
};
|
||||
|
||||
/**
 * Register a pending promise under a (practically) unique id and arm a
 * timeout that removes and rejects it if the master never answers.
 * Returns the promise id to embed in the outgoing message.
 */
const savePromise = function (resolve, reject) {
  const [sec, nano] = process.hrtime();
  let promiseId = `${sec}${nano}`;

  // Extremely unlikely hrtime collision: append random salt.
  if (typeof this._promises[promiseId] !== 'undefined') {
    promiseId += crypto.randomBytes(12).toString('base64');
  }

  const timeoutId = setTimeout(() => {
    delete this._promises[promiseId];
    reject(new Error('RateLimiterCluster timeout: no answer from master in time'));
  }, this.timeoutMs);

  this._promises[promiseId] = { resolve, reject, timeoutId };

  return promiseId;
};
|
||||
|
||||
/**
 * Master-side hub for Node's built-in cluster IPC. Creates one in-memory
 * limiter per keyPrefix on demand and relays worker commands to it.
 * Behaves as a per-process singleton.
 */
class RateLimiterClusterMaster {
  constructor() {
    if (masterInstance) {
      return masterInstance;
    }

    this._rateLimiters = {};

    cluster.setMaxListeners(0);

    cluster.on('message', (worker, msg) => {
      const isInit = msg && msg.channel === channel && msg.type === 'init';
      if (!isInit) {
        masterProcessMsg.call(this, worker, msg);
        return;
      }

      // Init request: lazily create the limiter for this keyPrefix, then
      // acknowledge so the worker can start sending commands.
      if (typeof this._rateLimiters[msg.opts.keyPrefix] === 'undefined') {
        this._rateLimiters[msg.opts.keyPrefix] = new RateLimiterMemory(msg.opts);
      }

      worker.send({
        channel,
        type: 'init',
        keyPrefix: msg.opts.keyPrefix,
      });
    });

    masterInstance = this;
  }
}
|
||||
|
||||
// Master-side hub for PM2 cluster mode: same role as RateLimiterClusterMaster,
// but messages travel over the PM2 bus instead of Node's built-in cluster IPC.
// Behaves as a per-process singleton.
class RateLimiterClusterMasterPM2 {
  constructor(pm2) {
    if (masterInstance) {
      return masterInstance;
    }

    this._rateLimiters = {};

    pm2.launchBus((err, pm2Bus) => {
      pm2Bus.on('process:msg', (packet) => {
        const msg = packet.raw;
        if (msg && msg.channel === channel && msg.type === 'init') {
          // If init request, check or create rate limiter by key prefix and send 'init' back to worker
          if (typeof this._rateLimiters[msg.opts.keyPrefix] === 'undefined') {
            this._rateLimiters[msg.opts.keyPrefix] = new RateLimiterMemory(msg.opts);
          }

          // PM2 messages must carry `topic` and `data` fields.
          pm2.sendDataToProcessId(packet.process.pm_id, {
            data: {},
            topic: channel,
            channel,
            type: 'init',
            keyPrefix: msg.opts.keyPrefix,
          }, (sendErr, res) => {
            if (sendErr) {
              console.log(sendErr, res);
            }
          });
        } else {
          // Adapt the PM2 send API to the worker.send(msg) shape that
          // masterProcessMsg expects.
          const worker = {
            send: (msgData) => {
              const pm2Message = msgData;
              pm2Message.topic = channel;
              if (typeof pm2Message.data === 'undefined') {
                pm2Message.data = {};
              }
              pm2.sendDataToProcessId(packet.process.pm_id, pm2Message, (sendErr, res) => {
                if (sendErr) {
                  console.log(sendErr, res);
                }
              });
            },
          };
          masterProcessMsg.call(this, worker, msg);
        }
      });
    });

    masterInstance = this;
  }
}
|
||||
|
||||
/**
 * Worker-side limiter: every operation is forwarded to the master over IPC
 * and resolved/rejected when the master replies (or rejected after
 * timeoutMs if it never does).
 */
class RateLimiterClusterWorker extends RateLimiterAbstract {
  get timeoutMs() {
    return this._timeoutMs;
  }

  set timeoutMs(value) {
    // Default 5000ms; any provided value is coerced to a non-negative int.
    this._timeoutMs = typeof value === 'undefined' ? 5000 : Math.abs(parseInt(value));
  }

  constructor(opts = {}) {
    super(opts);

    process.setMaxListeners(0);

    this.timeoutMs = opts.timeoutMs;

    this._initiated = false;

    process.on('message', (msg) => {
      const isInitAck = msg && msg.channel === channel
        && msg.type === 'init' && msg.keyPrefix === this.keyPrefix;
      if (isInitAck) {
        this._initiated = true;
      } else {
        workerProcessMsg.call(this, msg);
      }
    });

    // Ask the master to create the backing limiter with these options.
    process.send({
      channel,
      type: 'init',
      opts: getOpts.call(this),
    });

    this._promises = {};
  }

  /** Register a pending promise and forward `func` to the master. */
  _dispatch(func, key, arg, opts) {
    return new Promise((resolve, reject) => {
      const promiseId = savePromise.call(this, resolve, reject);

      workerSendToMaster.call(this, func, promiseId, key, arg, opts);
    });
  }

  consume(key, pointsToConsume = 1, options = {}) {
    return this._dispatch('consume', key, pointsToConsume, options);
  }

  penalty(key, points = 1, options = {}) {
    return this._dispatch('penalty', key, points, options);
  }

  reward(key, points = 1, options = {}) {
    return this._dispatch('reward', key, points, options);
  }

  block(key, secDuration, options = {}) {
    return this._dispatch('block', key, secDuration, options);
  }

  get(key, options = {}) {
    // NOTE: options rides in the `arg` slot here, matching the original
    // wire format for get/delete.
    return this._dispatch('get', key, options);
  }

  delete(key, options = {}) {
    return this._dispatch('delete', key, options);
  }
}
|
||||
|
||||
// Public API: both master hubs (plain cluster and PM2 flavours) plus the
// worker-side limiter, exported under its public name RateLimiterCluster.
module.exports = {
  RateLimiterClusterMaster,
  RateLimiterClusterMasterPM2,
  RateLimiterCluster: RateLimiterClusterWorker,
};
|
||||
150
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterMemcache.js
generated
vendored
Normal file
@@ -0,0 +1,150 @@
|
||||
const RateLimiterStoreAbstract = require('./RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
// Rate limiter backed by Memcache.
//
// Memcache cannot report a key's remaining TTL, so every counter key
// `rlKey` is paired with an `${rlKey}_expire` key holding the absolute
// expiry timestamp in ms (or -1 for "no expiry").
class RateLimiterMemcache extends RateLimiterStoreAbstract {
  /**
   *
   * @param {Object} opts
   * Defaults {
   *   ... see other in RateLimiterStoreAbstract
   *
   *   storeClient: memcacheClient
   * }
   */
  constructor(opts) {
    super(opts);

    this.client = opts.storeClient;
  }

  // Translate the raw {consumedPoints, msBeforeNext} store result into a
  // RateLimiterRes for callers.
  _getRateLimiterRes(rlKey, changedPoints, result) {
    const res = new RateLimiterRes();
    res.consumedPoints = parseInt(result.consumedPoints);
    // The counter equals the delta we just applied only on the call that
    // created the key.
    res.isFirstInDuration = result.consumedPoints === changedPoints;
    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    res.msBeforeNext = result.msBeforeNext;

    return res;
  }

  // Create-or-increment the counter for rlKey; forceExpire=true overwrites
  // the counter and resets its TTL. Races between incr/add are retried up
  // to 3 times.
  _upsert(rlKey, points, msDuration, forceExpire = false, options = {}) {
    return new Promise((resolve, reject) => {
      const nowMs = Date.now();
      const secDuration = Math.floor(msDuration / 1000);

      if (forceExpire) {
        this.client.set(rlKey, points, secDuration, (err) => {
          if (!err) {
            // Record the absolute expiry alongside the counter.
            this.client.set(
              `${rlKey}_expire`,
              secDuration > 0 ? nowMs + (secDuration * 1000) : -1,
              secDuration,
              () => {
                const res = {
                  consumedPoints: points,
                  msBeforeNext: secDuration > 0 ? secDuration * 1000 : -1,
                };
                resolve(res);
              }
            );
          } else {
            reject(err);
          }
        });
      } else {
        // Try increment first; it fails when the key does not exist yet.
        this.client.incr(rlKey, points, (err, consumedPoints) => {
          if (err || consumedPoints === false) {
            // Key missing: add it (add fails if another client won the race).
            this.client.add(rlKey, points, secDuration, (errAddKey, createdNew) => {
              if (errAddKey || !createdNew) {
                // Try to upsert again in case of race condition
                if (typeof options.attemptNumber === 'undefined' || options.attemptNumber < 3) {
                  const nextOptions = Object.assign({}, options);
                  nextOptions.attemptNumber = nextOptions.attemptNumber ? (nextOptions.attemptNumber + 1) : 1;

                  this._upsert(rlKey, points, msDuration, forceExpire, nextOptions)
                    .then(resUpsert => resolve(resUpsert))
                    .catch(errUpsert => reject(errUpsert));
                } else {
                  reject(new Error('Can not add key'));
                }
              } else {
                this.client.add(
                  `${rlKey}_expire`,
                  secDuration > 0 ? nowMs + (secDuration * 1000) : -1,
                  secDuration,
                  () => {
                    const res = {
                      consumedPoints: points,
                      msBeforeNext: secDuration > 0 ? secDuration * 1000 : -1,
                    };
                    resolve(res);
                  }
                );
              }
            });
          } else {
            // Incremented an existing counter: read its stored expiry to
            // compute the remaining window.
            this.client.get(`${rlKey}_expire`, (errGetExpire, resGetExpireMs) => {
              if (errGetExpire) {
                reject(errGetExpire);
              } else {
                const expireMs = resGetExpireMs === false ? 0 : resGetExpireMs;
                const res = {
                  consumedPoints,
                  msBeforeNext: expireMs >= 0 ? Math.max(expireMs - nowMs, 0) : -1,
                };
                resolve(res);
              }
            });
          }
        });
      }
    });
  }

  // Fetch current counter state; resolves null for unknown keys.
  // NOTE(review): a get error also lands in the first branch (consumedPoints
  // undefined) and resolves null, silently swallowing the error.
  _get(rlKey) {
    return new Promise((resolve, reject) => {
      const nowMs = Date.now();

      this.client.get(rlKey, (err, consumedPoints) => {
        if (!consumedPoints) {
          resolve(null);
        } else {
          this.client.get(`${rlKey}_expire`, (errGetExpire, resGetExpireMs) => {
            if (errGetExpire) {
              reject(errGetExpire);
            } else {
              const expireMs = resGetExpireMs === false ? 0 : resGetExpireMs;
              const res = {
                consumedPoints,
                msBeforeNext: expireMs >= 0 ? Math.max(expireMs - nowMs, 0) : -1,
              };
              resolve(res);
            }
          });
        }
      });
    });
  }

  // Delete the counter and, if it existed, its companion expiry key.
  _delete(rlKey) {
    return new Promise((resolve, reject) => {
      this.client.del(rlKey, (err, res) => {
        if (err) {
          reject(err);
        } else if (res === false) {
          // Nothing was deleted; report that without touching the expire key.
          resolve(res);
        } else {
          this.client.del(`${rlKey}_expire`, (errDelExpire) => {
            if (errDelExpire) {
              reject(errDelExpire);
            } else {
              resolve(res);
            }
          });
        }
      });
    });
  }
}

module.exports = RateLimiterMemcache;
|
||||
106
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterMemory.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
const RateLimiterAbstract = require('./RateLimiterAbstract');
|
||||
const MemoryStorage = require('./component/MemoryStorage/MemoryStorage');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
/**
 * In-process rate limiter backed by a simple in-memory storage.
 */
class RateLimiterMemory extends RateLimiterAbstract {
  constructor(opts = {}) {
    super(opts);

    this._memoryStorage = new MemoryStorage();
  }

  /**
   * Consume points for a key. Rejects with the result object when the
   * limit is exceeded; with execEvenly, resolution of allowed calls is
   * delayed to smear them over the remaining window.
   *
   * @param key
   * @param pointsToConsume
   * @param {Object} options
   * @returns {Promise<RateLimiterRes>}
   */
  consume(key, pointsToConsume = 1, options = {}) {
    return new Promise((resolve, reject) => {
      const rlKey = this.getKey(key);
      const secDuration = this._getKeySecDuration(options);
      let res = this._memoryStorage.incrby(rlKey, pointsToConsume, secDuration);
      res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);

      if (res.consumedPoints > this.points) {
        // Only the first consume past the limit converts the key into a block.
        const justCrossed = res.consumedPoints <= (this.points + pointsToConsume);
        if (this.blockDuration > 0 && justCrossed) {
          res = this._memoryStorage.set(rlKey, res.consumedPoints, this.blockDuration);
        }
        reject(res);
        return;
      }

      if (this.execEvenly && res.msBeforeNext > 0 && !res.isFirstInDuration) {
        // Spread allowed actions across the remaining window.
        let delayMs = Math.ceil(res.msBeforeNext / (res.remainingPoints + 2));
        if (delayMs < this.execEvenlyMinDelayMs) {
          delayMs = res.consumedPoints * this.execEvenlyMinDelayMs;
        }

        setTimeout(resolve, delayMs, res);
        return;
      }

      resolve(res);
    });
  }

  penalty(key, points = 1, options = {}) {
    const rlKey = this.getKey(key);
    return new Promise((resolve) => {
      const secDuration = this._getKeySecDuration(options);
      const penaltyRes = this._memoryStorage.incrby(rlKey, points, secDuration);
      penaltyRes.remainingPoints = Math.max(this.points - penaltyRes.consumedPoints, 0);
      resolve(penaltyRes);
    });
  }

  reward(key, points = 1, options = {}) {
    const rlKey = this.getKey(key);
    return new Promise((resolve) => {
      const secDuration = this._getKeySecDuration(options);
      const rewardRes = this._memoryStorage.incrby(rlKey, -points, secDuration);
      rewardRes.remainingPoints = Math.max(this.points - rewardRes.consumedPoints, 0);
      resolve(rewardRes);
    });
  }

  /**
   * Block any key for secDuration seconds
   *
   * @param key
   * @param secDuration
   */
  block(key, secDuration) {
    const msDuration = secDuration * 1000;
    const initPoints = this.points + 1; // one past the limit marks it blocked

    this._memoryStorage.set(this.getKey(key), initPoints, secDuration);
    return Promise.resolve(
      new RateLimiterRes(0, msDuration === 0 ? -1 : msDuration, initPoints)
    );
  }

  set(key, points, secDuration) {
    // The reported duration falls back to this.duration for negative or
    // missing secDuration, but the raw value is still handed to storage.
    const effectiveSec = secDuration >= 0 ? secDuration : this.duration;
    const msDuration = effectiveSec * 1000;

    this._memoryStorage.set(this.getKey(key), points, secDuration);
    return Promise.resolve(
      new RateLimiterRes(0, msDuration === 0 ? -1 : msDuration, points)
    );
  }

  get(key) {
    const res = this._memoryStorage.get(this.getKey(key));
    if (res !== null) {
      res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    }

    return Promise.resolve(res);
  }

  delete(key) {
    return Promise.resolve(this._memoryStorage.delete(this.getKey(key)));
  }
}

module.exports = RateLimiterMemory;
|
||||
|
||||
273
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterMongo.js
generated
vendored
Normal file
@@ -0,0 +1,273 @@
|
||||
const RateLimiterStoreAbstract = require('./RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
/**
 * Get MongoDB driver version, as upsert options differ between versions.
 *
 * @param {Object} client MongoClient instance, or a wrapper exposing it on `.client`
 * @returns {{major: number, feature: number, patch: number}} parsed version parts;
 *   { major: 0, feature: 0, patch: 0 } when the version cannot be detected
 */
function getDriverVersion(client) {
  try {
    // Some wrappers (e.g. mongoose connections) expose the raw client on `.client`
    const _client = client.client ? client.client : client;

    const { version } = _client.topology.s.options.metadata.driver;
    // Always pass an explicit radix to parseInt
    const _v = version.split('.').map(v => parseInt(v, 10));

    return {
      major: _v[0],
      feature: _v[1],
      patch: _v[2],
    };
  } catch (err) {
    // Any unexpected internal driver layout => report "unknown" version
    return { major: 0, feature: 0, patch: 0 };
  }
}
|
||||
|
||||
class RateLimiterMongo extends RateLimiterStoreAbstract {
  /**
   * Rate limiter backed by a MongoDB collection with a TTL index on `expire`.
   *
   * @param {Object} opts
   * Defaults {
   *   indexKeyPrefix: {attr1: 1, attr2: 1}
   *   ... see other in RateLimiterStoreAbstract
   *
   *   mongo: MongoClient
   * }
   */
  constructor(opts) {
    super(opts);

    this.dbName = opts.dbName;
    this.tableName = opts.tableName;
    this.indexKeyPrefix = opts.indexKeyPrefix;

    // `mongo` is the legacy option name; `storeClient` is the generic one
    if (opts.mongo) {
      this.client = opts.mongo;
    } else {
      this.client = opts.storeClient;
    }
    if (typeof this.client.then === 'function') {
      // A connection Promise was passed: defer collection/index setup until ready
      this.client
        .then((conn) => {
          this.client = conn;
          this._initCollection();
          this._driverVersion = getDriverVersion(this.client);
        });
    } else {
      this._initCollection();
      this._driverVersion = getDriverVersion(this.client);
    }
  }

  get dbName() {
    return this._dbName;
  }

  set dbName(value) {
    this._dbName = typeof value === 'undefined' ? RateLimiterMongo.getDbName() : value;
  }

  static getDbName() {
    return 'node-rate-limiter-flexible';
  }

  get tableName() {
    return this._tableName;
  }

  set tableName(value) {
    this._tableName = typeof value === 'undefined' ? this.keyPrefix : value;
  }

  get client() {
    return this._client;
  }

  set client(value) {
    if (typeof value === 'undefined') {
      throw new Error('mongo is not set');
    }
    this._client = value;
  }

  get indexKeyPrefix() {
    return this._indexKeyPrefix;
  }

  set indexKeyPrefix(obj) {
    this._indexKeyPrefix = obj || {};
  }

  /**
   * Resolve the target collection and ensure its indexes exist:
   * a TTL index so Mongo removes expired docs, and a unique key index.
   * @private
   */
  _initCollection() {
    const db = typeof this.client.db === 'function'
      ? this.client.db(this.dbName)
      : this.client;

    const collection = db.collection(this.tableName);
    collection.createIndex({ expire: -1 }, { expireAfterSeconds: 0 });
    collection.createIndex(Object.assign({}, this.indexKeyPrefix, { key: 1 }), { unique: true });

    this._collection = collection;
  }

  /**
   * Convert a findOneAndUpdate result into a RateLimiterRes.
   * @private
   */
  _getRateLimiterRes(rlKey, changedPoints, result) {
    const res = new RateLimiterRes();

    // Newer drivers may return the document directly; older ones wrap it in `.value`
    let doc;
    if (typeof result.value === 'undefined') {
      doc = result;
    } else {
      doc = result.value;
    }

    res.isFirstInDuration = doc.points === changedPoints;
    res.consumedPoints = doc.points;

    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    // expire === null means "never expires"
    res.msBeforeNext = doc.expire !== null
      ? Math.max(new Date(doc.expire).getTime() - Date.now(), 0)
      : -1;

    return res;
  }

  /**
   * Atomically create-or-increment the limit document for `key`.
   * @private
   */
  _upsert(key, points, msDuration, forceExpire = false, options = {}) {
    if (!this._collection) {
      return Promise.reject(Error('Mongo connection is not established'));
    }

    const docAttrs = options.attrs || {};

    let where;
    let upsertData;
    if (forceExpire) {
      // Overwrite the doc regardless of its current expire
      where = { key };
      where = Object.assign(where, docAttrs);
      upsertData = {
        $set: {
          key,
          points,
          expire: msDuration > 0 ? new Date(Date.now() + msDuration) : null,
        },
      };
      upsertData.$set = Object.assign(upsertData.$set, docAttrs);
    } else {
      // Increment only non-expired docs; insert a fresh one otherwise
      where = {
        $or: [
          { expire: { $gt: new Date() } },
          { expire: { $eq: null } },
        ],
        key,
      };
      where = Object.assign(where, docAttrs);
      upsertData = {
        $setOnInsert: {
          key,
          expire: msDuration > 0 ? new Date(Date.now() + msDuration) : null,
        },
        $inc: { points },
      };
      upsertData.$setOnInsert = Object.assign(upsertData.$setOnInsert, docAttrs);
    }

    // Options for collection updates differ between driver versions:
    // `returnDocument: 'after'` is supported by drivers >= 4.0, 3.7+, and 3.6.7+.
    // Explicit grouping below fixes the original operator-precedence bug that
    // applied the feature/patch check regardless of the major version.
    const upsertOptions = {
      upsert: true,
    };
    const { major, feature, patch } = this._driverVersion;
    if (major >= 4
      || (major === 3 && (feature >= 7 || (feature === 6 && patch >= 7)))) {
      upsertOptions.returnDocument = 'after';
    } else {
      upsertOptions.returnOriginal = false;
    }

    /*
     * 1. Find actual limit and increment points
     * 2. If limit expired, but Mongo doesn't clean doc by TTL yet, try to replace limit doc completely
     * 3. If 2 or more Mongo threads try to insert the new limit doc, only the first succeed
     * 4. Try to upsert from step 1. Actual limit is created now, points are incremented without problems
     */
    return new Promise((resolve, reject) => {
      this._collection.findOneAndUpdate(
        where,
        upsertData,
        upsertOptions
      ).then((res) => {
        resolve(res);
      }).catch((errUpsert) => {
        if (errUpsert && errUpsert.code === 11000) { // E11000 duplicate key error collection
          const replaceWhere = Object.assign({ // try to replace OLD limit doc
            $or: [
              { expire: { $lte: new Date() } },
              { expire: { $eq: null } },
            ],
            key,
          }, docAttrs);

          const replaceTo = {
            $set: Object.assign({
              key,
              points,
              expire: msDuration > 0 ? new Date(Date.now() + msDuration) : null,
            }, docAttrs)
          };

          this._collection.findOneAndUpdate(
            replaceWhere,
            replaceTo,
            upsertOptions
          ).then((res) => {
            resolve(res);
          }).catch((errReplace) => {
            if (errReplace && errReplace.code === 11000) { // E11000 duplicate key error collection
              // NOTE(review): `options` is not forwarded on this retry in the
              // original implementation — preserved as-is; confirm whether
              // custom attrs should survive the retry.
              this._upsert(key, points, msDuration, forceExpire)
                .then(res => resolve(res))
                .catch(err => reject(err));
            } else {
              reject(errReplace);
            }
          });
        } else {
          reject(errUpsert);
        }
      });
    });
  }

  /**
   * Fetch the non-expired limit document for `rlKey`, or null.
   * @private
   */
  _get(rlKey, options = {}) {
    if (!this._collection) {
      return Promise.reject(Error('Mongo connection is not established'));
    }

    const docAttrs = options.attrs || {};

    const where = Object.assign({
      key: rlKey,
      $or: [
        { expire: { $gt: new Date() } },
        { expire: { $eq: null } },
      ],
    }, docAttrs);

    return this._collection.findOne(where);
  }

  /**
   * Delete the limit document for `rlKey`; resolves true when one was removed.
   * @private
   */
  _delete(rlKey, options = {}) {
    if (!this._collection) {
      return Promise.reject(Error('Mongo connection is not established'));
    }

    const docAttrs = options.attrs || {};
    const where = Object.assign({ key: rlKey }, docAttrs);

    return this._collection.deleteOne(where)
      .then(res => res.deletedCount > 0);
  }
}
|
||||
|
||||
module.exports = RateLimiterMongo;
|
||||
379
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterMySQL.js
generated
vendored
Normal file
@@ -0,0 +1,379 @@
|
||||
const RateLimiterStoreAbstract = require('./RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
class RateLimiterMySQL extends RateLimiterStoreAbstract {
  /**
   * Rate limiter backed by a MySQL table (`key`, `points`, `expire` columns).
   *
   * @callback callback
   * @param {Object} err
   *
   * @param {Object} opts
   * @param {callback} cb invoked after table creation succeeds or fails
   * Defaults {
   *   ... see other in RateLimiterStoreAbstract
   *
   *   storeClient: anySqlClient,
   *   storeType: 'knex', // required only for Knex instance
   *   dbName: 'string',
   *   tableName: 'string',
   * }
   */
  constructor(opts, cb = null) {
    super(opts);

    this.client = opts.storeClient;
    this.clientType = opts.storeType;

    this.dbName = opts.dbName;
    this.tableName = opts.tableName;

    this.clearExpiredByTimeout = opts.clearExpiredByTimeout;

    this.tableCreated = opts.tableCreated;
    if (!this.tableCreated) {
      // Lazily create db + table, then report readiness through cb (or throw)
      this._createDbAndTable()
        .then(() => {
          this.tableCreated = true;
          if (this.clearExpiredByTimeout) {
            this._clearExpiredHourAgo();
          }
          if (typeof cb === 'function') {
            cb();
          }
        })
        .catch((err) => {
          if (typeof cb === 'function') {
            cb(err);
          } else {
            // No callback to report through: fail loudly
            throw err;
          }
        });
    } else {
      if (this.clearExpiredByTimeout) {
        this._clearExpiredHourAgo();
      }
      if (typeof cb === 'function') {
        cb();
      }
    }
  }

  /**
   * Delete all rows whose `expire` is before the given timestamp (ms).
   * Never rejects — cleanup is best-effort.
   *
   * @param {number} expire ms timestamp cutoff
   * @returns {Promise<void>}
   */
  clearExpired(expire) {
    return new Promise((resolve) => {
      this._getConnection()
        .then((conn) => {
          conn.query(`DELETE FROM ??.?? WHERE expire < ?`, [this.dbName, this.tableName, expire], () => {
            this._releaseConnection(conn);
            resolve();
          });
        })
        .catch(() => {
          // Could not get a connection: skip this cleanup round
          resolve();
        });
    });
  }

  /**
   * Delete all rows expired 1 hour ago, re-arming itself every 5 minutes.
   * The timeout is unref'd so it never keeps the process alive.
   * @private
   */
  _clearExpiredHourAgo() {
    if (this._clearExpiredTimeoutId) {
      clearTimeout(this._clearExpiredTimeoutId);
    }
    this._clearExpiredTimeoutId = setTimeout(() => {
      this.clearExpired(Date.now() - 3600000) // Never rejected
        .then(() => {
          this._clearExpiredHourAgo();
        });
    }, 300000);
    this._clearExpiredTimeoutId.unref();
  }

  /**
   * Acquire a connection appropriate for the configured client type.
   *
   * @return Promise<any>
   * @private
   */
  _getConnection() {
    switch (this.clientType) {
      case 'pool':
        return new Promise((resolve, reject) => {
          this.client.getConnection((errConn, conn) => {
            if (errConn) {
              return reject(errConn);
            }

            resolve(conn);
          });
        });
      case 'sequelize':
        return this.client.connectionManager.getConnection();
      case 'knex':
        return this.client.client.acquireConnection();
      default:
        // Single-connection clients are used directly
        return Promise.resolve(this.client);
    }
  }

  // Return a connection to its pool/manager; no-op for direct connections.
  _releaseConnection(conn) {
    switch (this.clientType) {
      case 'pool':
        return conn.release();
      case 'sequelize':
        return this.client.connectionManager.releaseConnection(conn);
      case 'knex':
        return this.client.client.releaseConnection(conn);
      default:
        return true;
    }
  }

  /**
   * Create the database and table if they do not exist yet.
   *
   * @returns {Promise<any>}
   * @private
   */
  _createDbAndTable() {
    return new Promise((resolve, reject) => {
      this._getConnection()
        .then((conn) => {
          conn.query(`CREATE DATABASE IF NOT EXISTS \`${this.dbName}\`;`, (errDb) => {
            if (errDb) {
              this._releaseConnection(conn);
              return reject(errDb);
            }
            conn.query(this._getCreateTableStmt(), (err) => {
              if (err) {
                this._releaseConnection(conn);
                return reject(err);
              }
              this._releaseConnection(conn);
              resolve();
            });
          });
        })
        .catch((err) => {
          reject(err);
        });
    });
  }

  // DDL for the limits table; `key` is the primary key, `expire` is a ms timestamp.
  _getCreateTableStmt() {
    return `CREATE TABLE IF NOT EXISTS \`${this.dbName}\`.\`${this.tableName}\` (` +
      '`key` VARCHAR(255) CHARACTER SET utf8 NOT NULL,' +
      '`points` INT(9) NOT NULL default 0,' +
      '`expire` BIGINT UNSIGNED,' +
      'PRIMARY KEY (`key`)' +
      ') ENGINE = INNODB;';
  }

  get clientType() {
    return this._clientType;
  }

  // Auto-detect the client type from its constructor name when not provided.
  set clientType(value) {
    if (typeof value === 'undefined') {
      if (this.client.constructor.name === 'Connection') {
        value = 'connection';
      } else if (this.client.constructor.name === 'Pool') {
        value = 'pool';
      } else if (this.client.constructor.name === 'Sequelize') {
        value = 'sequelize';
      } else {
        throw new Error('storeType is not defined');
      }
    }
    this._clientType = value.toLowerCase();
  }

  get dbName() {
    return this._dbName;
  }

  set dbName(value) {
    this._dbName = typeof value === 'undefined' ? 'rtlmtrflx' : value;
  }

  get tableName() {
    return this._tableName;
  }

  set tableName(value) {
    this._tableName = typeof value === 'undefined' ? this.keyPrefix : value;
  }

  get tableCreated() {
    return this._tableCreated
  }

  set tableCreated(value) {
    this._tableCreated = typeof value === 'undefined' ? false : !!value;
  }

  get clearExpiredByTimeout() {
    return this._clearExpiredByTimeout;
  }

  set clearExpiredByTimeout(value) {
    this._clearExpiredByTimeout = typeof value === 'undefined' ? true : Boolean(value);
  }

  // Convert the SELECT row produced by _upsertTransaction into a RateLimiterRes.
  _getRateLimiterRes(rlKey, changedPoints, result) {
    const res = new RateLimiterRes();
    const [row] = result;

    res.isFirstInDuration = changedPoints === row.points;
    res.consumedPoints = res.isFirstInDuration ? changedPoints : row.points;

    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    // Null expire means "never expires"
    res.msBeforeNext = row.expire
      ? Math.max(row.expire - Date.now(), 0)
      : -1;

    return res;
  }

  /**
   * Run the upsert + readback inside one transaction:
   * BEGIN -> INSERT ... ON DUPLICATE KEY UPDATE -> SELECT -> COMMIT,
   * rolling back on any step's error.
   * @private
   */
  _upsertTransaction(conn, key, points, msDuration, forceExpire) {
    return new Promise((resolve, reject) => {
      conn.query('BEGIN', (errBegin) => {
        if (errBegin) {
          conn.rollback();

          return reject(errBegin);
        }

        const dateNow = Date.now();
        const newExpire = msDuration > 0 ? dateNow + msDuration : null;

        let q;
        let values;
        if (forceExpire) {
          // Overwrite points and expire unconditionally
          q = `INSERT INTO ??.?? VALUES (?, ?, ?)
ON DUPLICATE KEY UPDATE
points = ?,
expire = ?;`;
          values = [
            this.dbName, this.tableName, key, points, newExpire,
            points,
            newExpire,
          ];
        } else {
          // Reset an expired row, otherwise accumulate points and keep expire
          q = `INSERT INTO ??.?? VALUES (?, ?, ?)
ON DUPLICATE KEY UPDATE
points = IF(expire <= ?, ?, points + (?)),
expire = IF(expire <= ?, ?, expire);`;
          values = [
            this.dbName, this.tableName, key, points, newExpire,
            dateNow, points, points,
            dateNow, newExpire,
          ];
        }

        conn.query(q, values, (errUpsert) => {
          if (errUpsert) {
            conn.rollback();

            return reject(errUpsert);
          }
          conn.query('SELECT points, expire FROM ??.?? WHERE `key` = ?;', [this.dbName, this.tableName, key], (errSelect, res) => {
            if (errSelect) {
              conn.rollback();

              return reject(errSelect);
            }

            conn.query('COMMIT', (err) => {
              if (err) {
                conn.rollback();

                return reject(err);
              }

              resolve(res);
            });
          });
        });
      });
    });
  }

  // Acquire a connection, run the transactional upsert, always release.
  _upsert(key, points, msDuration, forceExpire = false) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    return new Promise((resolve, reject) => {
      this._getConnection()
        .then((conn) => {
          this._upsertTransaction(conn, key, points, msDuration, forceExpire)
            .then((res) => {
              resolve(res);
              this._releaseConnection(conn);
            })
            .catch((err) => {
              reject(err);
              this._releaseConnection(conn);
            });
        })
        .catch((err) => {
          reject(err);
        });
    });
  }

  // Fetch the non-expired row for rlKey; resolves null when absent/expired.
  _get(rlKey) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    return new Promise((resolve, reject) => {
      this._getConnection()
        .then((conn) => {
          conn.query(
            'SELECT points, expire FROM ??.?? WHERE `key` = ? AND (`expire` > ? OR `expire` IS NULL)',
            [this.dbName, this.tableName, rlKey, Date.now()],
            (err, res) => {
              if (err) {
                reject(err);
              } else if (res.length === 0) {
                resolve(null);
              } else {
                resolve(res);
              }

              this._releaseConnection(conn);
            } // eslint-disable-line
          );
        })
        .catch((err) => {
          reject(err);
        });
    });
  }

  // Delete the row for rlKey; resolves true when a row was removed.
  _delete(rlKey) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    return new Promise((resolve, reject) => {
      this._getConnection()
        .then((conn) => {
          conn.query(
            'DELETE FROM ??.?? WHERE `key` = ?',
            [this.dbName, this.tableName, rlKey],
            (err, res) => {
              if (err) {
                reject(err);
              } else {
                resolve(res.affectedRows > 0);
              }

              this._releaseConnection(conn);
            } // eslint-disable-line
          );
        })
        .catch((err) => {
          reject(err);
        });
    });
  }
}
|
||||
|
||||
module.exports = RateLimiterMySQL;
|
||||
312
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterPostgres.js
generated
vendored
Normal file
@@ -0,0 +1,312 @@
|
||||
const RateLimiterStoreAbstract = require('./RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
class RateLimiterPostgres extends RateLimiterStoreAbstract {
  /**
   * Rate limiter backed by a PostgreSQL table (`key`, `points`, `expire` columns).
   *
   * @callback callback
   * @param {Object} err
   *
   * @param {Object} opts
   * @param {callback} cb invoked after table creation succeeds or fails
   * Defaults {
   *   ... see other in RateLimiterStoreAbstract
   *
   *   storeClient: postgresClient,
   *   storeType: 'knex', // required only for Knex instance
   *   tableName: 'string',
   * }
   */
  constructor(opts, cb = null) {
    super(opts);

    this.client = opts.storeClient;
    this.clientType = opts.storeType;

    this.tableName = opts.tableName;

    this.clearExpiredByTimeout = opts.clearExpiredByTimeout;

    this.tableCreated = opts.tableCreated;
    if (!this.tableCreated) {
      this._createTable()
        .then(() => {
          this.tableCreated = true;
          if (this.clearExpiredByTimeout) {
            this._clearExpiredHourAgo();
          }
          if (typeof cb === 'function') {
            cb();
          }
        })
        .catch((err) => {
          if (typeof cb === 'function') {
            cb(err);
          } else {
            // No callback to report through: fail loudly
            throw err;
          }
        });
    } else {
      // Consistency fix: honor clearExpiredByTimeout even when the table
      // already exists (the MySQL limiter does the same in this branch).
      if (this.clearExpiredByTimeout) {
        this._clearExpiredHourAgo();
      }
      if (typeof cb === 'function') {
        cb();
      }
    }
  }

  /**
   * Delete all rows whose `expire` is before the given timestamp (ms).
   * Never rejects — cleanup is best-effort.
   *
   * @param {number} expire ms timestamp cutoff
   * @returns {Promise<void>}
   */
  clearExpired(expire) {
    return new Promise((resolve) => {
      const q = {
        name: 'rlflx-clear-expired',
        text: `DELETE FROM ${this.tableName} WHERE expire < $1`,
        values: [expire],
      };
      this._query(q)
        .then(() => {
          resolve();
        })
        .catch(() => {
          // Deleting expired query is not critical
          resolve();
        });
    });
  }

  /**
   * Delete all rows expired 1 hour ago once per 5 minutes.
   * The timeout is unref'd so it never keeps the process alive.
   *
   * @private
   */
  _clearExpiredHourAgo() {
    if (this._clearExpiredTimeoutId) {
      clearTimeout(this._clearExpiredTimeoutId);
    }
    this._clearExpiredTimeoutId = setTimeout(() => {
      this.clearExpired(Date.now() - 3600000) // Never rejected
        .then(() => {
          this._clearExpiredHourAgo();
        });
    }, 300000);
    this._clearExpiredTimeoutId.unref();
  }

  /**
   * Acquire a connection appropriate for the configured client type.
   *
   * @return Promise<any>
   * @private
   */
  _getConnection() {
    switch (this.clientType) {
      case 'pool':
        return Promise.resolve(this.client);
      case 'sequelize':
        return this.client.connectionManager.getConnection();
      case 'knex':
        return this.client.client.acquireConnection();
      case 'typeorm':
        return Promise.resolve(this.client.driver.master);
      default:
        return Promise.resolve(this.client);
    }
  }

  // Return a connection to its manager; no-op for pool/typeorm/direct clients.
  _releaseConnection(conn) {
    switch (this.clientType) {
      case 'pool':
        return true;
      case 'sequelize':
        return this.client.connectionManager.releaseConnection(conn);
      case 'knex':
        return this.client.client.releaseConnection(conn);
      case 'typeorm':
        return true;
      default:
        return true;
    }
  }

  /**
   * Create the limits table if it does not exist yet.
   *
   * @returns {Promise<any>}
   * @private
   */
  _createTable() {
    return new Promise((resolve, reject) => {
      this._query({
        text: this._getCreateTableStmt(),
      })
        .then(() => {
          resolve();
        })
        .catch((err) => {
          if (err.code === '23505') {
            // Error: duplicate key value violates unique constraint "pg_type_typname_nsp_index"
            // Postgres doesn't handle concurrent table creation
            // It is supposed, that table is created by another worker
            resolve();
          } else {
            reject(err);
          }
        });
    });
  }

  // DDL for the limits table; `key` is the primary key, `expire` is a ms timestamp.
  _getCreateTableStmt() {
    return `CREATE TABLE IF NOT EXISTS ${this.tableName} (
      key varchar(255) PRIMARY KEY,
      points integer NOT NULL DEFAULT 0,
      expire bigint
    );`;
  }

  get clientType() {
    return this._clientType;
  }

  // Auto-detect the client type from its constructor name when not provided.
  set clientType(value) {
    const constructorName = this.client.constructor.name;

    if (typeof value === 'undefined') {
      if (constructorName === 'Client') {
        value = 'client';
      } else if (
        constructorName === 'Pool' ||
        constructorName === 'BoundPool'
      ) {
        value = 'pool';
      } else if (constructorName === 'Sequelize') {
        value = 'sequelize';
      } else {
        throw new Error('storeType is not defined');
      }
    }

    this._clientType = value.toLowerCase();
  }

  get tableName() {
    return this._tableName;
  }

  set tableName(value) {
    this._tableName = typeof value === 'undefined' ? this.keyPrefix : value;
  }

  get tableCreated() {
    return this._tableCreated;
  }

  set tableCreated(value) {
    this._tableCreated = typeof value === 'undefined' ? false : !!value;
  }

  get clearExpiredByTimeout() {
    return this._clearExpiredByTimeout;
  }

  set clearExpiredByTimeout(value) {
    this._clearExpiredByTimeout = typeof value === 'undefined' ? true : Boolean(value);
  }

  // Convert the RETURNING row of the upsert into a RateLimiterRes.
  _getRateLimiterRes(rlKey, changedPoints, result) {
    const res = new RateLimiterRes();
    const row = result.rows[0];

    res.isFirstInDuration = changedPoints === row.points;
    res.consumedPoints = res.isFirstInDuration ? changedPoints : row.points;

    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    // Null expire means "never expires"
    res.msBeforeNext = row.expire
      ? Math.max(row.expire - Date.now(), 0)
      : -1;

    return res;
  }

  /**
   * Run a (named, prepared) parameterized query on an acquired connection,
   * always releasing the connection afterwards.
   * @private
   */
  _query(q) {
    const prefix = this.tableName.toLowerCase();
    // Prepared-statement names are per-connection; prefix with the table name
    // so limiters on different tables don't collide.
    const queryObj = { name: `${prefix}:${q.name}`, text: q.text, values: q.values };
    return new Promise((resolve, reject) => {
      this._getConnection()
        .then((conn) => {
          conn.query(queryObj)
            .then((res) => {
              resolve(res);
              this._releaseConnection(conn);
            })
            .catch((err) => {
              reject(err);
              this._releaseConnection(conn);
            });
        })
        .catch((err) => {
          reject(err);
        });
    });
  }

  /**
   * Atomically create-or-increment the row for `key` via
   * INSERT ... ON CONFLICT, resetting expired rows in place.
   * @private
   */
  _upsert(key, points, msDuration, forceExpire = false) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    const newExpire = msDuration > 0 ? Date.now() + msDuration : null;
    const expireQ = forceExpire
      ? ' $3 '
      : ` CASE
       WHEN ${this.tableName}.expire <= $4 THEN $3
       ELSE ${this.tableName}.expire
      END `;

    return this._query({
      name: forceExpire ? 'rlflx-upsert-force' : 'rlflx-upsert',
      text: `
        INSERT INTO ${this.tableName} VALUES ($1, $2, $3)
        ON CONFLICT(key) DO UPDATE SET
          points = CASE
           WHEN (${this.tableName}.expire <= $4 OR 1=${forceExpire ? 1 : 0}) THEN $2
           ELSE ${this.tableName}.points + ($2)
          END,
          expire = ${expireQ}
        RETURNING points, expire;`,
      values: [key, points, newExpire, Date.now()],
    });
  }

  // Fetch the non-expired row for rlKey; resolves null when absent/expired.
  _get(rlKey) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    return new Promise((resolve, reject) => {
      this._query({
        name: 'rlflx-get',
        text: `
          SELECT points, expire FROM ${this.tableName} WHERE key = $1 AND (expire > $2 OR expire IS NULL);`,
        values: [rlKey, Date.now()],
      })
        .then((res) => {
          if (res.rowCount === 0) {
            res = null;
          }
          resolve(res);
        })
        .catch((err) => {
          reject(err);
        });
    });
  }

  // Delete the row for rlKey; resolves true when a row was removed.
  _delete(rlKey) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    return this._query({
      name: 'rlflx-delete',
      text: `DELETE FROM ${this.tableName} WHERE key = $1`,
      values: [rlKey],
    })
      .then(res => res.rowCount > 0);
  }
}
|
||||
|
||||
module.exports = RateLimiterPostgres;
|
||||
127
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterQueue.js
generated
vendored
Normal file
@@ -0,0 +1,127 @@
|
||||
const RateLimiterQueueError = require('./component/RateLimiterQueueError')
|
||||
const MAX_QUEUE_SIZE = 4294967295;
|
||||
const KEY_DEFAULT = 'limiter';
|
||||
|
||||
module.exports = class RateLimiterQueue {
|
||||
constructor(limiterFlexible, opts = {
|
||||
maxQueueSize: MAX_QUEUE_SIZE,
|
||||
}) {
|
||||
this._queueLimiters = {
|
||||
KEY_DEFAULT: new RateLimiterQueueInternal(limiterFlexible, opts)
|
||||
};
|
||||
this._limiterFlexible = limiterFlexible;
|
||||
this._maxQueueSize = opts.maxQueueSize
|
||||
}
|
||||
|
||||
getTokensRemaining(key = KEY_DEFAULT) {
|
||||
if (this._queueLimiters[key]) {
|
||||
return this._queueLimiters[key].getTokensRemaining()
|
||||
} else {
|
||||
return Promise.resolve(this._limiterFlexible.points)
|
||||
}
|
||||
}
|
||||
|
||||
removeTokens(tokens, key = KEY_DEFAULT) {
|
||||
if (!this._queueLimiters[key]) {
|
||||
this._queueLimiters[key] = new RateLimiterQueueInternal(
|
||||
this._limiterFlexible, {
|
||||
key,
|
||||
maxQueueSize: this._maxQueueSize,
|
||||
})
|
||||
}
|
||||
|
||||
return this._queueLimiters[key].removeTokens(tokens)
|
||||
}
|
||||
};
|
||||
|
||||
class RateLimiterQueueInternal {

  /**
   * FIFO queue of pending token requests for a single key.
   *
   * @param {Object} limiterFlexible underlying limiter
   * @param {Object} opts { maxQueueSize: number, key: string }
   */
  constructor(limiterFlexible, opts = {
    maxQueueSize: MAX_QUEUE_SIZE,
    key: KEY_DEFAULT,
  }) {
    this._key = opts.key;
    // Handle of the pending retry timer; null when no retry is scheduled
    this._waitTimeout = null;
    // Pending requests: {resolve, reject, tokens}, processed first-in first-out
    this._queue = [];
    this._limiterFlexible = limiterFlexible;

    this._maxQueueSize = opts.maxQueueSize
  }

  // Remaining points for this key; full allowance when the key has no record.
  getTokensRemaining() {
    return this._limiterFlexible.get(this._key)
      .then((rlRes) => {
        return rlRes !== null ? rlRes.remainingPoints : this._limiterFlexible.points;
      })
  }

  /**
   * Consume `tokens`, queueing the request when the limit is exceeded.
   * Rejects immediately if `tokens` exceeds the limiter's total points
   * (such a request could never be satisfied).
   *
   * @returns {Promise<number>} remaining points after consumption
   */
  removeTokens(tokens) {
    const _this = this;

    return new Promise((resolve, reject) => {
      if (tokens > _this._limiterFlexible.points) {
        reject(new RateLimiterQueueError(`Requested tokens ${tokens} exceeds maximum ${_this._limiterFlexible.points} tokens per interval`));
        return
      }

      if (_this._queue.length > 0) {
        // Preserve FIFO order: never jump ahead of already-queued requests
        _this._queueRequest.call(_this, resolve, reject, tokens);
      } else {
        _this._limiterFlexible.consume(_this._key, tokens)
          .then((res) => {
            resolve(res.remainingPoints);
          })
          .catch((rej) => {
            if (rej instanceof Error) {
              // Real failure (e.g. store error) — propagate
              reject(rej);
            } else {
              // Rejected RateLimiterRes: queue and retry when points refill
              _this._queueRequest.call(_this, resolve, reject, tokens);
              if (_this._waitTimeout === null) {
                _this._waitTimeout = setTimeout(_this._processFIFO.bind(_this), rej.msBeforeNext);
              }
            }
          });
      }
    })
  }

  // Append a request to the queue, or reject when the queue is full.
  _queueRequest(resolve, reject, tokens) {
    const _this = this;
    if (_this._queue.length < _this._maxQueueSize) {
      _this._queue.push({resolve, reject, tokens});
    } else {
      reject(new RateLimiterQueueError(`Number of requests reached it's maximum ${_this._maxQueueSize}`))
    }
  }

  // Drain the queue head-first; re-arms itself until the queue is empty.
  _processFIFO() {
    const _this = this;

    if (_this._waitTimeout !== null) {
      clearTimeout(_this._waitTimeout);
      _this._waitTimeout = null;
    }

    if (_this._queue.length === 0) {
      return;
    }

    const item = _this._queue.shift();
    _this._limiterFlexible.consume(_this._key, item.tokens)
      .then((res) => {
        item.resolve(res.remainingPoints);
        _this._processFIFO.call(_this);
      })
      .catch((rej) => {
        if (rej instanceof Error) {
          item.reject(rej);
          _this._processFIFO.call(_this);
        } else {
          // Still rate limited: put the item back at the head and retry later
          _this._queue.unshift(item);
          if (_this._waitTimeout === null) {
            _this._waitTimeout = setTimeout(_this._processFIFO.bind(_this), rej.msBeforeNext);
          }
        }
      });
  }
}
|
||||
173
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterRedis.js
generated
vendored
Normal file
@@ -0,0 +1,173 @@
|
||||
const RateLimiterStoreAbstract = require('./RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
const incrTtlLuaScript = `redis.call('set', KEYS[1], 0, 'EX', ARGV[2], 'NX') \
|
||||
local consumed = redis.call('incrby', KEYS[1], ARGV[1]) \
|
||||
local ttl = redis.call('pttl', KEYS[1]) \
|
||||
if ttl == -1 then \
|
||||
redis.call('expire', KEYS[1], ARGV[2]) \
|
||||
ttl = 1000 * ARGV[2] \
|
||||
end \
|
||||
return {consumed, ttl} \
|
||||
`;
|
||||
|
||||
class RateLimiterRedis extends RateLimiterStoreAbstract {
  /**
   * Redis-backed limiter. Works with both ioredis and node-redis clients.
   *
   * @param {Object} opts
   * Defaults {
   *   ... see other in RateLimiterStoreAbstract
   *
   *   redis: RedisClient
   *   rejectIfRedisNotReady: boolean = false - reject / invoke insuranceLimiter immediately when redis connection is not "ready"
   * }
   */
  constructor(opts) {
    super(opts);
    // `redis` option takes precedence over the generic `storeClient` option.
    if (opts.redis) {
      this.client = opts.redis;
    } else {
      this.client = opts.storeClient;
    }

    this._rejectIfRedisNotReady = !!opts.rejectIfRedisNotReady;

    // ioredis exposes defineCommand: register the Lua script once so it runs
    // by SHA instead of shipping the full script text on every consume.
    if (typeof this.client.defineCommand === 'function') {
      this.client.defineCommand("rlflxIncr", {
        numberOfKeys: 1,
        lua: incrTtlLuaScript,
      });
    }
  }

  /**
   * Prevent actual redis call if redis connection is not ready
   * Because of different connection state checks for ioredis and node-redis, only these clients are actually checked.
   * For any other clients all the requests are passed directly to the redis client.
   * @return {boolean}
   * @private
   */
  _isRedisReady() {
    if (!this._rejectIfRedisNotReady) {
      return true;
    }
    // ioredis client
    if (this.client.status && this.client.status !== 'ready') {
      return false;
    }
    // node-redis client
    if (typeof this.client.isReady === 'function' && !this.client.isReady()) {
      return false;
    }
    return true;
  }

  /**
   * Map a raw store result ([consumed, pttlMs]) to a RateLimiterRes.
   *
   * @param {string} rlKey prefixed key
   * @param {number} changedPoints points added/removed in this operation
   * @param {Array} result raw multi/eval result from redis
   * @returns {RateLimiterRes}
   * @private
   */
  _getRateLimiterRes(rlKey, changedPoints, result) {
    let [consumed, resTtlMs] = result;
    // ioredis wraps each multi reply as [err, value]; unwrap the values.
    if (Array.isArray(consumed)) {
      [, consumed] = consumed;
      [, resTtlMs] = resTtlMs;
    }

    const res = new RateLimiterRes();
    // Explicit radix: redis replies may arrive as strings.
    res.consumedPoints = parseInt(consumed, 10);
    res.isFirstInDuration = res.consumedPoints === changedPoints;
    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    res.msBeforeNext = resTtlMs;

    return res;
  }

  /**
   * Create or update the key's counter.
   *
   * @param {string} rlKey
   * @param {number} points points to add (may be negative for reward)
   * @param {number} msDuration window length in ms; 0 means "never expire"
   * @param {boolean} forceExpire overwrite the value and TTL unconditionally
   * @returns {Promise<Array>} raw redis result for _getRateLimiterRes
   * @private
   */
  _upsert(rlKey, points, msDuration, forceExpire = false) {
    return new Promise((resolve, reject) => {
      if (!this._isRedisReady()) {
        return reject(new Error('Redis connection is not ready'));
      }

      const secDuration = Math.floor(msDuration / 1000);
      const multi = this.client.multi();
      if (forceExpire) {
        // Used by block()/set(): replace the value, re-arm (or drop) the TTL.
        if (secDuration > 0) {
          multi.set(rlKey, points, 'EX', secDuration);
        } else {
          multi.set(rlKey, points);
        }

        multi.pttl(rlKey)
          .exec((err, res) => {
            if (err) {
              return reject(err);
            }

            return resolve(res);
          });
      } else {
        if (secDuration > 0) {
          const incrCallback = function (err, result) {
            if (err) {
              return reject(err);
            }

            return resolve(result);
          };

          // Prefer the pre-registered ioredis command, fall back to EVAL.
          if (typeof this.client.rlflxIncr === 'function') {
            this.client.rlflxIncr(rlKey, points, secDuration, incrCallback);
          } else {
            this.client.eval(incrTtlLuaScript, 1, rlKey, points, secDuration, incrCallback);
          }
        } else {
          // Non-expiring key: plain INCRBY + PTTL in one transaction.
          multi.incrby(rlKey, points)
            .pttl(rlKey)
            .exec((err, res) => {
              if (err) {
                return reject(err);
              }

              return resolve(res);
            });
        }
      }
    });
  }

  /**
   * Fetch current state for a key.
   *
   * @param {string} rlKey
   * @returns {Promise<Array|null>} raw [points, pttl] result, or null when the key is absent
   * @private
   */
  _get(rlKey) {
    return new Promise((resolve, reject) => {
      if (!this._isRedisReady()) {
        return reject(new Error('Redis connection is not ready'));
      }

      this.client
        .multi()
        .get(rlKey)
        .pttl(rlKey)
        .exec((err, res) => {
          if (err) {
            reject(err);
          } else {
            const [points] = res;
            if (points === null) {
              return resolve(null);
            }

            resolve(res);
          }
        });
    });
  }

  /**
   * Remove a key entirely.
   *
   * @param {string} rlKey
   * @returns {Promise<boolean>} true when a key was actually deleted
   * @private
   */
  _delete(rlKey) {
    return new Promise((resolve, reject) => {
      this.client.del(rlKey, (err, res) => {
        if (err) {
          reject(err);
        } else {
          resolve(res > 0);
        }
      });
    });
  }
}

module.exports = RateLimiterRedis;
|
||||
64
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterRes.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
module.exports = class RateLimiterRes {
|
||||
constructor(remainingPoints, msBeforeNext, consumedPoints, isFirstInDuration) {
|
||||
this.remainingPoints = typeof remainingPoints === 'undefined' ? 0 : remainingPoints; // Remaining points in current duration
|
||||
this.msBeforeNext = typeof msBeforeNext === 'undefined' ? 0 : msBeforeNext; // Milliseconds before next action
|
||||
this.consumedPoints = typeof consumedPoints === 'undefined' ? 0 : consumedPoints; // Consumed points in current duration
|
||||
this.isFirstInDuration = typeof isFirstInDuration === 'undefined' ? false : isFirstInDuration;
|
||||
}
|
||||
|
||||
get msBeforeNext() {
|
||||
return this._msBeforeNext;
|
||||
}
|
||||
|
||||
set msBeforeNext(ms) {
|
||||
this._msBeforeNext = ms;
|
||||
return this;
|
||||
}
|
||||
|
||||
get remainingPoints() {
|
||||
return this._remainingPoints;
|
||||
}
|
||||
|
||||
set remainingPoints(p) {
|
||||
this._remainingPoints = p;
|
||||
return this;
|
||||
}
|
||||
|
||||
get consumedPoints() {
|
||||
return this._consumedPoints;
|
||||
}
|
||||
|
||||
set consumedPoints(p) {
|
||||
this._consumedPoints = p;
|
||||
return this;
|
||||
}
|
||||
|
||||
get isFirstInDuration() {
|
||||
return this._isFirstInDuration;
|
||||
}
|
||||
|
||||
set isFirstInDuration(value) {
|
||||
this._isFirstInDuration = Boolean(value);
|
||||
}
|
||||
|
||||
_getDecoratedProperties() {
|
||||
return {
|
||||
remainingPoints: this.remainingPoints,
|
||||
msBeforeNext: this.msBeforeNext,
|
||||
consumedPoints: this.consumedPoints,
|
||||
isFirstInDuration: this.isFirstInDuration,
|
||||
};
|
||||
}
|
||||
|
||||
[Symbol.for("nodejs.util.inspect.custom")]() {
|
||||
return this._getDecoratedProperties();
|
||||
}
|
||||
|
||||
toString() {
|
||||
return JSON.stringify(this._getDecoratedProperties());
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
return this._getDecoratedProperties();
|
||||
}
|
||||
};
|
||||
442
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterStoreAbstract.js
generated
vendored
Normal file
@@ -0,0 +1,442 @@
|
||||
const RateLimiterAbstract = require('./RateLimiterAbstract');
|
||||
const BlockedKeys = require('./component/BlockedKeys');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
/**
 * Base class for every limiter backed by an external store (Redis, Mongo,
 * MySQL, ...). Concrete stores implement _upsert/_get/_delete/_getRateLimiterRes;
 * this class provides blocking, insurance fallback and even execution on top.
 */
module.exports = class RateLimiterStoreAbstract extends RateLimiterAbstract {
  /**
   *
   * @param opts Object Defaults {
   *   ... see other in RateLimiterAbstract
   *
   *   inMemoryBlockOnConsumed: 40, // Number of points when key is blocked
   *   inMemoryBlockDuration: 10, // Block duration in seconds
   *   insuranceLimiter: RateLimiterAbstract
   * }
   */
  constructor(opts = {}) {
    super(opts);

    // Both camelCase and legacy lowercase option spellings are accepted.
    this.inMemoryBlockOnConsumed = opts.inMemoryBlockOnConsumed || opts.inmemoryBlockOnConsumed;
    this.inMemoryBlockDuration = opts.inMemoryBlockDuration || opts.inmemoryBlockDuration;
    this.insuranceLimiter = opts.insuranceLimiter;
    this._inMemoryBlockedKeys = new BlockedKeys();
  }

  get client() {
    return this._client;
  }

  set client(value) {
    if (typeof value === 'undefined') {
      throw new Error('storeClient is not set');
    }
    this._client = value;
  }

  /**
   * Have to be launched after consume
   * It blocks key and execute evenly depending on result from store
   *
   * It uses _getRateLimiterRes function to prepare RateLimiterRes from store result
   *
   * @param resolve
   * @param reject
   * @param rlKey
   * @param changedPoints
   * @param storeResult
   * @param {Object} options
   * @private
   */
  _afterConsume(resolve, reject, rlKey, changedPoints, storeResult, options = {}) {
    const res = this._getRateLimiterRes(rlKey, changedPoints, storeResult);

    // Case 1: in-memory block configured WITHOUT its own duration — block for
    // the remaining TTL of the key in the store.
    if (this.inMemoryBlockOnConsumed > 0 && !(this.inMemoryBlockDuration > 0)
      && res.consumedPoints >= this.inMemoryBlockOnConsumed
    ) {
      this._inMemoryBlockedKeys.addMs(rlKey, res.msBeforeNext);
      if (res.consumedPoints > this.points) {
        return reject(res);
      } else {
        return resolve(res)
      }
    } else if (res.consumedPoints > this.points) {
      // Case 2: limit exceeded.
      let blockPromise = Promise.resolve();
      // Block only first time when consumed more than points
      if (this.blockDuration > 0 && res.consumedPoints <= (this.points + changedPoints)) {
        res.msBeforeNext = this.msBlockDuration;
        blockPromise = this._block(rlKey, res.consumedPoints, this.msBlockDuration, options);
      }

      if (this.inMemoryBlockOnConsumed > 0 && res.consumedPoints >= this.inMemoryBlockOnConsumed) {
        // Block key for this.inMemoryBlockDuration seconds
        this._inMemoryBlockedKeys.add(rlKey, this.inMemoryBlockDuration);
        res.msBeforeNext = this.msInMemoryBlockDuration;
      }

      blockPromise
        .then(() => {
          reject(res);
        })
        .catch((err) => {
          reject(err);
        });
    } else if (this.execEvenly && res.msBeforeNext > 0 && !res.isFirstInDuration) {
      // Case 3: allowed, but delayed so actions spread evenly over the window.
      let delay = Math.ceil(res.msBeforeNext / (res.remainingPoints + 2));
      if (delay < this.execEvenlyMinDelayMs) {
        delay = res.consumedPoints * this.execEvenlyMinDelayMs;
      }

      setTimeout(resolve, delay, res);
    } else {
      // Case 4: allowed, no delay.
      resolve(res);
    }
  }

  /**
   * On store error, either reject or retry the same operation on the
   * insuranceLimiter when one is configured.
   *
   * @param err store error
   * @param funcName name of the public method to replay ('consume', 'penalty', ...)
   * @param resolve
   * @param reject
   * @param key original (unprefixed) key
   * @param data second argument of the replayed method (points, etc.)
   * @param {Object} options
   * @private
   */
  _handleError(err, funcName, resolve, reject, key, data = false, options = {}) {
    if (!(this.insuranceLimiter instanceof RateLimiterAbstract)) {
      reject(err);
    } else {
      this.insuranceLimiter[funcName](key, data, options)
        .then((res) => {
          resolve(res);
        })
        .catch((res) => {
          reject(res);
        });
    }
  }

  /**
   * @deprecated Use camelCase version
   * @returns {BlockedKeys}
   * @private
   */
  get _inmemoryBlockedKeys() {
    return this._inMemoryBlockedKeys
  }

  /**
   * @deprecated Use camelCase version
   * @param rlKey
   * @returns {number}
   */
  getInmemoryBlockMsBeforeExpire(rlKey) {
    return this.getInMemoryBlockMsBeforeExpire(rlKey)
  }

  /**
   * @deprecated Use camelCase version
   * @returns {number|number}
   */
  get inmemoryBlockOnConsumed() {
    return this.inMemoryBlockOnConsumed;
  }

  /**
   * @deprecated Use camelCase version
   * @param value
   */
  set inmemoryBlockOnConsumed(value) {
    this.inMemoryBlockOnConsumed = value;
  }

  /**
   * @deprecated Use camelCase version
   * @returns {number|number}
   */
  get inmemoryBlockDuration() {
    return this.inMemoryBlockDuration;
  }

  /**
   * @deprecated Use camelCase version
   * @param value
   */
  set inmemoryBlockDuration(value) {
    this.inMemoryBlockDuration = value
  }

  /**
   * @deprecated Use camelCase version
   * @returns {number}
   */
  get msInmemoryBlockDuration() {
    return this.inMemoryBlockDuration * 1000;
  }

  /**
   * Milliseconds before an in-memory block on this key expires; 0 = not blocked.
   * @param rlKey
   * @returns {number}
   */
  getInMemoryBlockMsBeforeExpire(rlKey) {
    if (this.inMemoryBlockOnConsumed > 0) {
      return this._inMemoryBlockedKeys.msBeforeExpire(rlKey);
    }

    return 0;
  }

  get inMemoryBlockOnConsumed() {
    return this._inMemoryBlockOnConsumed;
  }

  set inMemoryBlockOnConsumed(value) {
    this._inMemoryBlockOnConsumed = value ? parseInt(value) : 0;
    if (this.inMemoryBlockOnConsumed > 0 && this.points > this.inMemoryBlockOnConsumed) {
      throw new Error('inMemoryBlockOnConsumed option must be greater or equal "points" option');
    }
  }

  get inMemoryBlockDuration() {
    return this._inMemoryBlockDuration;
  }

  set inMemoryBlockDuration(value) {
    this._inMemoryBlockDuration = value ? parseInt(value) : 0;
    if (this.inMemoryBlockDuration > 0 && this.inMemoryBlockOnConsumed === 0) {
      throw new Error('inMemoryBlockOnConsumed option must be set up');
    }
  }

  get msInMemoryBlockDuration() {
    return this._inMemoryBlockDuration * 1000;
  }

  get insuranceLimiter() {
    return this._insuranceLimiter;
  }

  set insuranceLimiter(value) {
    if (typeof value !== 'undefined' && !(value instanceof RateLimiterAbstract)) {
      throw new Error('insuranceLimiter must be instance of RateLimiterAbstract');
    }
    this._insuranceLimiter = value;
    // The insurance limiter must mirror blocking/even-execution behaviour.
    if (this._insuranceLimiter) {
      this._insuranceLimiter.blockDuration = this.blockDuration;
      this._insuranceLimiter.execEvenly = this.execEvenly;
    }
  }

  /**
   * Block any key for secDuration seconds
   *
   * @param key
   * @param secDuration
   * @param {Object} options
   *
   * @return Promise<RateLimiterRes>
   */
  block(key, secDuration, options = {}) {
    const msDuration = secDuration * 1000;
    // points + 1 guarantees the key counts as "over limit" while blocked.
    return this._block(this.getKey(key), this.points + 1, msDuration, options);
  }

  /**
   * Set points by key for any duration
   *
   * @param key
   * @param points
   * @param secDuration
   * @param {Object} options
   *
   * @return Promise<RateLimiterRes>
   */
  set(key, points, secDuration, options = {}) {
    const msDuration = (secDuration >= 0 ? secDuration : this.duration) * 1000;
    return this._block(this.getKey(key), points, msDuration, options);
  }

  /**
   *
   * @param key
   * @param pointsToConsume
   * @param {Object} options
   * @returns Promise<RateLimiterRes>
   */
  consume(key, pointsToConsume = 1, options = {}) {
    return new Promise((resolve, reject) => {
      const rlKey = this.getKey(key);

      // Short-circuit without touching the store if the key is blocked in memory.
      const inMemoryBlockMsBeforeExpire = this.getInMemoryBlockMsBeforeExpire(rlKey);
      if (inMemoryBlockMsBeforeExpire > 0) {
        return reject(new RateLimiterRes(0, inMemoryBlockMsBeforeExpire));
      }

      this._upsert(rlKey, pointsToConsume, this._getKeySecDuration(options) * 1000, false, options)
        .then((res) => {
          this._afterConsume(resolve, reject, rlKey, pointsToConsume, res);
        })
        .catch((err) => {
          this._handleError(err, 'consume', resolve, reject, key, pointsToConsume, options);
        });
    });
  }

  /**
   *
   * @param key
   * @param points
   * @param {Object} options
   * @returns Promise<RateLimiterRes>
   */
  penalty(key, points = 1, options = {}) {
    const rlKey = this.getKey(key);
    return new Promise((resolve, reject) => {
      this._upsert(rlKey, points, this._getKeySecDuration(options) * 1000, false, options)
        .then((res) => {
          resolve(this._getRateLimiterRes(rlKey, points, res));
        })
        .catch((err) => {
          this._handleError(err, 'penalty', resolve, reject, key, points, options);
        });
    });
  }

  /**
   *
   * @param key
   * @param points
   * @param {Object} options
   * @returns Promise<RateLimiterRes>
   */
  reward(key, points = 1, options = {}) {
    const rlKey = this.getKey(key);
    return new Promise((resolve, reject) => {
      this._upsert(rlKey, -points, this._getKeySecDuration(options) * 1000, false, options)
        .then((res) => {
          resolve(this._getRateLimiterRes(rlKey, -points, res));
        })
        .catch((err) => {
          this._handleError(err, 'reward', resolve, reject, key, points, options);
        });
    });
  }

  /**
   *
   * @param key
   * @param {Object} options
   * @returns Promise<RateLimiterRes>|null
   */
  get(key, options = {}) {
    const rlKey = this.getKey(key);
    return new Promise((resolve, reject) => {
      this._get(rlKey, options)
        .then((res) => {
          if (res === null || typeof res === 'undefined') {
            resolve(null);
          } else {
            resolve(this._getRateLimiterRes(rlKey, 0, res));
          }
        })
        .catch((err) => {
          this._handleError(err, 'get', resolve, reject, key, options);
        });
    });
  }

  /**
   *
   * @param key
   * @param {Object} options
   * @returns Promise<boolean>
   */
  delete(key, options = {}) {
    const rlKey = this.getKey(key);
    return new Promise((resolve, reject) => {
      this._delete(rlKey, options)
        .then((res) => {
          // Keep the in-memory block list in sync with the store.
          this._inMemoryBlockedKeys.delete(rlKey);
          resolve(res);
        })
        .catch((err) => {
          this._handleError(err, 'delete', resolve, reject, key, options);
        });
    });
  }

  /**
   * Cleanup keys no-matter expired or not.
   */
  deleteInMemoryBlockedAll() {
    this._inMemoryBlockedKeys.delete();
  }

  /**
   * Get RateLimiterRes object filled depending on storeResult, which specific for exact store
   *
   * @param rlKey
   * @param changedPoints
   * @param storeResult
   * @private
   */
  _getRateLimiterRes(rlKey, changedPoints, storeResult) { // eslint-disable-line no-unused-vars
    throw new Error("You have to implement the method '_getRateLimiterRes'!");
  }

  /**
   * Block key for this.msBlockDuration milliseconds
   * Usually, it just prolongs lifetime of key
   *
   * @param rlKey
   * @param initPoints
   * @param msDuration
   * @param {Object} options
   *
   * @return Promise<any>
   */
  _block(rlKey, initPoints, msDuration, options = {}) {
    return new Promise((resolve, reject) => {
      this._upsert(rlKey, initPoints, msDuration, true, options)
        .then(() => {
          resolve(new RateLimiterRes(0, msDuration > 0 ? msDuration : -1, initPoints));
        })
        .catch((err) => {
          this._handleError(err, 'block', resolve, reject, this.parseKey(rlKey), msDuration / 1000, options);
        });
    });
  }

  /**
   * Have to be implemented in every limiter
   * Resolve with raw result from Store OR null if rlKey is not set
   * or Reject with error
   *
   * @param rlKey
   * @param {Object} options
   * @private
   *
   * @return Promise<any>
   */
  _get(rlKey, options = {}) { // eslint-disable-line no-unused-vars
    throw new Error("You have to implement the method '_get'!");
  }

  /**
   * Have to be implemented
   * Resolve with true OR false if rlKey doesn't exist
   * or Reject with error
   *
   * @param rlKey
   * @param {Object} options
   * @private
   *
   * @return Promise<any>
   */
  _delete(rlKey, options = {}) { // eslint-disable-line no-unused-vars
    throw new Error("You have to implement the method '_delete'!");
  }

  /**
   * Have to be implemented
   * Resolve with object used for {@link _getRateLimiterRes} to generate {@link RateLimiterRes}
   *
   * @param {string} rlKey
   * @param {number} points
   * @param {number} msDuration
   * @param {boolean} forceExpire
   * @param {Object} options
   * @abstract
   *
   * @return Promise<Object>
   */
  _upsert(rlKey, points, msDuration, forceExpire = false, options = {}) {
    throw new Error("You have to implement the method '_upsert'!");
  }
};
|
||||
51
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterUnion.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
const RateLimiterAbstract = require('./RateLimiterAbstract');
|
||||
|
||||
module.exports = class RateLimiterUnion {
|
||||
constructor(...limiters) {
|
||||
if (limiters.length < 1) {
|
||||
throw new Error('RateLimiterUnion: at least one limiter have to be passed');
|
||||
}
|
||||
limiters.forEach((limiter) => {
|
||||
if (!(limiter instanceof RateLimiterAbstract)) {
|
||||
throw new Error('RateLimiterUnion: all limiters have to be instance of RateLimiterAbstract');
|
||||
}
|
||||
});
|
||||
|
||||
this._limiters = limiters;
|
||||
}
|
||||
|
||||
consume(key, points = 1) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const promises = [];
|
||||
this._limiters.forEach((limiter) => {
|
||||
promises.push(limiter.consume(key, points).catch(rej => ({ rejected: true, rej })));
|
||||
});
|
||||
|
||||
Promise.all(promises)
|
||||
.then((res) => {
|
||||
const resObj = {};
|
||||
let rejected = false;
|
||||
|
||||
res.forEach((item) => {
|
||||
if (item.rejected === true) {
|
||||
rejected = true;
|
||||
}
|
||||
});
|
||||
|
||||
for (let i = 0; i < res.length; i++) {
|
||||
if (rejected && res[i].rejected === true) {
|
||||
resObj[this._limiters[i].keyPrefix] = res[i].rej;
|
||||
} else if (!rejected) {
|
||||
resObj[this._limiters[i].keyPrefix] = res[i];
|
||||
}
|
||||
}
|
||||
|
||||
if (rejected) {
|
||||
reject(resObj);
|
||||
} else {
|
||||
resolve(resObj);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
75
framework/node_modules/node-rate-limiter-flexible/lib/component/BlockedKeys/BlockedKeys.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
module.exports = class BlockedKeys {
|
||||
constructor() {
|
||||
this._keys = {}; // {'key': 1526279430331}
|
||||
this._addedKeysAmount = 0;
|
||||
}
|
||||
|
||||
collectExpired() {
|
||||
const now = Date.now();
|
||||
|
||||
Object.keys(this._keys).forEach((key) => {
|
||||
if (this._keys[key] <= now) {
|
||||
delete this._keys[key];
|
||||
}
|
||||
});
|
||||
|
||||
this._addedKeysAmount = Object.keys(this._keys).length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add new blocked key
|
||||
*
|
||||
* @param key String
|
||||
* @param sec Number
|
||||
*/
|
||||
add(key, sec) {
|
||||
this.addMs(key, sec * 1000);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add new blocked key for ms
|
||||
*
|
||||
* @param key String
|
||||
* @param ms Number
|
||||
*/
|
||||
addMs(key, ms) {
|
||||
this._keys[key] = Date.now() + ms;
|
||||
this._addedKeysAmount++;
|
||||
if (this._addedKeysAmount > 999) {
|
||||
this.collectExpired();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 0 means not blocked
|
||||
*
|
||||
* @param key
|
||||
* @returns {number}
|
||||
*/
|
||||
msBeforeExpire(key) {
|
||||
const expire = this._keys[key];
|
||||
|
||||
if (expire && expire >= Date.now()) {
|
||||
this.collectExpired();
|
||||
const now = Date.now();
|
||||
return expire >= now ? expire - now : 0;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* If key is not given, delete all data in memory
|
||||
*
|
||||
* @param {string|undefined} key
|
||||
*/
|
||||
delete(key) {
|
||||
if (key) {
|
||||
delete this._keys[key];
|
||||
} else {
|
||||
Object.keys(this._keys).forEach((key) => {
|
||||
delete this._keys[key];
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
3
framework/node_modules/node-rate-limiter-flexible/lib/component/BlockedKeys/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
const BlockedKeys = require('./BlockedKeys');
|
||||
|
||||
module.exports = BlockedKeys;
|
||||
83
framework/node_modules/node-rate-limiter-flexible/lib/component/MemoryStorage/MemoryStorage.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
const Record = require('./Record');
|
||||
const RateLimiterRes = require('../../RateLimiterRes');
|
||||
|
||||
module.exports = class MemoryStorage {
|
||||
constructor() {
|
||||
/**
|
||||
* @type {Object.<string, Record>}
|
||||
* @private
|
||||
*/
|
||||
this._storage = {};
|
||||
}
|
||||
|
||||
incrby(key, value, durationSec) {
|
||||
if (this._storage[key]) {
|
||||
const msBeforeExpires = this._storage[key].expiresAt
|
||||
? this._storage[key].expiresAt.getTime() - new Date().getTime()
|
||||
: -1;
|
||||
if (msBeforeExpires !== 0) {
|
||||
// Change value
|
||||
this._storage[key].value = this._storage[key].value + value;
|
||||
|
||||
return new RateLimiterRes(0, msBeforeExpires, this._storage[key].value, false);
|
||||
}
|
||||
|
||||
return this.set(key, value, durationSec);
|
||||
}
|
||||
return this.set(key, value, durationSec);
|
||||
}
|
||||
|
||||
set(key, value, durationSec) {
|
||||
const durationMs = durationSec * 1000;
|
||||
|
||||
if (this._storage[key] && this._storage[key].timeoutId) {
|
||||
clearTimeout(this._storage[key].timeoutId);
|
||||
}
|
||||
|
||||
this._storage[key] = new Record(
|
||||
value,
|
||||
durationMs > 0 ? new Date(Date.now() + durationMs) : null
|
||||
);
|
||||
if (durationMs > 0) {
|
||||
this._storage[key].timeoutId = setTimeout(() => {
|
||||
delete this._storage[key];
|
||||
}, durationMs);
|
||||
if (this._storage[key].timeoutId.unref) {
|
||||
this._storage[key].timeoutId.unref();
|
||||
}
|
||||
}
|
||||
|
||||
return new RateLimiterRes(0, durationMs === 0 ? -1 : durationMs, this._storage[key].value, true);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param key
|
||||
* @returns {*}
|
||||
*/
|
||||
get(key) {
|
||||
if (this._storage[key]) {
|
||||
const msBeforeExpires = this._storage[key].expiresAt
|
||||
? this._storage[key].expiresAt.getTime() - new Date().getTime()
|
||||
: -1;
|
||||
return new RateLimiterRes(0, msBeforeExpires, this._storage[key].value, false);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param key
|
||||
* @returns {boolean}
|
||||
*/
|
||||
delete(key) {
|
||||
if (this._storage[key]) {
|
||||
if (this._storage[key].timeoutId) {
|
||||
clearTimeout(this._storage[key].timeoutId);
|
||||
}
|
||||
delete this._storage[key];
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
40
framework/node_modules/node-rate-limiter-flexible/lib/component/MemoryStorage/Record.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
module.exports = class Record {
|
||||
/**
|
||||
*
|
||||
* @param value int
|
||||
* @param expiresAt Date|int
|
||||
* @param timeoutId
|
||||
*/
|
||||
constructor(value, expiresAt, timeoutId = null) {
|
||||
this.value = value;
|
||||
this.expiresAt = expiresAt;
|
||||
this.timeoutId = timeoutId;
|
||||
}
|
||||
|
||||
get value() {
|
||||
return this._value;
|
||||
}
|
||||
|
||||
set value(value) {
|
||||
this._value = parseInt(value);
|
||||
}
|
||||
|
||||
get expiresAt() {
|
||||
return this._expiresAt;
|
||||
}
|
||||
|
||||
set expiresAt(value) {
|
||||
if (!(value instanceof Date) && Number.isInteger(value)) {
|
||||
value = new Date(value);
|
||||
}
|
||||
this._expiresAt = value;
|
||||
}
|
||||
|
||||
get timeoutId() {
|
||||
return this._timeoutId;
|
||||
}
|
||||
|
||||
set timeoutId(value) {
|
||||
this._timeoutId = value;
|
||||
}
|
||||
};
|
||||
3
framework/node_modules/node-rate-limiter-flexible/lib/component/MemoryStorage/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
const MemoryStorage = require('./MemoryStorage');
|
||||
|
||||
module.exports = MemoryStorage;
|
||||
13
framework/node_modules/node-rate-limiter-flexible/lib/component/RateLimiterQueueError.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
/**
 * Error thrown by RateLimiterQueue, e.g. when its FIFO queue is full.
 */
module.exports = class RateLimiterQueueError extends Error {
  /**
   * @param {string} message
   * @param {*} [extra] optional payload attached to the error instance
   */
  constructor(message, extra) {
    super();
    // V8-only API; guarded for other engines.
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
    // NOTE(review): name is 'CustomError', not 'RateLimiterQueueError' —
    // existing callers may match on this string, so it is left unchanged.
    this.name = 'CustomError';
    this.message = message;
    if (extra) {
      this.extra = extra;
    }
  }
};
|
||||
9
framework/node_modules/node-rate-limiter-flexible/lib/component/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
/**
 * Error thrown by RateLimiterQueue (e.g. when its queue is full).
 * Note: at runtime `name` is 'CustomError', not the class name.
 */
export class RateLimiterQueueError extends Error {

  constructor(message?: string, extra?: string);

  readonly name: string;
  readonly message: string;
  readonly extra: string;

}
|
||||
16
framework/node_modules/node-rate-limiter-flexible/lib/constants.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
// Identifiers for every supported limiter backend.
const LIMITER_TYPES = {
  MEMORY: 'memory',
  CLUSTER: 'cluster',
  MEMCACHE: 'memcache',
  MONGO: 'mongo',
  REDIS: 'redis',
  MYSQL: 'mysql',
  POSTGRES: 'postgres',
};

// Shared error message for factories that receive an unrecognised type.
const ERR_UNKNOWN_LIMITER_TYPE_MESSAGE = 'Unknown limiter type. Use one of LIMITER_TYPES constants.';

module.exports = {
  LIMITER_TYPES,
  ERR_UNKNOWN_LIMITER_TYPE_MESSAGE,
};
|
||||
392
framework/node_modules/node-rate-limiter-flexible/lib/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,392 @@
|
||||
/** Plain-object shape of a limiter operation result. */
export interface IRateLimiterRes {
  msBeforeNext?: number;
  remainingPoints?: number;
  consumedPoints?: number;
  isFirstInDuration?: boolean;
}

/** Result object resolved (or rejected) by every limiter operation. */
export class RateLimiterRes {
  constructor(
    remainingPoints?: number,
    msBeforeNext?: number,
    consumedPoints?: number,
    isFirstInDuration?: boolean
  );

  readonly msBeforeNext: number;
  readonly remainingPoints: number;
  readonly consumedPoints: number;
  readonly isFirstInDuration: boolean;

  /** JSON string of the public properties. */
  toString(): string;
  toJSON(): {
    remainingPoints: number;
    msBeforeNext: number;
    consumedPoints: number;
    isFirstInDuration: boolean;
  };
}
|
||||
|
||||
/**
 * Common interface implemented by every limiter (memory, cluster and all
 * store-backed limiters). Operations are keyed by a client identifier
 * (usually an IP address or user id) and resolve/reject with RateLimiterRes.
 */
export class RateLimiterAbstract {
  /**
   * @param opts limiter settings; see IRateLimiterOptions below.
   */
  constructor(opts: IRateLimiterOptions);

  /**
   * Maximum number of points can be consumed over duration. Limiter compares this number with
   * number of consumed points by key to decide if an operation should be rejected or resolved.
   */
  points: number;

  /**
   * Number of seconds before consumed points are reset.
   * Keys never expire, if duration is 0.
   */
  duration: number;

  /**
   * duration in milliseconds
   */
  get msDuration(): number;

  /**
   * If positive number and consumed more than points in current duration, block for blockDuration
   * seconds.
   */
  blockDuration: number;

  /**
   * blockDuration in milliseconds
   */
  get msBlockDuration(): number;

  /**
   * Delay action to be executed evenly over duration First action in duration is executed without
   * delay. All next allowed actions in current duration are delayed by formula
   * msBeforeDurationEnd / (remainingPoints + 2) with minimum delay of duration * 1000 / points.
   * It allows to cut off load peaks similar way to Leaky Bucket.
   *
   * Note: it isn't recommended to use it for long duration and few points, as it may delay action
   * for too long with default execEvenlyMinDelayMs.
   */
  execEvenly: boolean;

  /**
   * Sets minimum delay in milliseconds, when action is delayed with execEvenly
   */
  execEvenlyMinDelayMs: number;

  /**
   * If you need to create several limiters for different purpose.
   * Set to empty string '', if keys should be stored without prefix.
   */
  keyPrefix: string;

  /**
   * Returns internal key prefixed with keyPrefix option as it is saved in store.
   */
  getKey(key: string | number): string;

  /**
   * Returns internal key without the keyPrefix.
   */
  parseKey(rlKey: string): string;

  /**
   * @param key is usually IP address or some unique client id
   * @param pointsToConsume number of points consumed. default: 1
   * @param options is object with additional settings:
   * - customDuration expire in seconds for this operation only overwrites limiter's duration. It doesn't work, if key already created.
   * @returns Returns Promise, which:
   * - `resolved` with `RateLimiterRes` when point(s) is consumed, so action can be done
   * - `rejected` only for store and database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
   * - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
   * - `rejected` when there is no points to be consumed, where reject reason `rejRes` is `RateLimiterRes` object
   * - `rejected` when key is blocked (if block strategy is set up), where reject reason `rejRes` is `RateLimiterRes` object
   */
  consume(
    key: string | number,
    pointsToConsume?: number,
    options?: { [key: string]: any }
  ): Promise<RateLimiterRes>;

  /**
   * Fine key by points number of points for one duration.
   *
   * Note: Depending on time penalty may go to next durations
   *
   * @returns Returns Promise, which:
   * - `resolved` with RateLimiterRes
   * - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
   * - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
   */
  penalty(
    key: string | number,
    points?: number,
    options?: { [key: string]: any }
  ): Promise<RateLimiterRes>;

  /**
   * Reward key by points number of points for one duration.
   * Note: Depending on time reward may go to next durations
   * @returns Promise, which:
   * - `resolved` with RateLimiterRes
   * - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
   * - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
   */
  reward(
    key: string | number,
    points?: number,
    options?: { [key: string]: any }
  ): Promise<RateLimiterRes>;

  /**
   * Get RateLimiterRes in current duration. It always returns RateLimiterRes.isFirstInDuration=false.
   * @param key is usually IP address or some unique client id
   * @param options
   * @returns Promise, which:
   * - `resolved` with RateLimiterRes if key is set
   * - `resolved` with null if key is NOT set or expired
   * - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
   * - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
   */
  get(
    key: string | number,
    options?: { [key: string]: any }
  ): Promise<RateLimiterRes | null>;

  /**
   * Set points to key for secDuration seconds.
   * Store it forever, if secDuration is 0.
   * @param key
   * @param points
   * @param secDuration
   * @param options
   * @returns Promise, which:
   * - `resolved` with RateLimiterRes
   * - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
   * - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
   */
  set(
    key: string | number,
    points: number,
    secDuration: number,
    options?: { [key: string]: any }
  ): Promise<RateLimiterRes>;

  /**
   * Block key by setting consumed points to points + 1 for secDuration seconds.
   *
   * It force updates expire, if there is already key.
   *
   * Blocked key never expires, if secDuration is 0.
   * @returns Promise, which:
   * - `resolved` with RateLimiterRes
   * - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
   * - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
   */
  block(
    key: string | number,
    secDuration: number,
    options?: { [key: string]: any }
  ): Promise<RateLimiterRes>;

  /**
   * Delete all data related to key.
   *
   * For example, previously blocked key is not blocked after delete as there is no data anymore.
   * @returns Promise, which:
   * - `resolved` with boolean, true if data is removed by key, false if there is no such key.
   * - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
   * - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
   */
  delete(
    key: string | number,
    options?: { [key: string]: any }
  ): Promise<boolean>;
}
|
||||
|
||||
/**
 * Base class for limiters backed by an external store (Redis, Mongo, SQL,
 * Memcached, ...). Adds in-process block bookkeeping on top of
 * RateLimiterAbstract (see the inMemoryBlock* options).
 */
export class RateLimiterStoreAbstract extends RateLimiterAbstract {
  constructor(opts: IRateLimiterStoreOptions);

  /**
   * Cleanup keys blocked in current process memory
   */
  deleteInMemoryBlockedAll(): void;
}
|
||||
|
||||
/**
 * Options shared by every limiter; see the matching RateLimiterAbstract
 * properties for the semantics of each field.
 */
interface IRateLimiterOptions {
  keyPrefix?: string;
  points?: number;
  duration?: number;
  execEvenly?: boolean;
  execEvenlyMinDelayMs?: number;
  blockDuration?: number;
}

interface IRateLimiterClusterOptions extends IRateLimiterOptions {
  // Milliseconds before a cluster request is considered timed out.
  timeoutMs?: number;
}

/**
 * Options for store-backed limiters. `storeClient` is the only required
 * field: the already-connected client/connection of the chosen backend.
 */
interface IRateLimiterStoreOptions extends IRateLimiterOptions {
  storeClient: any;
  storeType?: string;
  inMemoryBlockOnConsumed?: number;
  inMemoryBlockDuration?: number;
  /**
   * @deprecated Use camelCased inMemoryBlockOnConsumed option
   */
  inmemoryBlockOnConsumed?: number;
  /**
   * @deprecated Use camelCased inMemoryBlockDuration option
   */
  inmemoryBlockDuration?: number;
  // Fallback limiter used when the store errors (see consume() rejection docs).
  insuranceLimiter?: RateLimiterAbstract;
  dbName?: string;
  tableName?: string;
  // Set true to skip table creation, if the table already exists.
  tableCreated?: boolean;
}

interface IRateLimiterStoreNoAutoExpiryOptions extends IRateLimiterStoreOptions {
  clearExpiredByTimeout?: boolean;
}

interface IRateLimiterMongoOptions extends IRateLimiterStoreOptions {
  indexKeyPrefix?: {
    [key: string]: any;
  };
}

interface IRateLimiterRedisOptions extends IRateLimiterStoreOptions {
  rejectIfRedisNotReady?: boolean;
}

// Node-style readiness callback used by SQL limiter constructors.
interface ICallbackReady {
  (error?: Error): void;
}

interface IRLWrapperBlackAndWhiteOptions {
  limiter: RateLimiterAbstract;
  blackList?: string[] | number[];
  whiteList?: string[] | number[];
  isBlackListed?(key: any): boolean;
  isWhiteListed?(key: any): boolean;
  runActionAnyway?: boolean;
}
|
||||
|
||||
/** In-process limiter; no external store required. */
export class RateLimiterMemory extends RateLimiterAbstract {
  constructor(opts: IRateLimiterOptions);
}

/** Limiter shared across Node.js cluster workers — pairs with RateLimiterClusterMaster. */
export class RateLimiterCluster extends RateLimiterAbstract {
  constructor(opts: IRateLimiterClusterOptions);
}

/** Master-process counterpart of RateLimiterCluster. */
export class RateLimiterClusterMaster {
  constructor();
}

/** Master counterpart for PM2-managed processes; takes the pm2 module instance. */
export class RateLimiterClusterMasterPM2 {
  constructor(pm2: any);
}

/** Limiter backed by a Redis client (opts.storeClient). */
export class RateLimiterRedis extends RateLimiterStoreAbstract {
  constructor(opts: IRateLimiterRedisOptions);
}

/**
 * Per-call options accepted by RateLimiterMongo methods; `attrs` is
 * presumably merged into the stored document — confirm against the
 * RateLimiterMongo implementation.
 */
export interface IRateLimiterMongoFunctionOptions {
  attrs: { [key: string]: any };
}
|
||||
|
||||
/**
 * Limiter backed by MongoDB (opts.storeClient). Overrides the base methods
 * only to narrow the `options` argument to IRateLimiterMongoFunctionOptions;
 * behavior contracts are documented on RateLimiterAbstract.
 */
export class RateLimiterMongo extends RateLimiterStoreAbstract {
  constructor(opts: IRateLimiterMongoOptions);
  // Overloaded accessor: the no-arg form reads the index key prefix, the
  // one-arg form sets it.
  indexKeyPrefix(): Object;
  indexKeyPrefix(obj?: Object): void;

  consume(
    key: string | number,
    pointsToConsume?: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;

  penalty(
    key: string | number,
    points?: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;

  reward(
    key: string | number,
    points?: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;

  block(
    key: string | number,
    secDuration: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;

  get(
    key: string | number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes | null>;

  set(
    key: string | number,
    points: number,
    secDuration: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;

  delete(
    key: string | number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<boolean>;
}
|
||||
|
||||
/** Limiter backed by MySQL; `cb` presumably signals table-creation readiness — confirm in implementation. */
export class RateLimiterMySQL extends RateLimiterStoreAbstract {
  constructor(opts: IRateLimiterStoreNoAutoExpiryOptions, cb?: ICallbackReady);
}

/** Limiter backed by PostgreSQL; same readiness-callback pattern as RateLimiterMySQL. */
export class RateLimiterPostgres extends RateLimiterStoreAbstract {
  constructor(opts: IRateLimiterStoreNoAutoExpiryOptions, cb?: ICallbackReady);
}

/** Limiter backed by a Memcached client; inherits the store-limiter constructor. */
export class RateLimiterMemcache extends RateLimiterStoreAbstract {}

/**
 * Applies consume to several limiters at once and resolves with one
 * RateLimiterRes per limiter.
 */
export class RateLimiterUnion {
  constructor(...limiters: RateLimiterAbstract[]);

  consume(key: string | number, points?: number): Promise<RateLimiterRes[]>;
}

/** Wrapper that checks black/white lists before delegating to the wrapped limiter. */
export class RLWrapperBlackAndWhite extends RateLimiterAbstract {
  constructor(opts: IRLWrapperBlackAndWhiteOptions);
}

interface IRateLimiterQueueOpts {
  maxQueueSize?: number;
}

/**
 * Token-style facade over a limiter: removeTokens resolves with the remaining
 * token count once the tokens are granted.
 */
export class RateLimiterQueue {
  constructor(
    limiterFlexible: RateLimiterAbstract | BurstyRateLimiter,
    opts?: IRateLimiterQueueOpts
  );

  getTokensRemaining(key?: string | number): Promise<number>;

  removeTokens(tokens: number, key?: string | number): Promise<number>;
}

/**
 * Pairs a main limiter with a burst limiter: once rateLimiter's points are
 * exhausted, extra points are drawn from burstLimiter (see
 * BurstyRateLimiter.test.js in this package for the observable behavior).
 */
export class BurstyRateLimiter {
  constructor(
    rateLimiter: RateLimiterAbstract,
    burstLimiter: RateLimiterAbstract
  );

  consume(
    key: string | number,
    pointsToConsume?: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;
}
|
||||
59
framework/node_modules/node-rate-limiter-flexible/package.json
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
{
|
||||
"name": "rate-limiter-flexible",
|
||||
"version": "2.4.1",
|
||||
"description": "Node.js rate limiter by key and protection from DDoS and Brute-Force attacks in process Memory, Redis, MongoDb, Memcached, MySQL, PostgreSQL, Cluster or PM",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "istanbul -v cover -- _mocha --recursive",
|
||||
"debug-test": "mocha --inspect-brk lib/**/**.test.js",
|
||||
"coveralls": "cat ./coverage/lcov.info | coveralls",
|
||||
"eslint": "eslint --quiet lib/**/**.js test/**/**.js",
|
||||
"eslint-fix": "eslint --fix lib/**/**.js test/**/**.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/animir/node-rate-limiter-flexible.git"
|
||||
},
|
||||
"keywords": [
|
||||
"authorization",
|
||||
"security",
|
||||
"rate",
|
||||
"limit",
|
||||
"ratelimter",
|
||||
"brute",
|
||||
"force",
|
||||
"bruteforce",
|
||||
"throttle",
|
||||
"koa",
|
||||
"express",
|
||||
"hapi",
|
||||
"auth",
|
||||
"ddos",
|
||||
"queue"
|
||||
],
|
||||
"author": "animir <animirr@gmail.com>",
|
||||
"license": "ISC",
|
||||
"bugs": {
|
||||
"url": "https://github.com/animir/node-rate-limiter-flexible/issues"
|
||||
},
|
||||
"homepage": "https://github.com/animir/node-rate-limiter-flexible#readme",
|
||||
"types": "./lib/index.d.ts",
|
||||
"devDependencies": {
|
||||
"chai": "^4.1.2",
|
||||
"coveralls": "^3.0.1",
|
||||
"eslint": "^4.19.1",
|
||||
"eslint-config-airbnb-base": "^12.1.0",
|
||||
"eslint-plugin-import": "^2.7.0",
|
||||
"eslint-plugin-node": "^6.0.1",
|
||||
"eslint-plugin-security": "^1.4.0",
|
||||
"istanbul": "^0.4.5",
|
||||
"memcached-mock": "^0.1.0",
|
||||
"mocha": "^5.1.1",
|
||||
"redis-mock": "^0.48.0",
|
||||
"sinon": "^5.0.10"
|
||||
},
|
||||
"browser": {
|
||||
"cluster": false,
|
||||
"crypto": false
|
||||
}
|
||||
}
|
||||
188
framework/node_modules/node-rate-limiter-flexible/test/BurstyRateLimiter.test.js
generated
vendored
Normal file
@@ -0,0 +1,188 @@
|
||||
/* eslint-disable no-unused-expressions */
// Tests for BurstyRateLimiter: a primary limiter whose overflow is absorbed
// by a secondary "burst" limiter. Uses in-memory limiters where possible and
// redis-mock (plus a "closed client" helper) to simulate store errors.
const { describe, it } = require('mocha');
const { expect } = require('chai');
const RateLimiterMemory = require('../lib/RateLimiterMemory');
const BurstyRateLimiter = require('../lib/BurstyRateLimiter');
const RateLimiterRedis = require('../lib/RateLimiterRedis');
const redisMock = require('redis-mock');
const { redisEvalMock, getRedisClientClosed } = require('./helper');

describe('BurstyRateLimiter', () => {
  // First point fits in the primary limiter: burst limiter untouched.
  it('consume 1 point from limiter', (done) => {
    const testKey = 'consume1';
    const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
    const blMemory = new RateLimiterMemory({ points: 1, duration: 10 });
    const bursty = new BurstyRateLimiter(rlMemory, blMemory);
    bursty.consume(testKey)
      .then((res) => {
        expect(res.consumedPoints).to.equal(1);
        expect(res.remainingPoints).to.equal(0);
        expect(res.msBeforeNext <= 1000).to.equal(true);
        expect(res.isFirstInDuration).to.equal(true);
        done();
      })
      .catch((err) => {
        done(err);
      });
  });

  // Second point overflows the primary limiter and is served by the burst one.
  it('consume 1 point from bursty limiter, if all consumed on limiter', (done) => {
    const testKey = 'consume1frombursty';
    const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
    const blMemory = new RateLimiterMemory({ points: 1, duration: 10 });
    const bursty = new BurstyRateLimiter(rlMemory, blMemory);
    bursty.consume(testKey)
      .then(() => {
        bursty.consume(testKey)
          .then((res) => {
            expect(res.consumedPoints).to.equal(2);
            expect(res.remainingPoints).to.equal(0);
            expect(res.msBeforeNext <= 1000).to.equal(true);
            expect(res.isFirstInDuration).to.equal(false);
            done();
          })
          .catch((err) => {
            done(err);
          });
      })
      .catch((err) => {
        done(err);
      });
  });

  // Third point exceeds primary + burst capacity: rejection carries a
  // RateLimiterRes (not an Error) describing the primary limiter's state.
  it('consume 1 point from limiter and 1 from bursty, and then 1 point reject with data from limiter', (done) => {
    const testKey = 'consume1frombursty';
    const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
    const blMemory = new RateLimiterMemory({ points: 1, duration: 10 });
    const bursty = new BurstyRateLimiter(rlMemory, blMemory);
    bursty.consume(testKey)
      .then(() => {
        bursty.consume(testKey)
          .then(() => {
            bursty.consume(testKey)
              .then(() => {
                done(new Error('must not'));
              })
              .catch((rej) => {
                expect(rej.consumedPoints).to.equal(3);
                expect(rej.remainingPoints).to.equal(0);
                expect(rej.msBeforeNext <= 1000).to.equal(true);
                expect(rej.isFirstInDuration).to.equal(false);
                done();
              });
          })
          .catch((err) => {
            done(err);
          });
      })
      .catch((err) => {
        done(err);
      });
  });

  // A store error (closed client) on the primary limiter must propagate as an
  // Error and must NOT consume anything from the burst limiter.
  it('do not consume from burst limiter, if rate limiter consume rejected with error', (done) => {
    const testKey = 'consume-rejected-with-error';
    const redisMockClient = redisMock.createClient();
    redisMockClient.eval = redisEvalMock(redisMockClient);
    const redisClientClosed = getRedisClientClosed(redisMockClient);
    const rlRedisClosed = new RateLimiterRedis({
      storeClient: redisClientClosed,
    });
    const blRedis = new RateLimiterRedis({
      storeClient: redisMockClient,
      keyPrefix: 'bursty',
      points: 1,
      duration: 1,
    });
    const bursty = new BurstyRateLimiter(rlRedisClosed, blRedis);
    bursty.consume(testKey)
      .then(() => {
        done(new Error('must not'));
      })
      .catch((err) => {
        expect(err instanceof Error).to.equal(true);
        blRedis.get(testKey)
          .then((res) => {
            expect(res).to.equal(null);
            done();
          });
      });
  });

  // Symmetric case: a store error on the burst limiter is surfaced, while the
  // primary limiter has still recorded both consume attempts.
  it('reject with burst limiter error if it happens', (done) => {
    const testKey = 'consume-rejected-with-error';
    const redisMockClient = redisMock.createClient();
    redisMockClient.eval = redisEvalMock(redisMockClient);
    const redisClientClosed = getRedisClientClosed(redisMockClient);
    const rlRedis = new RateLimiterRedis({
      storeClient: redisMockClient,
      points: 1,
      duration: 1,
    });
    const blRedisClosed = new RateLimiterRedis({
      storeClient: redisClientClosed,
      keyPrefix: 'bursty',
    });
    const bursty = new BurstyRateLimiter(rlRedis, blRedisClosed);
    bursty.consume(testKey)
      .then(() => {
        bursty.consume(testKey)
          .then(() => {
            done(new Error('must not'));
          })
          .catch((err) => {
            expect(err instanceof Error).to.equal(true);
            rlRedis.get(testKey)
              .then((rlRes) => {
                expect(rlRes.consumedPoints).to.equal(2);
                expect(rlRes.remainingPoints).to.equal(0);
                expect(rlRes.msBeforeNext <= 1000).to.equal(true);
                done();
              });
          });
      })
      .catch((err) => {
        done(err);
      });
  });

  // NOTE(review): the outer consume chain below has no .catch — a rejection
  // there would surface only as a mocha timeout.
  it('consume and get return the combined RateLimiterRes of both limiters with correct msBeforeNext', (done) => {
    const rlMemory = new RateLimiterMemory({ points: 1, duration: 10 });
    const rlBurstMemory = new RateLimiterMemory({ points: 20, duration: 1 });

    const bl = new BurstyRateLimiter(rlMemory, rlBurstMemory);

    bl.consume('keyGet', 1)
      .then((firstConsumeRes) => {
        expect(firstConsumeRes.isFirstInDuration).to.equal(true);
        bl.consume('keyGet', 1)
          .then((res) => {
            expect(res.consumedPoints).to.equal(2);
            expect(res.remainingPoints).to.equal(0);
            expect(res.msBeforeNext <= 1000).to.equal(true);
            expect(res.isFirstInDuration).to.equal(false);

            bl.get('keyGet')
              .then((rlRes) => {
                expect(rlRes.consumedPoints).to.equal(2);
                expect(rlRes.remainingPoints).to.equal(0);
                expect(rlRes.msBeforeNext <= 1000).to.equal(true);
                done();
              })
              .catch(err => done(err));
          })
          .catch((err) => {
            done(err);
          });
      });
  });

  // `points` is exposed from the primary limiter, not the burst one.
  it('returns points from limiter', (done) => {
    const rlMemory = new RateLimiterMemory({ points: 1, duration: 10 });
    const rlBurstMemory = new RateLimiterMemory({ points: 20, duration: 1 });

    const brl = new BurstyRateLimiter(rlMemory, rlBurstMemory);
    expect(brl.points).to.equal(1);
    done();
  });
});
|
||||
436
framework/node_modules/node-rate-limiter-flexible/test/ExpressBruteFlexible.test.js
generated
vendored
Normal file
@@ -0,0 +1,436 @@
|
||||
/* eslint-disable no-unused-expressions */
|
||||
/* eslint-disable prefer-promise-reject-errors */
|
||||
const { describe, it, beforeEach } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const sinon = require('sinon');
|
||||
const redisMock = require('redis-mock');
|
||||
const Memcached = require('memcached-mock');
|
||||
const ExpressBruteFlexible = require('../lib/ExpressBruteFlexible');
|
||||
const limiters = require('../index');
|
||||
const { redisEvalMock } = require('./helper');
|
||||
|
||||
const makeRequest = (middleware, req, res, next) => new Promise((resolve) => {
|
||||
middleware(req, res, (err) => {
|
||||
if (err) {
|
||||
resolve(err);
|
||||
} else {
|
||||
next();
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('ExpressBruteFlexible', function ExpressBruteFlexibleTest() {
|
||||
this.timeout(10000);
|
||||
|
||||
const resObj = {
|
||||
header: () => {
|
||||
},
|
||||
status: () => {
|
||||
},
|
||||
send: () => {
|
||||
},
|
||||
};
|
||||
|
||||
const memcacheMockClient = new Memcached('localhost:11211');
|
||||
const redisMockClient = redisMock.createClient();
|
||||
redisMockClient.eval = redisEvalMock(redisMockClient);
|
||||
|
||||
const mongoCollection = {
|
||||
createIndex: () => {
|
||||
},
|
||||
findOneAndUpdate: () => {
|
||||
},
|
||||
findOne: () => {
|
||||
},
|
||||
deleteOne: () => {
|
||||
},
|
||||
};
|
||||
|
||||
const mongoClientMock = {
|
||||
db: () => {
|
||||
},
|
||||
};
|
||||
|
||||
const mongoDb = {
|
||||
collection: () => {
|
||||
},
|
||||
};
|
||||
|
||||
sinon.stub(mongoDb, 'collection').callsFake(() => mongoCollection);
|
||||
sinon.stub(mongoClientMock, 'db').callsFake(() => mongoDb);
|
||||
|
||||
const mysqlClientMock = {
|
||||
query: () => {
|
||||
},
|
||||
};
|
||||
|
||||
const pgClientMock = {
|
||||
query: () => Promise.resolve(),
|
||||
};
|
||||
|
||||
const pgClientErrored = {
|
||||
query: () => Promise.reject({ code: 0 }),
|
||||
};
|
||||
|
||||
beforeEach((done) => {
|
||||
memcacheMockClient.flush(() => {
|
||||
redisMockClient.flushall(done);
|
||||
});
|
||||
});
|
||||
|
||||
it('allows 1 request with 1 free try', (done) => {
|
||||
const brute = new ExpressBruteFlexible('memory', {
|
||||
freeRetries: 1,
|
||||
});
|
||||
|
||||
brute.prevent({ ip: '127.0.0.1' }, resObj, () => {
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('allows 2 requests with 2 free try', (done) => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMCACHE, {
|
||||
storeClient: memcacheMockClient,
|
||||
freeRetries: 2,
|
||||
handleStoreError(err) {
|
||||
done(err);
|
||||
},
|
||||
});
|
||||
|
||||
const next = sinon.spy();
|
||||
Promise.all([
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, resObj, next),
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, resObj, next),
|
||||
]).then(() => {
|
||||
expect(next.calledTwice).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('works 0 free try', (done) => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMCACHE, {
|
||||
storeClient: memcacheMockClient,
|
||||
freeRetries: 0,
|
||||
handleStoreError(err) {
|
||||
done(err);
|
||||
},
|
||||
failCallback(req, res, next) {
|
||||
next({ message: 'blocked' });
|
||||
},
|
||||
});
|
||||
|
||||
const next = sinon.spy();
|
||||
Promise.all([
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, resObj, next),
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, resObj, next),
|
||||
]).then(() => {
|
||||
expect(next.calledOnce).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('blocks the second request when no free tries and calls failCallback', () => {
|
||||
const brute = new ExpressBruteFlexible('memory', {
|
||||
freeRetries: 0,
|
||||
minWait: 1000,
|
||||
failCallback(req, res, next, nextValidRequestDate) {
|
||||
res.status(403);
|
||||
res.send({
|
||||
error: {
|
||||
nextValidRequestDate,
|
||||
},
|
||||
});
|
||||
next();
|
||||
},
|
||||
});
|
||||
|
||||
const next = sinon.spy();
|
||||
const mockRes = Object.assign({}, resObj);
|
||||
const resStatusSpy = sinon.spy(mockRes, 'status');
|
||||
const resSendSpy = sinon.spy(mockRes, 'send');
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next)
|
||||
.then(() => {
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next)
|
||||
.then(() => {
|
||||
expect(resStatusSpy.calledWith(403)).to.equal(true);
|
||||
const spySendCall = resSendSpy.getCall(0);
|
||||
const blockDuration = spySendCall.args[0].error.nextValidRequestDate.getTime() - Date.now();
|
||||
expect(blockDuration > 0 && blockDuration <= 1000).to.equal(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('maxWait limits maximum block duration on high traffic', (done) => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
|
||||
storeClient: redisMockClient,
|
||||
freeRetries: 0,
|
||||
minWait: 2000,
|
||||
maxWait: 3000,
|
||||
failCallback(req, res, next, nextValidRequestDate) {
|
||||
res.send({
|
||||
error: {
|
||||
nextValidRequestDate,
|
||||
},
|
||||
});
|
||||
next();
|
||||
},
|
||||
});
|
||||
|
||||
let maximumBlockDuration = 0;
|
||||
const mockRes = Object.assign({}, resObj);
|
||||
mockRes.send = (obj) => {
|
||||
const blockDuration = obj.error.nextValidRequestDate.getTime() - Date.now();
|
||||
if (blockDuration > maximumBlockDuration) {
|
||||
maximumBlockDuration = blockDuration;
|
||||
}
|
||||
};
|
||||
|
||||
const next = sinon.spy();
|
||||
const resSendSpy = sinon.spy(mockRes, 'send');
|
||||
Promise.all([
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
]).then(() => {
|
||||
setTimeout(() => {
|
||||
Promise.all([
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
]).then(() => {
|
||||
setTimeout(() => {
|
||||
Promise.all([
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
]).then(() => {
|
||||
setTimeout(() => {
|
||||
expect(maximumBlockDuration <= 3000).to.be.true;
|
||||
expect(resSendSpy.callCount).to.equal(3);
|
||||
done();
|
||||
}, 4100);
|
||||
});
|
||||
}, 3100);
|
||||
});
|
||||
}, 2100);
|
||||
});
|
||||
});
|
||||
|
||||
it('block time grows fibonacci-like way', (done) => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
|
||||
storeClient: redisMockClient,
|
||||
freeRetries: 0,
|
||||
minWait: 2000,
|
||||
maxWait: 10000,
|
||||
lifetime: 10000,
|
||||
failCallback(req, res, next, nextValidRequestDate) {
|
||||
res.send({
|
||||
error: {
|
||||
nextValidRequestDate,
|
||||
},
|
||||
});
|
||||
next();
|
||||
},
|
||||
});
|
||||
|
||||
let sequenceLength = 0;
|
||||
const mockRes = Object.assign({}, resObj);
|
||||
mockRes.send = (obj) => {
|
||||
const blockDuration = obj.error.nextValidRequestDate.getTime() - Date.now();
|
||||
if (blockDuration > 1000 && blockDuration <= 2000 && sequenceLength === 0) {
|
||||
sequenceLength++;
|
||||
}
|
||||
if (blockDuration > 1000 && blockDuration <= 2000 && sequenceLength === 1) {
|
||||
sequenceLength++;
|
||||
}
|
||||
if (blockDuration > 2000 && blockDuration <= 4000 && sequenceLength === 2) {
|
||||
sequenceLength++;
|
||||
}
|
||||
};
|
||||
|
||||
const next = sinon.spy();
|
||||
Promise.all([
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
]).then(() => {
|
||||
setTimeout(() => {
|
||||
Promise.all([
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
]).then(() => {
|
||||
setTimeout(() => {
|
||||
Promise.all([
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
makeRequest(brute.prevent, { ip: '127.0.0.1' }, mockRes, next),
|
||||
]).then(() => {
|
||||
setTimeout(() => {
|
||||
expect(sequenceLength).to.equal(3);
|
||||
done();
|
||||
}, 4100);
|
||||
});
|
||||
}, 2100);
|
||||
});
|
||||
}, 2100);
|
||||
});
|
||||
});
|
||||
|
||||
it('attaches reset to request by default and reset works', (done) => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
|
||||
storeClient: redisMockClient,
|
||||
freeRetries: 1,
|
||||
minWait: 1000,
|
||||
maxWait: 5000,
|
||||
});
|
||||
|
||||
const req = { ip: '127.0.0.1' };
|
||||
|
||||
brute.prevent(req, resObj, () => {
|
||||
expect(typeof req.brute.reset).to.equal('function');
|
||||
req.brute.reset(() => {
|
||||
brute.prevent(req, resObj, () => {
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('does not attach request if option is false', (done) => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
|
||||
storeClient: redisMockClient,
|
||||
freeRetries: 1,
|
||||
minWait: 1000,
|
||||
maxWait: 5000,
|
||||
attachResetToRequest: false,
|
||||
});
|
||||
|
||||
const req = { ip: '127.0.0.1' };
|
||||
|
||||
brute.prevent(req, resObj, () => {
|
||||
expect(typeof req.brute === 'undefined' || typeof req.brute.reset === 'undefined').to.be.true;
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('getMiddleware returns middleware function and works', (done) => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
|
||||
storeClient: redisMockClient,
|
||||
freeRetries: 1,
|
||||
minWait: 1000,
|
||||
maxWait: 5000,
|
||||
attachResetToRequest: false,
|
||||
});
|
||||
|
||||
const middleware = brute.getMiddleware();
|
||||
|
||||
const req = { ip: '127.0.0.1' };
|
||||
|
||||
middleware(req, resObj, done);
|
||||
});
|
||||
|
||||
it('ignores IP from key if getMiddleware is with option ignoreIP=false', (done) => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
|
||||
storeClient: redisMockClient,
|
||||
freeRetries: 1,
|
||||
minWait: 1000,
|
||||
maxWait: 5000,
|
||||
attachResetToRequest: false,
|
||||
handleStoreError(err) {
|
||||
done(err);
|
||||
},
|
||||
});
|
||||
|
||||
const getKeySpy = sinon.spy(ExpressBruteFlexible, '_getKey');
|
||||
const middleware = brute.getMiddleware({
|
||||
ignoreIP: true,
|
||||
});
|
||||
|
||||
const req = { ip: '127.0.0.1' };
|
||||
|
||||
middleware(req, resObj, () => {
|
||||
const getKeySpyCall = getKeySpy.getCall(0);
|
||||
expect(getKeySpyCall.lastArg[0]).to.not.equal(req.ip);
|
||||
getKeySpy.restore();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('memory limiters created internally by storeType', () => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMORY);
|
||||
expect(brute.counterLimiter instanceof limiters.RateLimiterMemory).to.be.true;
|
||||
expect(brute.blockLimiter instanceof limiters.RateLimiterMemory).to.be.true;
|
||||
});
|
||||
|
||||
it('memcache limiters created internally by storeType', () => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMCACHE, {
|
||||
storeClient: memcacheMockClient,
|
||||
});
|
||||
expect(brute.counterLimiter instanceof limiters.RateLimiterMemcache).to.be.true;
|
||||
expect(brute.blockLimiter instanceof limiters.RateLimiterMemcache).to.be.true;
|
||||
});
|
||||
|
||||
it('mongo limiters created internally by storeType', () => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MONGO, {
|
||||
storeClient: mongoClientMock,
|
||||
});
|
||||
expect(brute.counterLimiter instanceof limiters.RateLimiterMongo).to.be.true;
|
||||
expect(brute.blockLimiter instanceof limiters.RateLimiterMongo).to.be.true;
|
||||
});
|
||||
|
||||
it('redis limiters created internally by storeType', () => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.REDIS, {
|
||||
storeClient: redisMockClient,
|
||||
});
|
||||
expect(brute.counterLimiter instanceof limiters.RateLimiterRedis).to.be.true;
|
||||
expect(brute.blockLimiter instanceof limiters.RateLimiterRedis).to.be.true;
|
||||
});
|
||||
|
||||
it('mysql limiters created internally by storeType', () => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MYSQL, {
|
||||
storeClient: mysqlClientMock,
|
||||
storeType: 'client',
|
||||
});
|
||||
expect(brute.counterLimiter instanceof limiters.RateLimiterMySQL).to.be.true;
|
||||
expect(brute.blockLimiter instanceof limiters.RateLimiterMySQL).to.be.true;
|
||||
});
|
||||
|
||||
it('postgres limiters created internally by storeType', () => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.POSTGRES, {
|
||||
storeClient: pgClientMock,
|
||||
storeType: 'client',
|
||||
});
|
||||
expect(brute.counterLimiter instanceof limiters.RateLimiterPostgres).to.be.true;
|
||||
expect(brute.blockLimiter instanceof limiters.RateLimiterPostgres).to.be.true;
|
||||
});
|
||||
|
||||
it('global reset works', (done) => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.MEMORY, {
|
||||
freeRetries: 1,
|
||||
});
|
||||
|
||||
const ip = '127.0.0.1';
|
||||
|
||||
brute.prevent({ ip }, resObj, () => {
|
||||
brute.reset(ip, undefined, () => {
|
||||
const key = ExpressBruteFlexible._getKey([ip, brute.name]);
|
||||
brute.freeLimiter.get(key)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(null);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('global reset launches handleStoreError function', (done) => {
|
||||
const brute = new ExpressBruteFlexible(ExpressBruteFlexible.LIMITER_TYPES.POSTGRES, {
|
||||
storeClient: pgClientMock,
|
||||
storeType: 'client',
|
||||
freeRetries: 1,
|
||||
handleStoreError() {
|
||||
done();
|
||||
},
|
||||
});
|
||||
|
||||
const ip = '127.0.0.1';
|
||||
brute.freeLimiter.client = pgClientErrored;
|
||||
brute.reset(ip);
|
||||
});
|
||||
});
|
||||
512
framework/node_modules/node-rate-limiter-flexible/test/RLWrapperBlackAndWhite.test.js
generated
vendored
Normal file
@@ -0,0 +1,512 @@
|
||||
const { describe, it } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const RLWrapperBlackAndWhite = require('../lib/RLWrapperBlackAndWhite');
|
||||
const RateLimiterMemory = require('../lib/RateLimiterMemory');
|
||||
|
||||
describe('RLWrapperBlackAndWhite ', () => {
|
||||
it('consume if not blacked', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
blackList: ['blacked'],
|
||||
});
|
||||
limiterWrapped
|
||||
.consume('test')
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints === 0 && res.consumedPoints === 1).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('rejected on consume if blacked', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
blackList: ['blacked'],
|
||||
});
|
||||
limiterWrapped
|
||||
.consume('blacked')
|
||||
.then(() => {
|
||||
done(Error('must not consume'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej.remainingPoints === 0 && rej.consumedPoints === 0).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('block if not blacked', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
blackList: ['blacked'],
|
||||
});
|
||||
limiterWrapped
|
||||
.block('test', 30)
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext > 1000 && res.msBeforeNext <= 30000).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('block resolved if blacked', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
blackList: ['blacked'],
|
||||
});
|
||||
limiterWrapped
|
||||
.block('blacked', 30)
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext > 30000).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('penalty if not blacked', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 2,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
blackList: ['blacked'],
|
||||
});
|
||||
limiterWrapped
|
||||
.penalty('test', 1)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints === 1).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('penalty resolved if blacked', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
blackList: ['blacked'],
|
||||
});
|
||||
limiterWrapped
|
||||
.penalty('blacked', 1)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints === 0 && res.remainingPoints === 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('reward if not blacked', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
blackList: ['blacked'],
|
||||
});
|
||||
limiterWrapped.consume('test').then(() => {
|
||||
limiterWrapped
|
||||
.reward('test', 1)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints === 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('reward resolved if blacked', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
blackList: ['blacked'],
|
||||
});
|
||||
limiterWrapped
|
||||
.reward('blacked', 1)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints === 0 && res.remainingPoints === 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('get if not blacked', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
blackList: ['blacked'],
|
||||
});
|
||||
limiterWrapped.consume('test').then(() => {
|
||||
limiterWrapped
|
||||
.get('test')
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints === 1).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('get resolved if blacked', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
blackList: ['blacked'],
|
||||
});
|
||||
limiterWrapped
|
||||
.get('blacked')
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints === 0 && res.remainingPoints === 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('resolve consume if whited', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
whiteList: ['white'],
|
||||
});
|
||||
|
||||
limiterWrapped
|
||||
.consume('white', 3)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints === 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('resolve block if whited', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
whiteList: ['white'],
|
||||
});
|
||||
|
||||
limiterWrapped
|
||||
.block('white', 3)
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext === 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('resolve penalty if whited', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
whiteList: ['white'],
|
||||
});
|
||||
|
||||
limiterWrapped
|
||||
.penalty('white', 3)
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext === 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('resolve reward if whited', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
whiteList: ['white'],
|
||||
});
|
||||
|
||||
limiterWrapped
|
||||
.reward('white', 3)
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext === 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('resolve get if whited', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
whiteList: ['white'],
|
||||
});
|
||||
|
||||
limiterWrapped
|
||||
.get('white')
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints === Number.MAX_SAFE_INTEGER).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('consume resolved if in white and in black', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
whiteList: ['test'],
|
||||
blackList: ['test'],
|
||||
});
|
||||
limiterWrapped
|
||||
.consume('test')
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints === Number.MAX_SAFE_INTEGER).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('consume resolved if isWhiteListed func returns true', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
isWhiteListed: key => key === 'test',
|
||||
});
|
||||
limiterWrapped
|
||||
.consume('test')
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints === Number.MAX_SAFE_INTEGER).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('consume rejected if isBlackListed func returns true', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
isBlackListed: key => key === 'test',
|
||||
});
|
||||
limiterWrapped
|
||||
.consume('test')
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej.msBeforeNext === Number.MAX_SAFE_INTEGER).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('consume even if black listed when runAction set to true', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
isBlackListed: key => key === 'test',
|
||||
runActionAnyway: true,
|
||||
});
|
||||
limiterWrapped
|
||||
.consume('test')
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch(() => {
|
||||
limiterWrapped.get('test').then((res) => {
|
||||
expect(res.consumedPoints === 1).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('block even if black listed when runAction set to true', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
isBlackListed: key => key === 'test',
|
||||
runActionAnyway: true,
|
||||
});
|
||||
limiterWrapped
|
||||
.block('test', 30)
|
||||
.then(() => {
|
||||
limiterWrapped.get('test').then((res) => {
|
||||
expect(res.msBeforeNext > 1000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('penalty even if blacked when runAction set to true', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
isBlackListed: key => key === 'test',
|
||||
runActionAnyway: true,
|
||||
});
|
||||
limiterWrapped
|
||||
.penalty('test', 1)
|
||||
.then(() => {
|
||||
limiterWrapped.get('test').then((res) => {
|
||||
expect(res.consumedPoints === 1).to.equal(true);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('reward even if blacked when runAction set to true', (done) => {
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
isBlackListed: key => key === 'test',
|
||||
runActionAnyway: true,
|
||||
});
|
||||
limiterWrapped
|
||||
.reward('test', 1)
|
||||
.then(() => {
|
||||
limiterWrapped.get('test').then((res) => {
|
||||
expect(res.consumedPoints === -1).to.equal(true);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('delete data straight on limiter even if key is black or white listed', (done) => {
|
||||
const testKey = 'test';
|
||||
const limiter = new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
const limiterWrapped = new RLWrapperBlackAndWhite({
|
||||
limiter,
|
||||
isBlackListed: key => key === testKey,
|
||||
isWhiteListed: key => key === testKey,
|
||||
});
|
||||
limiter.consume(testKey)
|
||||
.then(() => {
|
||||
limiterWrapped.delete(testKey)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
13
framework/node_modules/node-rate-limiter-flexible/test/RateLimiterAbstract.test.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
const { describe, it } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const RateLimiterAbstract = require('../lib/RateLimiterAbstract');
|
||||
|
||||
describe('RateLimiterAbstract', function () {
|
||||
this.timeout(5000);
|
||||
|
||||
it('do not prefix key, if keyPrefix is empty string', () => {
|
||||
const testKey = 'test1';
|
||||
const rateLimiter = new RateLimiterAbstract({ keyPrefix: '' });
|
||||
expect(rateLimiter.getKey(testKey)).to.equal(testKey);
|
||||
});
|
||||
});
|
||||
221
framework/node_modules/node-rate-limiter-flexible/test/RateLimiterCluster.test.js
generated
vendored
Normal file
@@ -0,0 +1,221 @@
|
||||
/* eslint-env mocha */
|
||||
/* eslint-disable no-unused-expressions */
|
||||
/* eslint-disable security/detect-object-injection */
|
||||
const cluster = require('cluster');
|
||||
const sinon = require('sinon');
|
||||
const { describe, it, after } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const { RateLimiterClusterMaster, RateLimiterCluster } = require('../lib/RateLimiterCluster');
|
||||
|
||||
const masterEvents = [];
|
||||
const workerEvents = [];
|
||||
|
||||
const worker = {
|
||||
send: (data) => {
|
||||
workerEvents.forEach((cb) => {
|
||||
cb(data);
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
global.process.on = (eventName, cb) => {
|
||||
if (eventName === 'message') {
|
||||
workerEvents.push(cb);
|
||||
}
|
||||
};
|
||||
global.process.send = (data) => {
|
||||
masterEvents.forEach((cb) => {
|
||||
cb(worker, data);
|
||||
});
|
||||
};
|
||||
|
||||
describe('RateLimiterCluster', function RateLimiterClusterTest() {
|
||||
let rateLimiterClusterMaster;
|
||||
let clusterStubOn;
|
||||
this.timeout(5000);
|
||||
|
||||
before(() => {
|
||||
clusterStubOn = sinon.stub(cluster, 'on').callsFake((eventName, cb) => {
|
||||
masterEvents.push(cb);
|
||||
});
|
||||
rateLimiterClusterMaster = new RateLimiterClusterMaster();
|
||||
});
|
||||
|
||||
after(() => {
|
||||
clusterStubOn.restore();
|
||||
});
|
||||
|
||||
it('master must be singleton', () => {
|
||||
const rateLimiterClusterMaster2 = new RateLimiterClusterMaster();
|
||||
expect(rateLimiterClusterMaster2 === rateLimiterClusterMaster).to.equal(true);
|
||||
});
|
||||
|
||||
it('consume 1 point', (done) => {
|
||||
const key = 'consume1';
|
||||
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix: key });
|
||||
rateLimiterCluster.consume(key)
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch((rej) => {
|
||||
done(rej);
|
||||
});
|
||||
});
|
||||
|
||||
it('reject on consuming more than maximum points', (done) => {
|
||||
const key = 'reject';
|
||||
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix: key });
|
||||
rateLimiterCluster.consume(key, 3)
|
||||
.then(() => {
|
||||
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
expect(rejRes.remainingPoints).to.equal(0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
//
|
||||
it('execute evenly over duration', (done) => {
|
||||
const key = 'evenly';
|
||||
const rateLimiterCluster = new RateLimiterCluster({
|
||||
points: 2, duration: 5, execEvenly: true, keyPrefix: key,
|
||||
});
|
||||
rateLimiterCluster.consume(key)
|
||||
.then(() => {
|
||||
const timeFirstConsume = Date.now();
|
||||
rateLimiterCluster.consume(key)
|
||||
.then(() => {
|
||||
/* Second consume should be delayed more than 2 seconds
|
||||
Explanation:
|
||||
1) consume at 0ms, remaining duration = 4444ms
|
||||
2) delayed consume for (4444 / (0 + 2)) ~= 2222ms, where 2 is a fixed value
|
||||
, because it mustn't delay in the beginning and in the end of duration
|
||||
3) consume after 2222ms by timeout
|
||||
*/
|
||||
expect((Date.now() - timeFirstConsume) > 2000).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('use keyPrefix from options', (done) => {
|
||||
const key = 'use keyPrefix from options';
|
||||
|
||||
const keyPrefix = 'test';
|
||||
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix });
|
||||
rateLimiterCluster.consume(key)
|
||||
.then(() => {
|
||||
expect(typeof rateLimiterClusterMaster._rateLimiters[keyPrefix]._memoryStorage._storage[`${keyPrefix}:${key}`]
|
||||
!== 'undefined').to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
});
|
||||
|
||||
it('create 2 rate limiters depending on keyPrefix', (done) => {
|
||||
const keyPrefixes = ['create1', 'create2'];
|
||||
const rateLimiterClusterprocess1 = new RateLimiterCluster({ keyPrefix: keyPrefixes[0] });
|
||||
const rateLimiterClusterprocess2 = new RateLimiterCluster({ keyPrefix: keyPrefixes[1] });
|
||||
rateLimiterClusterprocess1.consume('key1')
|
||||
.then(() => {
|
||||
rateLimiterClusterprocess2.consume('key2')
|
||||
.then(() => {
|
||||
const createdKeyLimiters = Object.keys(rateLimiterClusterMaster._rateLimiters);
|
||||
expect(createdKeyLimiters.indexOf(keyPrefixes[0]) !== -1 && createdKeyLimiters.indexOf(keyPrefixes[0]) !== -1).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('penalty', (done) => {
|
||||
const key = 'penalty';
|
||||
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix: key });
|
||||
rateLimiterCluster.penalty(key)
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints).to.equal(1);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('reward', (done) => {
|
||||
const key = 'reward';
|
||||
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix: key });
|
||||
rateLimiterCluster.consume(key)
|
||||
.then(() => {
|
||||
rateLimiterCluster.reward(key)
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints).to.equal(2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('block', (done) => {
|
||||
const key = 'block';
|
||||
const rateLimiterCluster = new RateLimiterCluster({ points: 1, duration: 1, keyPrefix: key });
|
||||
rateLimiterCluster.block(key, 2)
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext > 1000 && res.msBeforeNext <= 2000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('get', (done) => {
|
||||
const key = 'get';
|
||||
const rateLimiterCluster = new RateLimiterCluster({ points: 1, duration: 1, keyPrefix: key });
|
||||
rateLimiterCluster.consume(key)
|
||||
.then(() => {
|
||||
rateLimiterCluster.get(key)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(1);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('get null', (done) => {
|
||||
const key = 'getnull';
|
||||
const rateLimiterCluster = new RateLimiterCluster({ points: 1, duration: 1, keyPrefix: key });
|
||||
rateLimiterCluster.get(key)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(null);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('delete', (done) => {
|
||||
const key = 'deletetrue';
|
||||
const rateLimiterCluster = new RateLimiterCluster({ points: 1, duration: 10, keyPrefix: key });
|
||||
rateLimiterCluster.consume(key)
|
||||
.then(() => {
|
||||
rateLimiterCluster.delete(key)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('consume applies options.customDuration to set expire', (done) => {
|
||||
const key = 'consume.customDuration';
|
||||
const rateLimiterCluster = new RateLimiterCluster({ points: 2, duration: 5, keyPrefix: key });
|
||||
rateLimiterCluster.consume(key, 1, { customDuration: 1 })
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext <= 1000).to.be.true;
|
||||
done();
|
||||
})
|
||||
.catch((rej) => {
|
||||
done(rej);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
532
framework/node_modules/node-rate-limiter-flexible/test/RateLimiterMemcache.test.js
generated
vendored
Normal file
@@ -0,0 +1,532 @@
|
||||
const { describe, it, beforeEach } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const RateLimiterMemcache = require('../lib/RateLimiterMemcache');
|
||||
const Memcached = require('memcached-mock');
|
||||
|
||||
describe('RateLimiterMemcache', function RateLimiterMemcacheTest() {
|
||||
this.timeout(5000);
|
||||
const memcacheMockClient = new Memcached('localhost:11211');
|
||||
|
||||
const memcacheUnavailableClient = new Proxy({}, {
|
||||
get: () => (...args) => {
|
||||
const cb = args.pop();
|
||||
cb(Error('Server Unavailable'));
|
||||
},
|
||||
});
|
||||
|
||||
beforeEach((done) => {
|
||||
memcacheMockClient.flush(done);
|
||||
});
|
||||
|
||||
it('consume 1 point', (done) => {
|
||||
const testKey = 'consume1';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 2,
|
||||
duration: 5,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
memcacheMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
|
||||
if (!err) {
|
||||
expect(consumedPoints).to.equal(1);
|
||||
done();
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('rejected when consume more than maximum points', (done) => {
|
||||
const testKey = 'consume2';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 1,
|
||||
duration: 5,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
expect(rejRes.msBeforeNext >= 0).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('execute evenly over duration', (done) => {
|
||||
const testKey = 'consumeEvenly';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 2,
|
||||
duration: 5,
|
||||
execEvenly: true,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
const timeFirstConsume = Date.now();
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
/* Second consume should be delayed more than 2 seconds
|
||||
Explanation:
|
||||
1) consume at 0ms, remaining duration = 5000ms
|
||||
2) delayed consume for (4999 / (0 + 2)) ~= 2500ms, where 2 is a fixed value
|
||||
, because it mustn't delay in the beginning and in the end of duration
|
||||
3) consume after 2500ms by timeout
|
||||
*/
|
||||
const diff = Date.now() - timeFirstConsume;
|
||||
expect(diff > 2400 && diff < 2600).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('execute evenly over duration with minimum delay 20 ms', (done) => {
|
||||
const testKey = 'consumeEvenlyMinDelay';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 100,
|
||||
duration: 1,
|
||||
execEvenly: true,
|
||||
execEvenlyMinDelayMs: 20,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
const timeFirstConsume = Date.now();
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
expect(Date.now() - timeFirstConsume >= 20).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('makes penalty', (done) => {
|
||||
const testKey = 'penalty1';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 3,
|
||||
duration: 5,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiter
|
||||
.penalty(testKey)
|
||||
.then(() => {
|
||||
memcacheMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
|
||||
if (!err) {
|
||||
expect(consumedPoints).to.equal(2);
|
||||
done();
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('reward points', (done) => {
|
||||
const testKey = 'reward';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 1,
|
||||
duration: 5,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiter
|
||||
.reward(testKey)
|
||||
.then(() => {
|
||||
memcacheMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
|
||||
if (!err) {
|
||||
expect(consumedPoints).to.equal(0);
|
||||
done();
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('block key in memory when inMemory block options set up', (done) => {
|
||||
const testKey = 'blockmem';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 1,
|
||||
duration: 5,
|
||||
inmemoryBlockOnConsumed: 2, // @deprecated Kept to test backward compatability
|
||||
inmemoryBlockDuration: 10, // @deprecated Kept to test backward compatability
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
// msBeforeNext more than 5000, so key was blocked
|
||||
expect(rejRes.msBeforeNext > 5000 && rejRes.remainingPoints === 0).to.equal(true);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
});
|
||||
|
||||
it('expire inMemory blocked key', (done) => {
|
||||
const testKey = 'blockmem2';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
inMemoryBlockOnConsumed: 2,
|
||||
inMemoryBlockDuration: 2,
|
||||
});
|
||||
// It blocks on the first consume as consumed points more than available
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {
|
||||
})
|
||||
.catch(() => {
|
||||
setTimeout(() => {
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then((res) => {
|
||||
// Block expired
|
||||
expect(res.msBeforeNext <= 1000 && res.remainingPoints === 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
}, 2001);
|
||||
});
|
||||
});
|
||||
|
||||
it('throws error when inMemoryBlockOnConsumed is not set, but inMemoryBlockDuration is set', (done) => {
|
||||
try {
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
inMemoryBlockDuration: 2,
|
||||
});
|
||||
rateLimiter.reward('test');
|
||||
} catch (err) {
|
||||
expect(err instanceof Error).to.equal(true);
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('throws error when inMemoryBlockOnConsumed less than points', (done) => {
|
||||
try {
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 2,
|
||||
inMemoryBlockOnConsumed: 1,
|
||||
});
|
||||
rateLimiter.reward('test');
|
||||
} catch (err) {
|
||||
expect(err instanceof Error).to.equal(true);
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('use keyPrefix from options', () => {
|
||||
const testKey = 'key';
|
||||
const keyPrefix = 'test';
|
||||
const rateLimiter = new RateLimiterMemcache({ keyPrefix, storeClient: memcacheMockClient });
|
||||
|
||||
expect(rateLimiter.getKey(testKey)).to.equal('test:key');
|
||||
});
|
||||
|
||||
it('blocks key for block duration when consumed more than points', (done) => {
|
||||
const testKey = 'block';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
blockDuration: 2,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej.msBeforeNext > 1000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('block expires in blockDuration seconds', (done) => {
|
||||
const testKey = 'blockexpires';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
blockDuration: 2,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch(() => {
|
||||
setTimeout(() => {
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must resolve'));
|
||||
});
|
||||
}, 2000);
|
||||
});
|
||||
});
|
||||
|
||||
it('block custom key', (done) => {
|
||||
const testKey = 'blockcustom';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
rateLimiter.block(testKey, 2).then(() => {
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej.msBeforeNext > 1000 && rej.msBeforeNext <= 2000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('get points', (done) => {
|
||||
const testKey = 'get';
|
||||
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 2,
|
||||
duration: 1,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiter
|
||||
.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('get must not reject'));
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('consume must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('get returns NULL if key is not set', (done) => {
|
||||
const testKey = 'getnull';
|
||||
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 2,
|
||||
duration: 1,
|
||||
});
|
||||
rateLimiter
|
||||
.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(null);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('get must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('delete key and return true', (done) => {
|
||||
const testKey = 'deletetrue';
|
||||
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 2,
|
||||
duration: 10,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiter
|
||||
.delete(testKey)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(true);
|
||||
rateLimiter
|
||||
.get(testKey)
|
||||
.then((resGet) => {
|
||||
expect(resGet).to.equal(null);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('delete returns false, if there is no key', (done) => {
|
||||
const testKey = 'deletefalse';
|
||||
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 2,
|
||||
duration: 10,
|
||||
});
|
||||
rateLimiter
|
||||
.delete(testKey)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(false);
|
||||
rateLimiter
|
||||
.get(testKey)
|
||||
.then((resGet) => {
|
||||
expect(resGet).to.equal(null);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('creates key and increment on 2 parallel requests', () => {
|
||||
const testKey = 'parallel';
|
||||
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheMockClient,
|
||||
points: 2,
|
||||
duration: 1,
|
||||
});
|
||||
|
||||
return Promise.all([
|
||||
rateLimiter.consume(testKey),
|
||||
rateLimiter.consume(testKey),
|
||||
]).then((resAll) => {
|
||||
expect(resAll[0].consumedPoints === 1 && resAll[1].consumedPoints === 2).to.equal(true);
|
||||
});
|
||||
});
|
||||
|
||||
it('rejected when MemcachedClient error', (done) => {
|
||||
const testKey = 'memcacheerror';
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheUnavailableClient,
|
||||
points: 1,
|
||||
duration: 5,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {
|
||||
expect.fail('should not be resolved');
|
||||
})
|
||||
.catch(() => {
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('consume using insuranceLimiter when MemcachedClient error', (done) => {
|
||||
const testKey = 'memcacheerror2';
|
||||
|
||||
const rateLimiter = new RateLimiterMemcache({
|
||||
storeClient: memcacheUnavailableClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
insuranceLimiter: new RateLimiterMemcache({
|
||||
points: 2,
|
||||
duration: 2,
|
||||
storeClient: memcacheMockClient,
|
||||
}),
|
||||
});
|
||||
|
||||
// Consume from insurance limiter with different options
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints === 1 && res.msBeforeNext > 1000).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
});
|
||||
|
||||
it('does not expire key if duration set to 0', (done) => {
|
||||
const testKey = 'neverexpire';
|
||||
const rateLimiter = new RateLimiterMemcache({ storeClient: memcacheMockClient, points: 2, duration: 0 });
|
||||
rateLimiter.consume(testKey, 1)
|
||||
.then(() => {
|
||||
rateLimiter.consume(testKey, 1)
|
||||
.then(() => {
|
||||
rateLimiter.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(2);
|
||||
expect(res.msBeforeNext).to.equal(-1);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('block key forever, if secDuration is 0', (done) => {
|
||||
const testKey = 'neverexpire';
|
||||
const rateLimiter = new RateLimiterMemcache({ storeClient: memcacheMockClient, points: 1, duration: 1 });
|
||||
rateLimiter.block(testKey, 0)
|
||||
.then(() => {
|
||||
setTimeout(() => {
|
||||
rateLimiter.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(2);
|
||||
expect(res.msBeforeNext).to.equal(-1);
|
||||
done();
|
||||
});
|
||||
}, 1000);
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
});
|
||||
391
framework/node_modules/node-rate-limiter-flexible/test/RateLimiterMemory.test.js
generated
vendored
Normal file
@@ -0,0 +1,391 @@
|
||||
/* eslint-disable no-unused-expressions */
|
||||
const { describe, it } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const RateLimiterMemory = require('../lib/RateLimiterMemory');
|
||||
|
||||
describe('RateLimiterMemory with fixed window', function RateLimiterMemoryTest() {
|
||||
this.timeout(5000);
|
||||
|
||||
it('consume 1 point', (done) => {
|
||||
const testKey = 'consume1';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 5 });
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
const res = rateLimiterMemory._memoryStorage.get(rateLimiterMemory.getKey(testKey));
|
||||
expect(res.consumedPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('can not consume more than maximum points', (done) => {
|
||||
const testKey = 'consume2';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
|
||||
rateLimiterMemory.consume(testKey, 2)
|
||||
.then(() => {})
|
||||
.catch((rejRes) => {
|
||||
expect(rejRes.msBeforeNext >= 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('execute evenly over duration with minimum delay 20 ms', (done) => {
|
||||
const testKey = 'consumeEvenlyMinDelay';
|
||||
const rateLimiterMemory = new RateLimiterMemory({
|
||||
points: 100, duration: 1, execEvenly: true, execEvenlyMinDelayMs: 20,
|
||||
});
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
const timeFirstConsume = Date.now();
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
expect(Date.now() - timeFirstConsume >= 20).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('execute evenly over duration', (done) => {
|
||||
const testKey = 'consumeEvenly';
|
||||
const rateLimiterMemory = new RateLimiterMemory({
|
||||
points: 2, duration: 5, execEvenly: true, execEvenlyMinDelayMs: 1,
|
||||
});
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
const timeFirstConsume = Date.now();
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
/* Second consume should be delayed more than 2 seconds
|
||||
Explanation:
|
||||
1) consume at 0ms, remaining duration = 5000ms
|
||||
2) delayed consume for (4999 / (0 + 2)) ~= 2500ms, where 2 is a fixed value
|
||||
, because it mustn't delay in the beginning and in the end of duration
|
||||
3) consume after 2500ms by timeout
|
||||
*/
|
||||
const diff = Date.now() - timeFirstConsume;
|
||||
expect(diff > 2400 && diff < 2600).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('makes penalty', (done) => {
|
||||
const testKey = 'penalty1';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 3, duration: 5 });
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiterMemory.penalty(testKey)
|
||||
.then(() => {
|
||||
const res = rateLimiterMemory._memoryStorage.get(rateLimiterMemory.getKey(testKey));
|
||||
expect(res.consumedPoints).to.equal(2);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('reward points', (done) => {
|
||||
const testKey = 'reward1';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiterMemory.reward(testKey)
|
||||
.then(() => {
|
||||
const res = rateLimiterMemory._memoryStorage.get(rateLimiterMemory.getKey(testKey));
|
||||
expect(res.consumedPoints).to.equal(0);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('use keyPrefix from options', () => {
|
||||
const testKey = 'key';
|
||||
const keyPrefix = 'test';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ keyPrefix, points: 1, duration: 5 });
|
||||
|
||||
expect(rateLimiterMemory.getKey(testKey)).to.equal('test:key');
|
||||
});
|
||||
|
||||
it('blocks key for block duration when consumed more than points', (done) => {
|
||||
const testKey = 'block';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 1, blockDuration: 2 });
|
||||
rateLimiterMemory.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej.msBeforeNext > 1000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('do not block key second time until block expires no matter how many points consumed', (done) => {
|
||||
const testKey = 'donotblocktwice';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 1, blockDuration: 2 });
|
||||
rateLimiterMemory.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch(() => {
|
||||
setTimeout(() => {
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej.msBeforeNext < 1000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
}, 1001);
|
||||
});
|
||||
});
|
||||
|
||||
it('block expires in blockDuration seconds', (done) => {
|
||||
const testKey = 'blockexpires';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 1, blockDuration: 2 });
|
||||
rateLimiterMemory.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch(() => {
|
||||
setTimeout(() => {
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must resolve'));
|
||||
});
|
||||
}, 2000);
|
||||
});
|
||||
});
|
||||
|
||||
it('block custom key', (done) => {
|
||||
const testKey = 'blockcustom';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 1 });
|
||||
rateLimiterMemory.block(testKey, 2);
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej.msBeforeNext > 1000 && rej.msBeforeNext <= 2000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('get by key', (done) => {
|
||||
const testKey = 'get';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 5 });
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiterMemory.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints).to.equal(1);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('get resolves null if key is not set', (done) => {
|
||||
const testKey = 'getbynotexistingkey';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 5 });
|
||||
rateLimiterMemory.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(null);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('delete resolves true if key is set', (done) => {
|
||||
const testKey = 'deletekey';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 5 });
|
||||
rateLimiterMemory.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiterMemory.delete(testKey)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(true);
|
||||
done();
|
||||
}).catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('delete resolves false if key is not set', (done) => {
|
||||
const testKey = 'deletekey2';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 5 });
|
||||
rateLimiterMemory.delete(testKey)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(false);
|
||||
done();
|
||||
}).catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('consume applies options.customDuration to set expire', (done) => {
|
||||
const testKey = 'options.customDuration';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
|
||||
rateLimiterMemory.consume(testKey, 1, { customDuration: 1 })
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext <= 1000).to.be.true;
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('consume applies options.customDuration to set not expiring key', (done) => {
|
||||
const testKey = 'options.customDuration.forever';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
|
||||
rateLimiterMemory.consume(testKey, 1, { customDuration: 0 })
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext === -1).to.be.true;
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('penalty applies options.customDuration to set expire', (done) => {
|
||||
const testKey = 'options.customDuration';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
|
||||
rateLimiterMemory.penalty(testKey, 1, { customDuration: 1 })
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext <= 1000).to.be.true;
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('reward applies options.customDuration to set expire', (done) => {
|
||||
const testKey = 'options.customDuration';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 1, duration: 5 });
|
||||
rateLimiterMemory.reward(testKey, 1, { customDuration: 1 })
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext <= 1000).to.be.true;
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('does not expire key if duration set to 0', (done) => {
|
||||
const testKey = 'neverexpire';
|
||||
const rateLimiterMemory = new RateLimiterMemory({ points: 2, duration: 0 });
|
||||
rateLimiterMemory.consume(testKey, 1)
|
||||
.then(() => {
|
||||
rateLimiterMemory.consume(testKey, 1)
|
||||
.then(() => {
|
||||
rateLimiterMemory.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(2);
|
||||
expect(res.msBeforeNext).to.equal(-1);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('block key forever, if secDuration is 0', (done) => {
|
||||
const testKey = 'neverexpire';
|
||||
const rateLimiter = new RateLimiterMemory({ points: 1, duration: 1 });
|
||||
rateLimiter.block(testKey, 0)
|
||||
.then(() => {
|
||||
setTimeout(() => {
|
||||
rateLimiter.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(2);
|
||||
expect(res.msBeforeNext).to.equal(-1);
|
||||
done();
|
||||
});
|
||||
}, 1000);
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('set points by key', (done) => {
|
||||
const testKey = 'set';
|
||||
const rateLimiter = new RateLimiterMemory({ points: 10, duration: 1 });
|
||||
rateLimiter.set(testKey, 12)
|
||||
.then(() => {
|
||||
rateLimiter.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(12);
|
||||
expect(res.remainingPoints).to.equal(0);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('set points by key forever', (done) => {
|
||||
const testKey = 'setforever';
|
||||
const rateLimiter = new RateLimiterMemory({ points: 10, duration: 1 });
|
||||
rateLimiter.set(testKey, 12, 0)
|
||||
.then(() => {
|
||||
setTimeout(() => {
|
||||
rateLimiter.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(12);
|
||||
expect(res.msBeforeNext).to.equal(-1);
|
||||
done();
|
||||
});
|
||||
}, 1100);
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
});
|
||||
675
framework/node_modules/node-rate-limiter-flexible/test/RateLimiterMongo.test.js
generated
vendored
Normal file
@@ -0,0 +1,675 @@
|
||||
/* eslint-disable no-new */
|
||||
const {
|
||||
describe, it, beforeEach, before,
|
||||
} = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const sinon = require('sinon');
|
||||
const RateLimiterMongo = require('../lib/RateLimiterMongo');
|
||||
const RateLimiterMemory = require('../lib/RateLimiterMemory');
|
||||
|
||||
describe('RateLimiterMongo with fixed window', function RateLimiterMongoTest() {
|
||||
this.timeout(5000);
|
||||
let mongoClient;
|
||||
let mongoClientV4;
|
||||
let mongoClientStub;
|
||||
let mongoDb;
|
||||
let mongoCollection;
|
||||
let stubMongoDbCollection;
|
||||
|
||||
before(() => {
|
||||
mongoClient = {
|
||||
db: () => {},
|
||||
topology: {},
|
||||
};
|
||||
|
||||
mongoClientV4 = {
|
||||
collection: () => {},
|
||||
client: {},
|
||||
};
|
||||
|
||||
mongoDb = {
|
||||
collection: () => {},
|
||||
};
|
||||
|
||||
stubMongoDbCollection = sinon.stub(mongoDb, 'collection').callsFake(() => mongoCollection);
|
||||
mongoClientStub = sinon.stub(mongoClient, 'db').callsFake(() => mongoDb);
|
||||
sinon.stub(mongoClientV4, 'collection').callsFake(() => mongoCollection);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
mongoCollection = {
|
||||
createIndex: () => {},
|
||||
findOneAndUpdate: () => {},
|
||||
findOne: () => {},
|
||||
deleteOne: () => {},
|
||||
};
|
||||
sinon.stub(mongoCollection, 'createIndex').callsFake(() => {});
|
||||
});
|
||||
|
||||
it('throws error if storeClient not set', (done) => {
|
||||
try {
|
||||
new RateLimiterMongo({ points: 2, duration: 5 });
|
||||
} catch (err) {
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('consume 1 point', (done) => {
|
||||
const testKey = 'consume1';
|
||||
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
|
||||
const res = {
|
||||
value: {
|
||||
points: 1,
|
||||
expire: 5000,
|
||||
},
|
||||
};
|
||||
return Promise.resolve(res);
|
||||
});
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
|
||||
rateLimiter.consume(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('rejected when consume more than maximum points', (done) => {
|
||||
const testKey = 'consumerej';
|
||||
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
|
||||
const res = {
|
||||
value: {
|
||||
points: 2,
|
||||
expire: 5000,
|
||||
},
|
||||
};
|
||||
return Promise.resolve(res);
|
||||
});
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 1, duration: 5 });
|
||||
rateLimiter.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(Error('have to reject'));
|
||||
})
|
||||
.catch((err) => {
|
||||
expect(err.consumedPoints).to.equal(2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('makes penalty', (done) => {
|
||||
const testKey = 'penalty1';
|
||||
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
|
||||
const res = {
|
||||
value: {
|
||||
points: 1,
|
||||
expire: 5000,
|
||||
},
|
||||
};
|
||||
return Promise.resolve(res);
|
||||
});
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
|
||||
rateLimiter.penalty(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('reward points', (done) => {
|
||||
const testKey = 'reward1';
|
||||
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
|
||||
const res = {
|
||||
value: {
|
||||
points: -1,
|
||||
expire: 5000,
|
||||
},
|
||||
};
|
||||
return Promise.resolve(res);
|
||||
});
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
|
||||
rateLimiter.reward(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(-1);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('consume using insuranceLimiter when Mongo error', (done) => {
|
||||
const testKey = 'errorinsurance';
|
||||
|
||||
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => Promise.reject(Error('Mongo error')));
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({
|
||||
storeClient: mongoClient,
|
||||
insuranceLimiter: new RateLimiterMemory({
|
||||
points: 2,
|
||||
duration: 2,
|
||||
}),
|
||||
});
|
||||
rateLimiter.consume(testKey)
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('block key in memory when inMemory block options set up', (done) => {
|
||||
const testKey = 'blockmem';
|
||||
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
|
||||
const res = {
|
||||
value: {
|
||||
points: 11,
|
||||
expire: 5000,
|
||||
},
|
||||
};
|
||||
return Promise.resolve(res);
|
||||
});
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({
|
||||
storeClient: mongoClient,
|
||||
points: 2,
|
||||
duration: 5,
|
||||
inMemoryBlockOnConsumed: 10,
|
||||
inMemoryBlockDuration: 10,
|
||||
});
|
||||
rateLimiter.consume(testKey)
|
||||
.then(() => {
|
||||
done(Error('have to reject'));
|
||||
})
|
||||
.catch(() => {
|
||||
expect(rateLimiter._inMemoryBlockedKeys.msBeforeExpire(rateLimiter.getKey(testKey)) > 0).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('blocks key for block duration when consumed more than points', (done) => {
|
||||
const testKey = 'block';
|
||||
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => {
|
||||
const res = {
|
||||
value: {
|
||||
points: 2,
|
||||
expire: 1000,
|
||||
},
|
||||
};
|
||||
return Promise.resolve(res);
|
||||
});
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({
|
||||
storeClient: mongoClient, points: 1, duration: 1, blockDuration: 2,
|
||||
});
|
||||
rateLimiter.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej.msBeforeNext > 1000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('block using insuranceLimiter when Mongo error', (done) => {
|
||||
const testKey = 'mongoerrorblock';
|
||||
sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => Promise.reject(Error('Mongo error')));
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({
|
||||
storeClient: mongoClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
blockDuration: 2,
|
||||
insuranceLimiter: new RateLimiterMemory({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
}),
|
||||
});
|
||||
rateLimiter.block(testKey, 2)
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext > 1000 && res.msBeforeNext <= 2000).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('return correct data with _getRateLimiterRes', () => {
|
||||
const rateLimiter = new RateLimiterMongo({ points: 5, storeClient: mongoClient });
|
||||
|
||||
const res = rateLimiter._getRateLimiterRes('test', 1, {
|
||||
value: {
|
||||
points: 3,
|
||||
expire: new Date(Date.now() + 1000).toISOString(),
|
||||
},
|
||||
});
|
||||
|
||||
expect(res.msBeforeNext <= 1000
|
||||
&& res.consumedPoints === 3
|
||||
&& res.isFirstInDuration === false
|
||||
&& res.remainingPoints === 2).to.equal(true);
|
||||
});
|
||||
|
||||
it('get points', (done) => {
|
||||
const testKey = 'get';
|
||||
|
||||
sinon.stub(mongoCollection, 'findOne').callsFake(() => {
|
||||
const res = {
|
||||
value: {
|
||||
points: 1,
|
||||
expire: 1000,
|
||||
},
|
||||
};
|
||||
return Promise.resolve(res);
|
||||
});
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({
|
||||
storeClient: mongoClient, points: 1, duration: 1,
|
||||
});
|
||||
|
||||
rateLimiter.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('get points return NULL if key is not set', (done) => {
|
||||
const testKey = 'getnull';
|
||||
|
||||
sinon.stub(mongoCollection, 'findOne').callsFake(() => {
|
||||
const res = null;
|
||||
return Promise.resolve(res);
|
||||
});
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({
|
||||
storeClient: mongoClient, points: 1, duration: 1,
|
||||
});
|
||||
|
||||
rateLimiter.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(null);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('get points return NULL if key is not set and store returns undefined', (done) => {
|
||||
const testKey = 'getnull';
|
||||
|
||||
sinon.stub(mongoCollection, 'findOne').callsFake(() => {
|
||||
return Promise.resolve(undefined);
|
||||
});
|
||||
|
||||
const rateLimiter = new RateLimiterMongo({
|
||||
storeClient: mongoClient, points: 1, duration: 1,
|
||||
});
|
||||
|
||||
rateLimiter.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(null);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('use dbName from options if db is function', () => {
|
||||
mongoClientStub.restore();
|
||||
mongoClientStub = sinon.stub(mongoClient, 'db').callsFake((dbName) => {
|
||||
expect(dbName).to.equal('test');
|
||||
return mongoDb;
|
||||
});
|
||||
|
||||
new RateLimiterMongo({
|
||||
storeClient: mongoClient, dbName: 'test',
|
||||
});
|
||||
|
||||
mongoClientStub.restore();
|
||||
mongoClientStub = sinon.stub(mongoClient, 'db').callsFake(() => mongoDb);
|
||||
});
|
||||
|
||||
it('use collection from client instead of db if Mongoose in use', () => {
|
||||
const createIndex = sinon.spy();
|
||||
const mongooseConnection = {
|
||||
collection: () => ({
|
||||
createIndex,
|
||||
}),
|
||||
};
|
||||
|
||||
new RateLimiterMongo({
|
||||
storeClient: mongooseConnection,
|
||||
});
|
||||
expect(createIndex.called);
|
||||
});
|
||||
|
||||
it('delete key and return true', (done) => {
  const testKey = 'deletetrue';
  // deletedCount: 1 -> driver reports one removed document.
  sinon.stub(mongoCollection, 'deleteOne').callsFake(() => Promise.resolve({
    deletedCount: 1,
  }));

  const rateLimiter = new RateLimiterMongo({
    storeClient: mongoClient, points: 1, duration: 1, blockDuration: 2,
  });

  rateLimiter.delete(testKey)
    .then((res) => {
      expect(res).to.equal(true);
      done();
    })
    // FIX: without a catch, an assertion failure became an unhandled
    // rejection and the test timed out instead of reporting the error.
    .catch(done);
});

it('delete returns false, if there is no key', (done) => {
  const testKey = 'deletefalse';
  // Legacy driver result shape: result.n === 0 -> nothing deleted.
  sinon.stub(mongoCollection, 'deleteOne').callsFake(() => Promise.resolve({
    result: {
      n: 0,
    },
  }));

  const rateLimiter = new RateLimiterMongo({
    storeClient: mongoClient, points: 1, duration: 1, blockDuration: 2,
  });

  rateLimiter.delete(testKey)
    .then((res) => {
      expect(res).to.equal(false);
      done();
    })
    .catch(done);
});

it('uses tableName option to create collection', (done) => {
  const tableName = 'collection_name';

  stubMongoDbCollection.restore();
  stubMongoDbCollection = sinon.stub(mongoDb, 'collection').callsFake((name) => {
    expect(name).to.equal(tableName);
    // Re-install the default stub before finishing so later tests see it.
    stubMongoDbCollection.restore();
    stubMongoDbCollection = sinon.stub(mongoDb, 'collection').callsFake(() => mongoCollection);
    done();
    return mongoCollection;
  });

  const client = {
    db: () => mongoDb,
  };

  new RateLimiterMongo({
    storeClient: client,
    tableName,
  });
});
|
||||
|
||||
// The four tests below verify that `options.attrs` is merged into the Mongo
// `where` clause for every operation type (upsert, forced upsert, get, delete).
it('_upsert adds options.attrs to where clause to find document by additional attributes in conjunction with key', (done) => {
  const testKey = '_upsert';
  const testAttrs = {
    country: 'country1',
  };
  sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake((where) => {
    expect(where.country).to.equal(testAttrs.country);
    done();
    return Promise.resolve({
      value: { points: 1, expire: 5000 },
    });
  });

  const limiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
  limiter.consume(testKey, 1, { attrs: testAttrs })
    .catch(done);
});

it('forced _upsert adds options.attrs to where clause to find document by additional attributes in conjunction with key', (done) => {
  const testKey = '_upsertforce';
  const testAttrs = {
    country: 'country2',
  };
  sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake((where) => {
    expect(where.country).to.equal(testAttrs.country);
    done();
    return Promise.resolve({
      value: { points: 1, expire: 5000 },
    });
  });

  const limiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
  limiter.block(testKey, 1, { attrs: testAttrs })
    .catch(done);
});

it('_get adds options.attrs to where clause to find document by additional attributes in conjunction with key', (done) => {
  const testKey = '_get';
  const testAttrs = {
    country: 'country3',
  };
  sinon.stub(mongoCollection, 'findOne').callsFake((where) => {
    expect(where.country).to.equal(testAttrs.country);
    done();
    return Promise.resolve({
      value: { points: 1, expire: 5000 },
    });
  });

  const limiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
  limiter.get(testKey, { attrs: testAttrs });
});

it('_delete adds options.attrs to where clause to find document by additional attributes in conjunction with key', (done) => {
  const testKey = '_delete';
  const testAttrs = {
    country: 'country4',
  };
  sinon.stub(mongoCollection, 'deleteOne').callsFake((where) => {
    expect(where.country).to.equal(testAttrs.country);
    done();
    return Promise.resolve({
      result: { n: 0 },
    });
  });

  const limiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
  limiter.delete(testKey, { attrs: testAttrs });
});

it('set indexKeyPrefix empty {} if not provided', () => {
  const limiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
  expect(Object.keys(limiter.indexKeyPrefix).length).to.equal(0);
});
|
||||
|
||||
// duration: 0 means a key never expires: `expire` stays null in the store and
// `get` must report msBeforeNext === -1.
it('does not expire key if duration set to 0', (done) => {
  const testKey = 'neverexpire';
  const firstUpdateStub = sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => Promise.resolve({
    value: { points: 1, expire: null },
  }));
  const limiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 0 });
  limiter.consume(testKey, 1)
    .then(() => {
      firstUpdateStub.restore();
      const secondUpdateStub = sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => Promise.resolve({
        value: { points: 2, expire: null },
      }));
      limiter.consume(testKey, 1)
        .then(() => {
          secondUpdateStub.restore();
          const findOneStub = sinon.stub(mongoCollection, 'findOne').callsFake(() => Promise.resolve({
            value: { points: 2, expire: null },
          }));
          limiter.get(testKey)
            .then((res) => {
              expect(res.consumedPoints).to.equal(2);
              expect(res.msBeforeNext).to.equal(-1);
              findOneStub.restore();
              done();
            });
        })
        .catch(done);
    })
    .catch(done);
});

// block(key, 0) blocks forever: even after a second the key is still blocked
// and carries no expiry.
it('block key forever, if secDuration is 0', (done) => {
  const testKey = 'neverexpire';
  const updateStub = sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake(() => Promise.resolve({
    value: { points: 2, expire: null },
  }));
  const limiter = new RateLimiterMongo({ storeClient: mongoClient, points: 1, duration: 1 });
  limiter.block(testKey, 0)
    .then(() => {
      setTimeout(() => {
        updateStub.restore();
        const findOneStub = sinon.stub(mongoCollection, 'findOne').callsFake(() => Promise.resolve({
          value: { points: 2, expire: null },
        }));
        limiter.get(testKey)
          .then((res) => {
            expect(res.consumedPoints).to.equal(2);
            expect(res.msBeforeNext).to.equal(-1);
            findOneStub.restore();
            done();
          });
      }, 1000);
    })
    .catch(done);
});
|
||||
|
||||
// Driver v3 expects `returnOriginal: false` in findOneAndUpdate options.
it('consume 1 point (driver v3)', (done) => {
  const testKey = 'consume1v3';
  sinon.stub(mongoClient, 'topology').value({ s: { options: { metadata: { driver: { version: '3.6' } } } } });
  sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake((where, upsertData, upsertOptions) => {
    expect(upsertOptions.returnOriginal).to.equal(false);

    return Promise.resolve({
      value: { points: 1, expire: 5000 },
    });
  });

  const limiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
  limiter.consume(testKey)
    .then((res) => {
      expect(res.consumedPoints).to.equal(1);
      done();
    })
    .catch(done);
});

// Driver v4 replaced `returnOriginal` with `returnDocument: 'after'`.
it('consume 1 point (driver v4)', (done) => {
  const testKey = 'consume1v4';
  sinon.stub(mongoClient, 'topology').value({ s: { options: { metadata: { driver: { version: '4.0' } } } } });
  sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake((where, upsertData, upsertOptions) => {
    expect(upsertOptions.returnDocument).to.equal('after');

    return Promise.resolve({
      value: { points: 1, expire: 5000 },
    });
  });

  const limiter = new RateLimiterMongo({ storeClient: mongoClient, points: 2, duration: 5 });
  limiter.consume(testKey)
    .then((res) => {
      expect(res.consumedPoints).to.equal(1);
      done();
    })
    .catch(done);
});

// Driver >= 4.1.3 keeps its metadata under `client.topology` instead.
it('consume 1 point (driver v4.1.3)', (done) => {
  const testKey = 'consume1v4.1.3';
  sinon.stub(mongoClientV4, 'client').value({ topology: { s: { options: { metadata: { driver: { version: '4.1.3' } } } } } });
  sinon.stub(mongoCollection, 'findOneAndUpdate').callsFake((where, upsertData, upsertOptions) => {
    expect(upsertOptions.returnDocument).to.equal('after');

    return Promise.resolve({
      value: { points: 1, expire: 5000 },
    });
  });

  const limiter = new RateLimiterMongo({ storeClient: mongoClientV4, points: 2, duration: 5 });
  limiter.consume(testKey)
    .then((res) => {
      expect(res.consumedPoints).to.equal(1);
      done();
    })
    .catch(done);
});
|
||||
});
|
||||
// ===== test/RateLimiterMySQL.test.js (vendored, 416 lines added) =====
|
||||
const {
|
||||
describe, it, beforeEach, afterEach,
|
||||
} = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const sinon = require('sinon');
|
||||
const RateLimiterMySQL = require('../lib/RateLimiterMySQL');
|
||||
|
||||
describe('RateLimiterMySQL with fixed window', function RateLimiterMySQLTest() {
|
||||
this.timeout(5000);
// Minimal mysql connection double; `query` is re-stubbed per test.
const mysqlClient = {
  query: () => {},
};

let mysqlClientStub;

beforeEach(() => {
  // Default stub: every query (including table creation) succeeds.
  mysqlClientStub = sinon.stub(mysqlClient, 'query').callsFake((q, cb) => {
    cb();
  });
});

afterEach(() => {
  mysqlClientStub.restore();
});
|
||||
|
||||
it('call back with error if can not create db or table', (done) => {
  mysqlClientStub.restore();
  // Every query fails, so schema setup must surface an Error via callback.
  sinon.stub(mysqlClient, 'query').callsFake((q, cb) => {
    cb(Error('test'));
  });

  const rateLimiter = new RateLimiterMySQL({ // eslint-disable-line
    storeClient: mysqlClient, storeType: 'connection', points: 2, duration: 5,
  }, (e) => {
    expect(e instanceof Error).to.equal(true);
    done();
  });
});

it('get connection from pool', (done) => {
  const poolConnectionReleaseFn = sinon.spy();
  const mysqlPoolClient = {
    query: (q, data, cb) => {
      cb(null, [{ points: 1, expire: 1000 }]);
    },
    getConnection: (cb) => { cb(null, mysqlPoolClient); },
    release: poolConnectionReleaseFn,
  };

  let rateLimiter;

  process.on('unhandledRejection', (err) => console.error(err));

  new Promise((resolve) => {
    rateLimiter = new RateLimiterMySQL({
      storeClient: mysqlPoolClient, storeType: 'pool', points: 2, duration: 5, tableCreated: true,
    }, () => {
      resolve();
    });
  }).then(() => (
    // FIX: return the inner promise so a failing assertion rejects the outer
    // chain and reaches `.catch(done)` instead of becoming an unhandled
    // rejection that hangs the test.
    rateLimiter.get('testPool')
      .then((rlRes) => {
        expect(poolConnectionReleaseFn.calledOnce).to.equal(true);
        expect(rlRes.consumedPoints).to.equal(1);
        done();
      })
  )).catch((err) => {
    done(err);
  });
});
|
||||
|
||||
// With tableCreated: true the limiter must never touch the client for DDL.
it('do not create a table if tableCreated option is true', (done) => {
  const clientWithTable = {
    query: () => {},
  };
  sinon.spy(clientWithTable, 'query');
  const rateLimiter = new RateLimiterMySQL({ // eslint-disable-line
    storeClient: clientWithTable, storeType: 'connection', tableCreated: true,
  });
  setTimeout(() => {
    expect(clientWithTable.query.callCount).to.equal(0);
    done();
  }, 1000);
});

// The ready callback must fire even when table creation is skipped.
it('callback called even if tableCreated option is true', (done) => {
  mysqlClientStub.restore();
  sinon.stub(mysqlClient, 'query').callsFake((q, cb) => {
    cb();
  });

  const rateLimiter = new RateLimiterMySQL({ // eslint-disable-line
    storeClient: mysqlClient, storeType: 'connection', tableCreated: true,
  }, () => {
    done();
  });
});
|
||||
|
||||
// Install a fresh `mysqlClient.query` stub: row lookups (params array given)
// answer with `rows`; DDL-style calls (second arg is the callback) succeed.
const stubQueryRows = (rows) => {
  mysqlClientStub.restore();
  sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
    if (Array.isArray(data)) {
      cb(null, rows);
    } else {
      data(null);
    }
  });
};

it('consume 1 point', (done) => {
  const testKey = 'consume1';

  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient, storeType: 'connection', points: 2, duration: 5,
  }, () => {
    stubQueryRows([{ points: 1, expire: 5000 }]);

    rateLimiter.consume(testKey)
      .then((res) => {
        expect(res.consumedPoints).to.equal(1);
        done();
      })
      .catch(done);
  });
});

it('rejected when consume more than maximum points', (done) => {
  const testKey = 'consumerej';

  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 5,
  }, () => {
    stubQueryRows([{ points: 2, expire: 5000 }]);
    rateLimiter.consume(testKey, 2)
      .then(() => {
        done(Error('have to reject'));
      })
      .catch((err) => {
        expect(err.consumedPoints).to.equal(2);
        done();
      });
  });
});

it('blocks key for block duration when consumed more than points', (done) => {
  const testKey = 'block';

  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1, blockDuration: 2,
  }, () => {
    stubQueryRows([{ points: 2, expire: 1000 }]);

    rateLimiter.consume(testKey, 2)
      .then(() => {
        done(Error('must not resolve'));
      })
      .catch((rej) => {
        // blockDuration (2s) must push the retry time past plain duration (1s).
        expect(rej.msBeforeNext > 1000).to.equal(true);
        done();
      });
  });
});

it('return correct data with _getRateLimiterRes', () => {
  const rateLimiter = new RateLimiterMySQL({ points: 5, storeClient: mysqlClient, storeType: 'connection' });

  const res = rateLimiter._getRateLimiterRes('test', 1, [
    { points: 3, expire: Date.now() + 1000 },
  ]);

  expect(res.msBeforeNext <= 1000
    && res.consumedPoints === 3
    && res.isFirstInDuration === false
    && res.remainingPoints === 2).to.equal(true);
});
|
||||
|
||||
it('get points', (done) => {
  const testKey = 'get';

  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1,
  }, () => {
    mysqlClientStub.restore();
    sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
      cb(null, [{ points: 1, expire: 1000 }]);
    });

    rateLimiter.get(testKey)
      .then((res) => {
        expect(res.consumedPoints).to.equal(1);
        done();
      })
      .catch(done);
  });
});

it('get points return NULL if key is not set', (done) => {
  const testKey = 'getnull';

  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1,
  }, () => {
    mysqlClientStub.restore();
    // Empty result set -> no such key.
    sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
      cb(null, []);
    });

    rateLimiter.get(testKey)
      .then((res) => {
        expect(res).to.equal(null);
        done();
      })
      .catch(done);
  });
});

it('delete key and return true', (done) => {
  const testKey = 'deletetrue';

  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1,
  }, () => {
    mysqlClientStub.restore();
    sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
      cb(null, { affectedRows: 1 });
    });

    rateLimiter.delete(testKey)
      .then((res) => {
        expect(res).to.equal(true);
        done();
      })
      // FIX: without a catch, an assertion failure here became an unhandled
      // rejection and the test timed out instead of failing with the error.
      .catch(done);
  });
});

it('delete returns false, if there is no key', (done) => {
  const testKey = 'deletefalse';

  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1,
  }, () => {
    mysqlClientStub.restore();
    sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
      cb(null, { affectedRows: 0 });
    });

    rateLimiter.delete(testKey)
      .then((res) => {
        expect(res).to.equal(false);
        done();
      })
      .catch(done);
  });
});

it('delete rejects on error', (done) => {
  const testKey = 'deleteerr';

  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient, storeType: 'connection', points: 1, duration: 1,
  }, () => {
    mysqlClientStub.restore();
    sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
      cb(new Error('test'));
    });

    rateLimiter.delete(testKey)
      .catch(() => {
        done();
      });
  });
});
|
||||
|
||||
it('clearExpired method uses private _getConnection to get connection', (done) => {
  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient, storeType: 'sequelize',
  }, () => {
    const rlStub = sinon.stub(rateLimiter, '_getConnection').callsFake(() => {
      done();
      return Promise.resolve(mysqlClient);
    });

    rateLimiter.clearExpired(1);
    rlStub.restore();
  });
});

// duration: 0 -> the row never gets an expire timestamp and `get` must
// report msBeforeNext === -1.
it('does not expire key if duration set to 0', (done) => {
  const testKey = 'neverexpire';
  // Installs a query stub answering row lookups with `rows` and succeeding
  // on callback-style calls; returns the stub so each step can restore it.
  const installQueryStub = (rows) => sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
    if (Array.isArray(data)) {
      cb(null, rows);
    } else {
      data(null);
    }
  });

  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient,
    storeType: 'connection',
    points: 2,
    duration: 0,
  }, () => {
    mysqlClientStub.restore();
    const firstStub = installQueryStub([{ points: 1, expire: null }]);
    rateLimiter.consume(testKey, 1)
      .then(() => {
        firstStub.restore();
        const secondStub = installQueryStub([{ points: 2, expire: null }]);
        rateLimiter.consume(testKey, 1)
          .then(() => {
            secondStub.restore();
            installQueryStub([{ points: 2, expire: null }]);
            rateLimiter.get(testKey)
              .then((res) => {
                expect(res.consumedPoints).to.equal(2);
                expect(res.msBeforeNext).to.equal(-1);
                done();
              });
          })
          .catch(done);
      })
      .catch(done);
  });
});

// block(key, 0) blocks forever: still blocked after a second, no expiry.
it('block key forever, if secDuration is 0', (done) => {
  const testKey = 'neverexpire';
  const rateLimiter = new RateLimiterMySQL({
    storeClient: mysqlClient,
    storeType: 'connection',
    points: 2,
    duration: 1,
  }, () => {
    mysqlClientStub.restore();
    const queryStub = sinon.stub(mysqlClient, 'query').callsFake((q, data, cb) => {
      if (Array.isArray(data)) {
        cb(null, [{ points: 3, expire: null }]);
      } else {
        data(null);
      }
    });
    rateLimiter.block(testKey, 0)
      .then(() => {
        setTimeout(() => {
          rateLimiter.get(testKey)
            .then((res) => {
              expect(res.consumedPoints).to.equal(3);
              expect(res.msBeforeNext).to.equal(-1);
              queryStub.restore();
              done();
            });
        }, 1000);
      })
      .catch(done);
  });
});
|
||||
});
|
||||
// ===== test/RateLimiterPostgres.test.js (vendored, 617 lines added) =====
|
||||
/* eslint-disable no-new */
|
||||
const {
|
||||
describe, it, beforeEach, afterEach,
|
||||
} = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const sinon = require('sinon');
|
||||
const RateLimiterPostgres = require('../lib/RateLimiterPostgres');
|
||||
const RateLimiterMemory = require('../lib/RateLimiterMemory');
|
||||
|
||||
describe('RateLimiterPostgres with fixed window', function RateLimiterPostgresTest() {
|
||||
this.timeout(5000);
// Minimal pg client double; `query` is re-stubbed per test.
const pgClient = {
  query: () => {},
};

let pgClientStub;

beforeEach(() => {
  // Default stub: every query (including table creation) resolves.
  pgClientStub = sinon.stub(pgClient, 'query').callsFake(() => Promise.resolve());
});

afterEach(() => {
  pgClientStub.restore();
});
|
||||
|
||||
it('throw error if can not create table', (done) => {
  pgClientStub.restore();
  pgClientStub = sinon.stub(pgClient, 'query').callsFake(() => Promise.reject(Error('test')));

  new RateLimiterPostgres({
    storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
  }, (e) => {
    expect(e instanceof Error).to.equal(true);
    done();
  });
});

// With tableCreated: true the limiter must never touch the client for DDL.
it('do not create a table if tableCreated option is true', (done) => {
  const clientWithTable = {
    query: () => {},
  };
  sinon.spy(clientWithTable, 'query');
  const rateLimiter = new RateLimiterPostgres({ // eslint-disable-line
    storeClient: clientWithTable, storeType: 'client', tableCreated: true,
  });
  setTimeout(() => {
    expect(clientWithTable.query.callCount).to.equal(0);
    done();
  }, 1000);
});

it('callback called even if tableCreated option is true', (done) => {
  pgClientStub.restore();
  pgClientStub = sinon.stub(pgClient, 'query').callsFake(() => Promise.resolve());

  new RateLimiterPostgres({
    storeClient: pgClient, storeType: 'client', tableCreated: true,
  }, () => {
    done();
  });
});

it('consume 1 point', (done) => {
  const testKey = 'consume1';

  const rateLimiter = new RateLimiterPostgres({
    storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
  }, () => {
    pgClientStub.restore();
    pgClientStub = sinon.stub(pgClient, 'query').resolves({
      rows: [{ points: 1, expire: 5000 }],
    });

    rateLimiter.consume(testKey)
      .then((res) => {
        expect(res.consumedPoints).to.equal(1);
        done();
      })
      .catch(done);
  });
});

it('rejected when consume more than maximum points', (done) => {
  const testKey = 'consumerej';

  const rateLimiter = new RateLimiterPostgres({
    storeClient: pgClient, storeType: 'client', points: 1, duration: 5,
  }, () => {
    pgClientStub.restore();
    pgClientStub = sinon.stub(pgClient, 'query').resolves({
      rows: [{ points: 2, expire: 5000 }],
    });
    rateLimiter.consume(testKey, 2)
      .then(() => {
        done(Error('have to reject'));
      })
      .catch((err) => {
        expect(err.consumedPoints).to.equal(2);
        done();
      });
  });
});
|
||||
|
||||
it('blocks key for block duration when consumed more than points', (done) => {
  const testKey = 'block';

  const rateLimiter = new RateLimiterPostgres({
    storeClient: pgClient, storeType: 'client', points: 1, duration: 1, blockDuration: 2,
  }, () => {
    pgClientStub.restore();
    pgClientStub = sinon.stub(pgClient, 'query').resolves({
      rows: [{ points: 2, expire: 1000 }],
    });

    rateLimiter.consume(testKey, 2)
      .then(() => {
        done(Error('must not resolve'));
      })
      .catch((rej) => {
        // blockDuration (2s) must push retry time past plain duration (1s).
        expect(rej.msBeforeNext > 1000).to.equal(true);
        done();
      });
  });
});

it('return correct data with _getRateLimiterRes', () => {
  const rateLimiter = new RateLimiterPostgres({ points: 5, storeClient: pgClient, storeType: 'client' });

  const res = rateLimiter._getRateLimiterRes('test', 1, {
    rows: [{ points: 3, expire: Date.now() + 1000 }],
  });

  expect(res.msBeforeNext <= 1000
    && res.consumedPoints === 3
    && res.isFirstInDuration === false
    && res.remainingPoints === 2).to.equal(true);
});

it('get points', (done) => {
  const testKey = 'get';

  const rateLimiter = new RateLimiterPostgres({
    storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
  }, () => {
    pgClientStub.restore();
    pgClientStub = sinon.stub(pgClient, 'query').resolves({
      rows: [{ points: 1, expire: 5000 }],
    });

    rateLimiter.get(testKey)
      .then((res) => {
        expect(res.consumedPoints).to.equal(1);
        done();
      })
      .catch(done);
  });
});

it('get points return NULL if key is not set', (done) => {
  const testKey = 'getnull';

  const rateLimiter = new RateLimiterPostgres({
    storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
  }, () => {
    pgClientStub.restore();
    // Empty result set -> no such key.
    pgClientStub = sinon.stub(pgClient, 'query').resolves({
      rowCount: 0,
      rows: [],
    });

    rateLimiter.get(testKey)
      .then((res) => {
        expect(res).to.equal(null);
        done();
      })
      .catch(done);
  });
});
|
||||
|
||||
// When every Postgres query fails, the in-memory insuranceLimiter must take
// over transparently.
it('get points using insuranceLimiter on Postgres error', (done) => {
  const testKey = 'geterror';

  const rateLimiter = new RateLimiterPostgres({
    storeClient: pgClient,
    storeType: 'client',
    points: 1,
    duration: 1,
    insuranceLimiter: new RateLimiterMemory({
      points: 1,
      duration: 1,
    }),
  }, () => {
    pgClientStub.restore();
    pgClientStub = sinon.stub(pgClient, 'query').callsFake(() => Promise.reject(Error('test')));

    rateLimiter.get(testKey)
      .then((res) => {
        expect(res).to.equal(null);
        done();
      })
      .catch(done);
  });
});

it('block custom key using insuranceLimiter on Postgres error', (done) => {
  const testKey = 'postgreserrorblock';

  const rateLimiter = new RateLimiterPostgres({
    storeClient: pgClient,
    storeType: 'client',
    points: 1,
    duration: 1,
    insuranceLimiter: new RateLimiterMemory({
      points: 1,
      duration: 1,
    }),
  }, () => {
    pgClientStub.restore();
    pgClientStub = sinon.stub(pgClient, 'query').callsFake(() => Promise.reject(Error('test')));

    rateLimiter.block(testKey, 3)
      .then((res) => {
        expect(res.msBeforeNext > 2000 && res.msBeforeNext <= 3000).to.equal(true);
        done();
      })
      .catch(() => {
        done(Error('must not reject'));
      });
  });
});

it('delete key and return true', (done) => {
  const testKey = 'deletetrue';

  const rateLimiter = new RateLimiterPostgres({
    storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
  }, () => {
    pgClientStub.restore();
    pgClientStub = sinon.stub(pgClient, 'query').resolves({
      rowCount: 1,
    });

    rateLimiter.delete(testKey)
      .then((res) => {
        expect(res).to.equal(true);
        done();
      })
      // FIX: without a catch, an assertion failure here became an unhandled
      // rejection and the test timed out instead of reporting the error.
      .catch(done);
  });
});

it('delete returns false, if there is no key', (done) => {
  const testKey = 'deletefalse';

  const rateLimiter = new RateLimiterPostgres({
    storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
  }, () => {
    pgClientStub.restore();
    pgClientStub = sinon.stub(pgClient, 'query').resolves({
      rowCount: 0,
    });

    rateLimiter.delete(testKey)
      .then((res) => {
        expect(res).to.equal(false);
        done();
      })
      .catch(done);
  });
});

it('delete rejects on error', (done) => {
  const testKey = 'deleteerr';

  const rateLimiter = new RateLimiterPostgres({
    storeClient: pgClient, storeType: 'client', points: 2, duration: 5,
  }, () => {
    pgClientStub.restore();
    pgClientStub = sinon.stub(pgClient, 'query').rejects(new Error());

    rateLimiter.delete(testKey)
      .catch(() => {
        done();
      });
  });
});
|
||||
|
||||
// Two limiters on different tables must not reuse the same prepared
// statement name, otherwise pg would mix their query plans.
it('query sets unique prefix to prepared statement for every limiter table', (done) => {
  let queryName1;
  let rateLimiter1;
  let rateLimiter2;

  Promise.all([
    new Promise((resolve) => {
      rateLimiter1 = new RateLimiterPostgres({
        storeClient: pgClient, storeType: 'client', tableName: 'upsertqueryname1',
      }, () => {
        resolve();
      });
    }),
    new Promise((resolve) => {
      rateLimiter2 = new RateLimiterPostgres({
        storeClient: pgClient, storeType: 'client', tableName: 'upsertqueryname2',
      }, () => {
        resolve();
      });
    }),
  ]).then(() => {
    pgClientStub.restore();
    pgClientStub = sinon.stub(pgClient, 'query').callsFake((q) => {
      queryName1 = q.name;
      return Promise.resolve({
        rows: [{ points: 1, expire: 5000 }],
      });
    });

    // FIX: both consume promises are now returned up the chain so any
    // failure lands in `.catch(done)` instead of becoming an unhandled
    // rejection that left the test to time out silently.
    return rateLimiter1.consume('test')
      .then(() => {
        pgClientStub.restore();
        pgClientStub = sinon.stub(pgClient, 'query').callsFake((q) => {
          expect(q.name).to.not.equal(queryName1);
          done();
          return Promise.resolve({
            rows: [{ points: 1, expire: 5000 }],
          });
        });

        return rateLimiter2.consume('test');
      });
  }).catch(done);
});
|
||||
|
||||
// Client type is detected from the storeClient's constructor name.
it('set client type to "client" by constructor name for Client', (done) => {
  class Client {
    Client() {}
    query() {}
  }

  const rateLimiter = new RateLimiterPostgres({
    storeClient: new Client(),
  }, () => {
    expect(rateLimiter.clientType).to.equal('client');
    done();
  });
});

it('set client type to "pool" by constructor name for Pool', (done) => {
  class Pool {
    Pool() {}
    query() {}
  }

  const rateLimiter = new RateLimiterPostgres({
    storeClient: new Pool(),
  }, () => {
    expect(rateLimiter.clientType).to.equal('pool');
    done();
  });
});

it('set client type to "sequelize" by constructor name for Sequelize', (done) => {
  class Sequelize {
    Sequelize() {}
    query() {}
  }

  const rateLimiter = new RateLimiterPostgres({
    storeClient: new Sequelize(),
  }, () => {
    expect(rateLimiter.clientType).to.equal('sequelize');
    done();
  });
});

// A plain object has no recognizable constructor name -> constructor throws.
it('throw error if it is not possible to define client type', (done) => {
  try {
    new RateLimiterPostgres({
      storeClient: {},
    });
  } catch (err) {
    expect(err instanceof Error).to.equal(true);
    done();
  }
});

it('private _getConnection returns client for Pool', (done) => {
  class Pool {
    Pool() {}
    query() {}
  }

  const poolClient = new Pool();

  const rateLimiter = new RateLimiterPostgres({
    storeClient: poolClient,
  }, () => {
    rateLimiter._getConnection()
      .then((conn) => {
        expect(conn).to.equal(poolClient);
        done();
      });
  });
});

it('private _getConnection returns connection from manager for Sequelize', (done) => {
  class Sequelize {
    Sequelize() {}
    query() {}
  }

  const sequelizeClient = new Sequelize();
  sequelizeClient.connectionManager = {
    getConnection: () => Promise.resolve(123),
  };

  const rateLimiter = new RateLimiterPostgres({
    storeClient: sequelizeClient,
  }, () => {
    rateLimiter._getConnection()
      .then((res) => {
        expect(res).to.equal(123);
        done();
      });
  });
});
|
||||
|
||||
it('private _getConnection returns acquire connection from Knex', (done) => {
|
||||
class Knex {
|
||||
Knex() {}
|
||||
query() {}
|
||||
}
|
||||
|
||||
const client = new Knex();
|
||||
client.client = {
|
||||
acquireConnection: () => Promise.resolve(321),
|
||||
};
|
||||
|
||||
const rateLimiter = new RateLimiterPostgres({
|
||||
storeClient: client,
|
||||
storeType: 'knex',
|
||||
}, () => {
|
||||
rateLimiter._getConnection()
|
||||
.then((res) => {
|
||||
expect(res).to.equal(321);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('private _getConnection returns client for TypeORM', (done) => {
|
||||
class Pool {
|
||||
Pool() {}
|
||||
query() {}
|
||||
}
|
||||
|
||||
const typeORMConnection = {
|
||||
driver: { master: new Pool()}
|
||||
}
|
||||
|
||||
const rateLimiter = new RateLimiterPostgres({
|
||||
storeClient: typeORMConnection,
|
||||
storeType: 'typeorm',
|
||||
}, () => {
|
||||
rateLimiter._getConnection()
|
||||
.then((conn) => {
|
||||
expect(conn).to.equal(typeORMConnection.driver.master);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Pool does not require specific connection releasing', (done) => {
|
||||
class Pool {
|
||||
Pool() {}
|
||||
query() {}
|
||||
}
|
||||
|
||||
const client = new Pool();
|
||||
|
||||
const rateLimiter = new RateLimiterPostgres({
|
||||
storeClient: client,
|
||||
}, () => {
|
||||
expect(rateLimiter._releaseConnection()).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Sequelize release connection from manager', (done) => {
|
||||
class Sequelize {
|
||||
Sequelize() {}
|
||||
query() {}
|
||||
}
|
||||
|
||||
const client = new Sequelize();
|
||||
client.connectionManager = {
|
||||
releaseConnection: () => 123,
|
||||
};
|
||||
|
||||
const rateLimiter = new RateLimiterPostgres({
|
||||
storeClient: client,
|
||||
}, () => {
|
||||
expect(rateLimiter._releaseConnection()).to.equal(123);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Knex release connection from client', (done) => {
|
||||
class Knex {
|
||||
Knex() {}
|
||||
query() {}
|
||||
}
|
||||
|
||||
const client = new Knex();
|
||||
client.client = {
|
||||
releaseConnection: () => 321,
|
||||
};
|
||||
|
||||
const rateLimiter = new RateLimiterPostgres({
|
||||
storeClient: client,
|
||||
storeType: 'knex',
|
||||
}, () => {
|
||||
expect(rateLimiter._releaseConnection()).to.equal(321);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('TypeORM does not require specific connection releasing', (done) => {
|
||||
class Pool {
|
||||
Pool() {}
|
||||
query() {}
|
||||
}
|
||||
|
||||
const typeORMConnection = {
|
||||
driver: { master: new Pool()}
|
||||
}
|
||||
|
||||
const rateLimiter = new RateLimiterPostgres({
|
||||
storeClient: typeORMConnection,
|
||||
storeType: 'typeorm',
|
||||
}, () => {
|
||||
expect(rateLimiter._releaseConnection()).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('does not expire key if duration set to 0', (done) => {
|
||||
const testKey = 'neverexpire';
|
||||
const rateLimiter = new RateLimiterPostgres({
|
||||
storeClient: pgClient,
|
||||
storeType: 'connection',
|
||||
points: 2,
|
||||
duration: 0,
|
||||
}, () => {
|
||||
pgClientStub.restore();
|
||||
const queryStub = sinon.stub(pgClient, 'query').resolves({
|
||||
rows: [{ points: 1, expire: null }],
|
||||
});
|
||||
rateLimiter.consume(testKey, 1)
|
||||
.then(() => {
|
||||
queryStub.restore();
|
||||
sinon.stub(pgClient, 'query').resolves({
|
||||
rows: [{ points: 2, expire: null }],
|
||||
});
|
||||
rateLimiter.consume(testKey, 1)
|
||||
.then(() => {
|
||||
rateLimiter.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(2);
|
||||
expect(res.msBeforeNext).to.equal(-1);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
});
|
||||
it('block key forever, if secDuration is 0', (done) => {
|
||||
const testKey = 'neverexpire';
|
||||
const rateLimiter = new RateLimiterPostgres({
|
||||
storeClient: pgClient,
|
||||
storeType: 'connection',
|
||||
points: 2,
|
||||
duration: 1,
|
||||
}, () => {
|
||||
pgClientStub.restore();
|
||||
const queryStub = sinon.stub(pgClient, 'query').resolves({
|
||||
rows: [{ points: 3, expire: null }],
|
||||
});
|
||||
rateLimiter.block(testKey, 0)
|
||||
.then(() => {
|
||||
setTimeout(() => {
|
||||
rateLimiter.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(3);
|
||||
expect(res.msBeforeNext).to.equal(-1);
|
||||
queryStub.restore();
|
||||
done();
|
||||
});
|
||||
}, 1000);
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
183
framework/node_modules/node-rate-limiter-flexible/test/RateLimiterQueue.test.js
generated
vendored
Normal file
@@ -0,0 +1,183 @@
|
||||
const { describe, it } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const RateLimiterMemory = require('../lib/RateLimiterMemory');
|
||||
const BurstyLimiter = require('../lib/BurstyRateLimiter');
|
||||
const RateLimiterQueue = require('../lib/RateLimiterQueue');
|
||||
const RateLimiterQueueError = require('../lib/component/RateLimiterQueueError');
|
||||
|
||||
describe('RateLimiterQueue with FIFO queue', function RateLimiterQueueTest() {
|
||||
this.timeout(5000);
|
||||
|
||||
it('remove 1 token works and 1 remaining', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 2, duration: 1 });
|
||||
const rlQueue = new RateLimiterQueue(rlMemory);
|
||||
rlQueue.removeTokens(1)
|
||||
.then((remainingTokens) => {
|
||||
expect(remainingTokens).to.equal(1);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('remove 2 tokens from bursty limiter and returns correct remainingTokens 0', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
|
||||
const blMemory = new RateLimiterMemory({ points: 1, duration: 3 });
|
||||
const burstyLimiter = new BurstyLimiter(rlMemory, blMemory);
|
||||
const rlQueue = new RateLimiterQueue(burstyLimiter);
|
||||
const startTime = Date.now();
|
||||
rlQueue.removeTokens(1)
|
||||
.then((remainingTokens1) => {
|
||||
expect(remainingTokens1).to.equal(0);
|
||||
rlQueue.removeTokens(1)
|
||||
.then((remainingTokens2) => {
|
||||
expect(remainingTokens2).to.equal(0);
|
||||
expect(Date.now() - startTime < 1000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('remove 2 tokens from bursty limiter and wait 1 more', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
|
||||
const blMemory = new RateLimiterMemory({ points: 1, duration: 3 });
|
||||
const burstyLimiter = new BurstyLimiter(rlMemory, blMemory);
|
||||
const rlQueue = new RateLimiterQueue(burstyLimiter);
|
||||
const startTime = Date.now();
|
||||
rlQueue.removeTokens(1)
|
||||
.then(() => {
|
||||
rlQueue.removeTokens(1)
|
||||
.then(() => {
|
||||
rlQueue.removeTokens(1)
|
||||
.then((remainingTokens) => {
|
||||
expect(remainingTokens).to.equal(0);
|
||||
expect(Date.now() - startTime > 1000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('remove all tokens works and 0 remaining', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 2, duration: 1 });
|
||||
const rlQueue = new RateLimiterQueue(rlMemory);
|
||||
rlQueue.removeTokens(2)
|
||||
.then((remainingTokens) => {
|
||||
expect(remainingTokens).to.equal(0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('return error if try to remove more tokens than allowed', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 2, duration: 1 });
|
||||
const rlQueue = new RateLimiterQueue(rlMemory);
|
||||
rlQueue.removeTokens(3)
|
||||
.then(() => {
|
||||
})
|
||||
.catch((err) => {
|
||||
expect(err instanceof RateLimiterQueueError).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('queues 1 request and fire it after 1 second', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
|
||||
const rlQueue = new RateLimiterQueue(rlMemory);
|
||||
const time = Date.now();
|
||||
rlQueue.removeTokens(1).then(() => {
|
||||
});
|
||||
rlQueue.removeTokens(1).then((remainingTokens) => {
|
||||
expect(remainingTokens).to.equal(0);
|
||||
expect(Date.now() - time >= 1000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('respects order of queued callbacks', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
|
||||
const rlQueue = new RateLimiterQueue(rlMemory);
|
||||
let index;
|
||||
rlQueue.removeTokens(1).then(() => {
|
||||
index = 0;
|
||||
});
|
||||
rlQueue.removeTokens(1).then(() => {
|
||||
expect(index).to.equal(0);
|
||||
index = 1;
|
||||
});
|
||||
rlQueue.removeTokens(1).then(() => {
|
||||
expect(index).to.equal(1);
|
||||
index = 2;
|
||||
});
|
||||
rlQueue.removeTokens(1).then(() => {
|
||||
expect(index).to.equal(2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('return error if queue length reaches maximum', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 1, duration: 1 });
|
||||
const rlQueue = new RateLimiterQueue(rlMemory, { maxQueueSize: 1 });
|
||||
rlQueue.removeTokens(1).then(() => {
|
||||
});
|
||||
rlQueue.removeTokens(1).then(() => {
|
||||
done();
|
||||
});
|
||||
rlQueue.removeTokens(1)
|
||||
.then(() => {
|
||||
done(new Error('must not allow to queue'));
|
||||
})
|
||||
.catch((err) => {
|
||||
expect(err instanceof RateLimiterQueueError).to.equal(true);
|
||||
});
|
||||
});
|
||||
|
||||
it('getTokensRemaining works', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 2, duration: 1 });
|
||||
const rlQueue = new RateLimiterQueue(rlMemory);
|
||||
rlQueue.removeTokens(1)
|
||||
.then(() => {
|
||||
rlQueue.getTokensRemaining()
|
||||
.then((tokensRemaining) => {
|
||||
expect(tokensRemaining).to.equal(1);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('getTokensRemaining returns maximum if internal limiter by key does not exist', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 23, duration: 1 });
|
||||
const rlQueue = new RateLimiterQueue(rlMemory);
|
||||
rlQueue.getTokensRemaining('test')
|
||||
.then((tokensRemaining) => {
|
||||
expect(tokensRemaining).to.equal(23);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('creates internal instance by key and removes tokens from it', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 2, duration: 1 });
|
||||
const rlQueue = new RateLimiterQueue(rlMemory);
|
||||
rlQueue.removeTokens(1, 'customkey')
|
||||
.then((remainingTokens) => {
|
||||
expect(remainingTokens).to.equal(1);
|
||||
rlQueue.getTokensRemaining()
|
||||
.then((defaultTokensRemaining) => {
|
||||
expect(defaultTokensRemaining).to.equal(2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('getTokensRemaining returns maximum if internal limiter does not have data', (done) => {
|
||||
const rlMemory = new RateLimiterMemory({ points: 23, duration: 1 });
|
||||
const rlQueue = new RateLimiterQueue(rlMemory);
|
||||
rlQueue.removeTokens(1, 'nodata')
|
||||
.then(() => {
|
||||
setTimeout(() => {
|
||||
rlQueue.getTokensRemaining('nodata')
|
||||
.then((tokensRemaining) => {
|
||||
expect(tokensRemaining).to.equal(23);
|
||||
done();
|
||||
});
|
||||
}, 1001)
|
||||
})
|
||||
});
|
||||
});
|
||||
902
framework/node_modules/node-rate-limiter-flexible/test/RateLimiterRedis.test.js
generated
vendored
Normal file
@@ -0,0 +1,902 @@
|
||||
/* eslint-disable new-cap */
|
||||
/* eslint-disable no-unused-expressions */
|
||||
const { describe, it, beforeEach } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const sinon = require('sinon');
|
||||
const RateLimiterRedis = require('../lib/RateLimiterRedis');
|
||||
const redisMock = require('redis-mock');
|
||||
const { redisEvalMock, getRedisClientClosed } = require('./helper');
|
||||
|
||||
describe('RateLimiterRedis with fixed window', function RateLimiterRedisTest() {
|
||||
this.timeout(5000);
|
||||
const redisMockClient = redisMock.createClient();
|
||||
|
||||
redisMockClient.eval = redisEvalMock(redisMockClient);
|
||||
|
||||
const redisClientClosed = getRedisClientClosed(redisMockClient);
|
||||
|
||||
beforeEach((done) => {
|
||||
redisMockClient.flushall(done);
|
||||
});
|
||||
|
||||
it('consume 1 point', (done) => {
|
||||
const testKey = 'consume1';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 2,
|
||||
duration: 5,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
redisMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
|
||||
if (!err) {
|
||||
expect(consumedPoints).to.equal('1');
|
||||
done();
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('rejected when consume more than maximum points', (done) => {
|
||||
const testKey = 'consume2';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 1,
|
||||
duration: 5,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {})
|
||||
.catch((rejRes) => {
|
||||
expect(rejRes.msBeforeNext >= 0).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('execute evenly over duration', (done) => {
|
||||
const testKey = 'consumeEvenly';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 2,
|
||||
duration: 5,
|
||||
execEvenly: true,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
const timeFirstConsume = Date.now();
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
/* Second consume should be delayed more than 2 seconds
|
||||
Explanation:
|
||||
1) consume at 0ms, remaining duration = 5000ms
|
||||
2) delayed consume for (4999 / (0 + 2)) ~= 2500ms, where 2 is a fixed value
|
||||
, because it mustn't delay in the beginning and in the end of duration
|
||||
3) consume after 2500ms by timeout
|
||||
*/
|
||||
const diff = Date.now() - timeFirstConsume;
|
||||
expect(diff > 2400 && diff < 2600).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('execute evenly over duration with minimum delay 20 ms', (done) => {
|
||||
const testKey = 'consumeEvenlyMinDelay';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 100,
|
||||
duration: 1,
|
||||
execEvenly: true,
|
||||
execEvenlyMinDelayMs: 20,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
const timeFirstConsume = Date.now();
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
expect(Date.now() - timeFirstConsume >= 20).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('makes penalty', (done) => {
|
||||
const testKey = 'penalty1';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 3,
|
||||
duration: 5,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiter
|
||||
.penalty(testKey)
|
||||
.then(() => {
|
||||
redisMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
|
||||
if (!err) {
|
||||
expect(consumedPoints).to.equal('2');
|
||||
done();
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('reward points', (done) => {
|
||||
const testKey = 'reward';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 1,
|
||||
duration: 5,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiter
|
||||
.reward(testKey)
|
||||
.then(() => {
|
||||
redisMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
|
||||
if (!err) {
|
||||
expect(consumedPoints).to.equal('0');
|
||||
done();
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
})
|
||||
.catch((err) => {
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
|
||||
it('block key in memory when inMemory block options set up', (done) => {
|
||||
const testKey = 'blockmem';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 1,
|
||||
duration: 5,
|
||||
inMemoryBlockOnConsumed: 2,
|
||||
inMemoryBlockDuration: 10,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {})
|
||||
.catch((rejRes) => {
|
||||
// msBeforeNext more than 5000, so key was blocked
|
||||
expect(rejRes.msBeforeNext > 5000 && rejRes.remainingPoints === 0).to.equal(true);
|
||||
done();
|
||||
});
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
});
|
||||
|
||||
it('block key in memory for msBeforeNext milliseconds', (done) => {
|
||||
const testKey = 'blockmempoints';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 1,
|
||||
duration: 5,
|
||||
inMemoryBlockOnConsumed: 1,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
expect(rateLimiter._inMemoryBlockedKeys.msBeforeExpire(rateLimiter.getKey(testKey)) > 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
});
|
||||
|
||||
it('reject after block key in memory for msBeforeNext, if consumed more than points', (done) => {
|
||||
const testKey = 'blockmempointsreject';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 1,
|
||||
duration: 5,
|
||||
inMemoryBlockOnConsumed: 1,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(new Error('must not'));
|
||||
})
|
||||
.catch(() => {
|
||||
expect(rateLimiter._inMemoryBlockedKeys.msBeforeExpire(rateLimiter.getKey(testKey)) > 0).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('expire inMemory blocked key', (done) => {
|
||||
const testKey = 'blockmem2';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
inMemoryBlockOnConsumed: 2,
|
||||
inmemoryBlockDuration: 2, // @deprecated Kept to test backward compatability
|
||||
});
|
||||
// It blocks on the first consume as consumed points more than available
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {})
|
||||
.catch(() => {
|
||||
setTimeout(() => {
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then((res) => {
|
||||
// Block expired
|
||||
expect(res.msBeforeNext <= 1000 && res.remainingPoints === 0).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
}, 2001);
|
||||
});
|
||||
});
|
||||
|
||||
it('throws error when inMemoryBlockOnConsumed is not set, but inMemoryBlockDuration is set', (done) => {
|
||||
try {
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
inMemoryBlockDuration: 2,
|
||||
});
|
||||
rateLimiter.reward('test');
|
||||
} catch (err) {
|
||||
expect(err instanceof Error).to.equal(true);
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('throws error when inMemoryBlockOnConsumed less than points', (done) => {
|
||||
try {
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 2,
|
||||
inmemoryBlockOnConsumed: 1, // @deprecated Kept to test backward compatability
|
||||
});
|
||||
rateLimiter.reward('test');
|
||||
} catch (err) {
|
||||
expect(err instanceof Error).to.equal(true);
|
||||
done();
|
||||
}
|
||||
});
|
||||
|
||||
it('throws error on RedisClient error', (done) => {
|
||||
const testKey = 'rediserror';
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisClientClosed,
|
||||
});
|
||||
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {})
|
||||
.catch((rejRes) => {
|
||||
expect(rejRes instanceof Error).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('consume using insuranceLimiter when RedisClient error', (done) => {
|
||||
const testKey = 'rediserror2';
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisClientClosed,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
insuranceLimiter: new RateLimiterRedis({
|
||||
points: 2,
|
||||
duration: 2,
|
||||
storeClient: redisMockClient,
|
||||
}),
|
||||
});
|
||||
|
||||
// Consume from insurance limiter with different options
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints === 1 && res.msBeforeNext > 1000).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
});
|
||||
|
||||
it('penalty using insuranceLimiter when RedisClient error', (done) => {
|
||||
const testKey = 'rediserror3';
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisClientClosed,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
insuranceLimiter: new RateLimiterRedis({
|
||||
points: 2,
|
||||
duration: 2,
|
||||
storeClient: redisMockClient,
|
||||
}),
|
||||
});
|
||||
|
||||
rateLimiter
|
||||
.penalty(testKey)
|
||||
.then(() => {
|
||||
redisMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
|
||||
if (!err) {
|
||||
expect(consumedPoints).to.equal('1');
|
||||
done();
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
});
|
||||
|
||||
it('reward using insuranceLimiter when RedisClient error', (done) => {
|
||||
const testKey = 'rediserror4';
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisClientClosed,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
insuranceLimiter: new RateLimiterRedis({
|
||||
points: 2,
|
||||
duration: 2,
|
||||
storeClient: redisMockClient,
|
||||
}),
|
||||
});
|
||||
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {
|
||||
rateLimiter
|
||||
.reward(testKey)
|
||||
.then(() => {
|
||||
redisMockClient.get(rateLimiter.getKey(testKey), (err, consumedPoints) => {
|
||||
if (!err) {
|
||||
expect(consumedPoints).to.equal('1');
|
||||
done();
|
||||
}
|
||||
});
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
})
|
||||
.catch((rejRes) => {
|
||||
done(rejRes);
|
||||
});
|
||||
});
|
||||
|
||||
it('block using insuranceLimiter when RedisClient error', (done) => {
|
||||
const testKey = 'rediserrorblock';
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisClientClosed,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
insuranceLimiter: new RateLimiterRedis({
|
||||
points: 1,
|
||||
duration: 1,
|
||||
storeClient: redisMockClient,
|
||||
}),
|
||||
});
|
||||
|
||||
rateLimiter
|
||||
.block(testKey, 3)
|
||||
.then((res) => {
|
||||
expect(res.msBeforeNext > 2000 && res.msBeforeNext <= 3000).to.equal(true);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('use keyPrefix from options', () => {
|
||||
const testKey = 'key';
|
||||
const keyPrefix = 'test';
|
||||
const rateLimiter = new RateLimiterRedis({ keyPrefix, storeClient: redisClientClosed });
|
||||
|
||||
expect(rateLimiter.getKey(testKey)).to.equal('test:key');
|
||||
});
|
||||
|
||||
it('blocks key for block duration when consumed more than points', (done) => {
|
||||
const testKey = 'block';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
blockDuration: 2,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej.msBeforeNext > 1000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('reject with error, if internal block by blockDuration failed', (done) => {
|
||||
const testKey = 'blockdurationfailed';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
blockDuration: 2,
|
||||
});
|
||||
sinon.stub(rateLimiter, '_block').callsFake(() => Promise.reject(new Error()));
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej instanceof Error).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('block expires in blockDuration seconds', (done) => {
|
||||
const testKey = 'blockexpires';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
blockDuration: 2,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey, 2)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch(() => {
|
||||
setTimeout(() => {
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('must resolve'));
|
||||
});
|
||||
}, 2000);
|
||||
});
|
||||
});
|
||||
|
||||
it('block custom key', (done) => {
|
||||
const testKey = 'blockcustom';
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 1,
|
||||
duration: 1,
|
||||
});
|
||||
rateLimiter.block(testKey, 2).then(() => {
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
done(Error('must not resolve'));
|
||||
})
|
||||
.catch((rej) => {
|
||||
expect(rej.msBeforeNext > 1000).to.equal(true);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('get points', (done) => {
|
||||
const testKey = 'get';
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 2,
|
||||
duration: 1,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiter
|
||||
.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.consumedPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('get must not reject'));
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('consume must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('disconnected redis client', () => {
|
||||
it('attempt to invoke redis if rejectIfRedisNotReady is not set', (done) => {
|
||||
const testKey = 'get';
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisClientClosed,
|
||||
points: 2,
|
||||
duration: 1,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.catch((error) => {
|
||||
expect(error.message).to.equal('closed');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('get throws error with mock redis', (done) => {
|
||||
const testKey = 'get';
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisClientClosed,
|
||||
points: 2,
|
||||
duration: 1,
|
||||
rejectIfRedisNotReady: true,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.catch((error) => {
|
||||
expect(error.message).to.equal('Redis connection is not ready');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('get throws error with disconnected ioredis', (done) => {
|
||||
const testKey = 'get';
|
||||
|
||||
const disconnectedIoRedis = {
|
||||
status: 'closed',
|
||||
};
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: disconnectedIoRedis,
|
||||
points: 2,
|
||||
duration: 1,
|
||||
rejectIfRedisNotReady: true,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.catch((error) => {
|
||||
expect(error.message).to.equal('Redis connection is not ready');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('get throws error with disconnected node-redis', (done) => {
|
||||
const testKey = 'get';
|
||||
|
||||
const disconnectedIoRedis = {
|
||||
isReady: () => false,
|
||||
};
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: disconnectedIoRedis,
|
||||
points: 2,
|
||||
duration: 1,
|
||||
rejectIfRedisNotReady: true,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.catch((error) => {
|
||||
expect(error.message).to.equal('Redis connection is not ready');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('get returns NULL if key is not set', (done) => {
|
||||
const testKey = 'getnull';
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 2,
|
||||
duration: 1,
|
||||
});
|
||||
rateLimiter
|
||||
.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res).to.equal(null);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('get must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('get supports ioredis format', (done) => {
|
||||
const testKey = 'getioredis';
|
||||
class multiStubIoRedisClient {
|
||||
multi() {
|
||||
const multi = redisMockClient.multi();
|
||||
multi.exec = (cb) => {
|
||||
cb(null, [[null, '2'], [null, 4993]]);
|
||||
};
|
||||
|
||||
return multi;
|
||||
}
|
||||
}
|
||||
|
||||
const rateLimiter = new RateLimiterRedis({
|
||||
storeClient: redisMockClient,
|
||||
points: 3,
|
||||
duration: 5,
|
||||
});
|
||||
rateLimiter
|
||||
.consume(testKey)
|
||||
.then(() => {
|
||||
rateLimiter.client = new multiStubIoRedisClient();
|
||||
rateLimiter
|
||||
.get(testKey)
|
||||
.then((res) => {
|
||||
expect(res.remainingPoints).to.equal(1);
|
||||
done();
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('get must not reject'));
|
||||
});
|
||||
})
|
||||
.catch(() => {
|
||||
done(Error('consume must not reject'));
|
||||
});
|
||||
});
|
||||
|
||||
it('delete key and return true', (done) => {
  const testKey = 'deletetrue';
  const rateLimiter = new RateLimiterRedis({
    storeClient: redisMockClient,
    points: 2,
    duration: 1,
  });

  // Create the key first, then delete it and expect a truthy result.
  rateLimiter.consume(testKey).then(() => {
    rateLimiter.delete(testKey).then((resDel) => {
      expect(resDel).to.equal(true);
      done();
    });
  });
});

it('delete returns false, if there is no key', (done) => {
  const testKey = 'deletefalse';
  const rateLimiter = new RateLimiterRedis({
    storeClient: redisMockClient,
    points: 2,
    duration: 1,
  });

  rateLimiter.delete(testKey).then((resDel) => {
    expect(resDel).to.equal(false);
    done();
  });
});

it('delete rejects on error', (done) => {
  const testKey = 'deleteerr';
  // redisClientClosed fails every command, so delete must reject.
  const rateLimiter = new RateLimiterRedis({
    storeClient: redisClientClosed,
    points: 2,
    duration: 1,
  });

  rateLimiter.delete(testKey).catch(() => done());
});
|
||||
|
||||
it('consume applies options.customDuration to set expire', (done) => {
  const testKey = 'consume.customDuration';
  const rateLimiter = new RateLimiterRedis({
    storeClient: redisMockClient,
    points: 2,
    duration: 5,
  });

  rateLimiter.consume(testKey, 1, { customDuration: 1 })
    .then((res) => {
      // customDuration of 1s must override the limiter's 5s duration.
      expect(res.msBeforeNext <= 1000).to.be.true;
      done();
    })
    .catch((err) => {
      done(err);
    });
});
|
||||
|
||||
// FIX: the first test titled 'insurance limiter on error consume applies
// options.customDuration to set expire' was an exact copy of the preceding
// plain customDuration test (it never configured an insurance limiter).
// The misnamed duplicate is removed; the genuine insurance-limiter test below
// is the single remaining copy.
it('insurance limiter on error consume applies options.customDuration to set expire', (done) => {
  const testKey = 'consume.customDuration.onerror';

  // Main limiter uses a closed client, so every consume falls through
  // to the insurance limiter backed by the working mock client.
  const rateLimiter = new RateLimiterRedis({
    storeClient: redisClientClosed,
    points: 1,
    duration: 2,
    insuranceLimiter: new RateLimiterRedis({
      points: 2,
      duration: 3,
      storeClient: redisMockClient,
    }),
  });

  // Consume from insurance limiter with different options
  rateLimiter
    .consume(testKey, 1, { customDuration: 1 })
    .then((res) => {
      // customDuration (1s) must override the insurance limiter's 3s duration.
      expect(res.remainingPoints === 1 && res.msBeforeNext <= 1000).to.equal(true);
      done();
    })
    .catch((rejRes) => {
      done(rejRes);
    });
});
|
||||
|
||||
it('block key in memory works with blockDuration on store', (done) => {
  const testKey = 'blockmem+blockduration';
  const rateLimiter = new RateLimiterRedis({
    storeClient: redisMockClient,
    points: 1,
    duration: 5,
    blockDuration: 10,
    inMemoryBlockOnConsumed: 2,
    inMemoryBlockDuration: 10,
  });
  rateLimiter
    .consume(testKey)
    .then(() => {
      rateLimiter
        .consume(testKey)
        .then(() => {
          // FIX: the original `.then(() => {})` silently swallowed an
          // unexpected resolve, leaving the test to hang until mocha's
          // timeout; fail fast with a clear message instead.
          done(Error('second consume must reject: limit is 1 point'));
        })
        .catch((rejRes) => {
          rateLimiter.get(testKey)
            .then((getRes) => {
              expect(getRes.msBeforeNext > 5000 && rejRes.remainingPoints === 0).to.equal(true);
              // msBeforeNext more than 5000, so key was blocked in memory
              expect(rejRes.msBeforeNext > 5000 && rejRes.remainingPoints === 0).to.equal(true);
              done();
            });
        });
    })
    .catch((rejRes) => {
      done(rejRes);
    });
});
|
||||
|
||||
it('does not expire key if duration set to 0', (done) => {
  const testKey = 'neverexpire';
  const rateLimiter = new RateLimiterRedis({ storeClient: redisMockClient, points: 2, duration: 0 });

  rateLimiter.consume(testKey, 1)
    .then(() => rateLimiter.consume(testKey, 1))
    .then(() => rateLimiter.get(testKey))
    .then((res) => {
      // A zero duration means the key never expires: pttl reports -1.
      expect(res.consumedPoints).to.equal(2);
      expect(res.msBeforeNext).to.equal(-1);
      done();
    })
    .catch((err) => {
      done(err);
    });
});
|
||||
|
||||
it('block key forever, if secDuration is 0', (done) => {
  // FIX: use a dedicated key. The previous test stores a never-expiring
  // record under 'neverexpire' (duration 0), so reusing that key made this
  // test order-dependent on the mock store's leftover state.
  const testKey = 'blockforever';
  const rateLimiter = new RateLimiterRedis({ storeClient: redisMockClient, points: 1, duration: 1 });
  rateLimiter.block(testKey, 0)
    .then(() => {
      // Well past the limiter's 1s duration the block must still hold.
      setTimeout(() => {
        rateLimiter.get(testKey)
          .then((res) => {
            expect(res.consumedPoints).to.equal(2);
            expect(res.msBeforeNext).to.equal(-1);
            done();
          });
      }, 2000);
    })
    .catch((err) => {
      done(err);
    });
});
|
||||
|
||||
it('set points by key', (done) => {
  const testKey = 'set';
  const rateLimiter = new RateLimiterRedis({ storeClient: redisMockClient, points: 1, duration: 1 });

  rateLimiter.set(testKey, 12)
    .then(() => rateLimiter.get(testKey))
    .then((res) => {
      expect(res.consumedPoints).to.equal(12);
      done();
    })
    .catch((err) => {
      done(err);
    });
});

it('set points by key forever', (done) => {
  const testKey = 'setforever';
  const rateLimiter = new RateLimiterRedis({ storeClient: redisMockClient, points: 1, duration: 1 });

  rateLimiter.set(testKey, 12, 0)
    .then(() => {
      // Wait past the limiter's 1s duration to prove the value persists.
      setTimeout(() => {
        rateLimiter.get(testKey).then((res) => {
          expect(res.consumedPoints).to.equal(12);
          expect(res.msBeforeNext).to.equal(-1);
          done();
        });
      }, 1100);
    })
    .catch((err) => {
      done(err);
    });
});
|
||||
});
|
||||
58
framework/node_modules/node-rate-limiter-flexible/test/RateLimiterRes.test.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
const { describe, it, beforeEach } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const RateLimiterRes = require('../lib/RateLimiterRes');
|
||||
|
||||
describe('RateLimiterRes response object', () => {
  let res;
  beforeEach(() => {
    res = new RateLimiterRes();
  });

  it('setup defaults on construct', () => {
    expect(res.msBeforeNext === 0 && res.remainingPoints === 0)
      .to.be.equal(true);
  });

  it('msBeforeNext set and get', () => {
    res.msBeforeNext = 123;
    expect(res.msBeforeNext).to.equal(123);
  });

  it('points set and get', () => {
    res.remainingPoints = 4;
    expect(res.remainingPoints).to.equal(4);
  });

  it('consumed points set and get', () => {
    res.consumedPoints = 5;
    expect(res.consumedPoints).to.equal(5);
  });

  it('isFirstInDuration set and get with cast', () => {
    // Truthy non-boolean input is cast to a boolean by the setter.
    res.isFirstInDuration = 1;
    expect(res.isFirstInDuration).to.equal(true);
  });

  it('returns object on toJSON call', () => {
    res.msBeforeNext = 12;
    res.remainingPoints = 3;
    res.consumedPoints = 2;
    res.isFirstInDuration = true;

    expect(res.toJSON()).to.deep.equal({
      remainingPoints: 3,
      msBeforeNext: 12,
      consumedPoints: 2,
      isFirstInDuration: true,
    });
  });

  it('returns JSON string on toString call', () => {
    res.msBeforeNext = 2;
    res.remainingPoints = 0;
    res.consumedPoints = 5;
    res.isFirstInDuration = false;

    expect(res.toString()).to.equal('{"remainingPoints":0,"msBeforeNext":2,"consumedPoints":5,"isFirstInDuration":false}');
  });
});
|
||||
122
framework/node_modules/node-rate-limiter-flexible/test/RateLimiterStoreAbstract.test.js
generated
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
/* eslint-disable security/detect-object-injection */
|
||||
const { describe, it } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const RateLimiterStoreAbstract = require('../lib/RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('../lib/RateLimiterRes');
|
||||
|
||||
// Minimal in-memory store implementation used to exercise the abstract
// fixed-window logic of RateLimiterStoreAbstract in tests.
class RateLimiterStoreMemory extends RateLimiterStoreAbstract {
  constructor(opts) {
    super(opts);
    // Plain object used as the backing "database".
    this._inMemoryDataAsStorage = {};
  }

  // Translate a raw store result into the library's RateLimiterRes shape.
  _getRateLimiterRes(rlKey, changedPoints, storeResult) {
    const res = new RateLimiterRes();
    res.consumedPoints = storeResult.points;
    res.isFirstInDuration = res.consumedPoints === changedPoints;
    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    res.msBeforeNext = storeResult.msBeforeNext;
    return res;
  }

  _get(rlKey) {
    const stored = this._inMemoryDataAsStorage[rlKey];
    return Promise.resolve(typeof stored === 'undefined' ? null : stored);
  }

  _delete(rlKey) {
    if (typeof this._inMemoryDataAsStorage[rlKey] === 'undefined') {
      return Promise.resolve(false);
    }
    delete this._inMemoryDataAsStorage[rlKey];
    return Promise.resolve(true);
  }

  _upsert(rlKey, points, msDuration) {
    const now = Date.now();
    const existing = this._inMemoryDataAsStorage[rlKey];

    if (typeof existing === 'undefined') {
      // First write for this key: open a fresh window.
      this._inMemoryDataAsStorage[rlKey] = { points, expired: now + msDuration };
      return Promise.resolve({ points, msBeforeNext: msDuration });
    }

    if (existing.expired > now) {
      // Window still open: accumulate points and report remaining time.
      existing.points += points;
      return Promise.resolve({ points: existing.points, msBeforeNext: existing.expired - now });
    }

    // Window expired: restart it with the new points.
    existing.points = points;
    existing.expired = now + msDuration;
    return Promise.resolve({ points, msBeforeNext: msDuration });
  }
}
|
||||
|
||||
describe('RateLimiterStoreAbstract with fixed window', () => {
  it('delete all in memory blocked keys', (done) => {
    const rateLimiter = new RateLimiterStoreMemory({
      points: 1,
      duration: 1,
      // avoid fire block method
      blockDuration: 0,
      inMemoryBlockOnConsumed: 1,
      inMemoryBlockDuration: 1,
      keyPrefix: '',
    });

    // Over-consume both keys so in-memory blocking kicks in for each.
    Promise.allSettled([
      rateLimiter.consume('key1', 2),
      rateLimiter.consume('key2', 2),
    ])
      .then(() => {
        expect(rateLimiter._inMemoryBlockedKeys._keys.key1).not.eq(undefined);
        expect(rateLimiter._inMemoryBlockedKeys._keys.key2).not.eq(undefined);

        rateLimiter.deleteInMemoryBlockedAll();
        expect(rateLimiter._inMemoryBlockedKeys._keys.key1).eq(undefined);
        expect(rateLimiter._inMemoryBlockedKeys._keys.key2).eq(undefined);

        done();
      })
      .catch((err) => {
        done(err);
      });
  });

  it('delete specific key should also deleting in-memory data', (done) => {
    const rateLimiter = new RateLimiterStoreMemory({
      points: 1,
      duration: 1,
      // avoid fire block method
      blockDuration: 0,
      inMemoryBlockOnConsumed: 1,
      inMemoryBlockDuration: 1,
      keyPrefix: '',
    });

    // Over-consume so the key becomes blocked in memory, then delete it.
    rateLimiter.consume('key', 2).catch(() => {
      expect(rateLimiter._inMemoryBlockedKeys._keys.key).not.eq(undefined);

      rateLimiter.delete('key').then((isExist) => {
        expect(rateLimiter._inMemoryBlockedKeys._keys.key).eq(undefined);
        expect(isExist).eq(true);

        done();
      });
    });
  });
});
|
||||
83
framework/node_modules/node-rate-limiter-flexible/test/RateLimiterUnion.test.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
// eslint-disable no-unused-expressions
|
||||
const { describe, it, beforeEach } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const RateLimiterUnion = require('../lib/RateLimiterUnion');
|
||||
const RateLimiterMemory = require('../lib/RateLimiterMemory');
|
||||
|
||||
describe('RateLimiterUnion with fixed window', () => {
  const keyPrefix1 = 'limit1';
  const keyPrefix2 = 'limit2';
  let rateLimiter;

  beforeEach(() => {
    const limiter1 = new RateLimiterMemory({
      keyPrefix: keyPrefix1,
      points: 1,
      duration: 1,
    });
    const limiter2 = new RateLimiterMemory({
      keyPrefix: keyPrefix2,
      points: 2,
      duration: 5,
    });
    rateLimiter = new RateLimiterUnion(limiter1, limiter2);
  });

  it('does not allow to create union with limiters number less than 2', () => {
    // FIX: the original try/catch silently passed when nothing was thrown;
    // expect(...).to.throw fails the test if the constructor does not throw.
    expect(() => new RateLimiterUnion(new RateLimiterMemory({
      keyPrefix: keyPrefix1,
      points: 1,
      duration: 1,
    }))).to.throw(Error);
  });

  it('all limiters have to be instance of RateLimiterAbstract', () => {
    // FIX: same silent-pass issue as above — assert the throw explicitly.
    expect(() => new RateLimiterUnion(new RateLimiterMemory({
      keyPrefix: keyPrefix1,
      points: 1,
      duration: 1,
    }), {})).to.throw(Error);
  });

  it('consume from all limiters', (done) => {
    rateLimiter.consume('test')
      .then((res) => {
        expect(res[keyPrefix1].remainingPoints === 0 && res[keyPrefix2].remainingPoints === 1).to.equal(true);
        done();
      })
      .catch(() => {
        done(Error('must not reject'));
      });
  });

  it('reject consume one "limit1", which does not have enough points', (done) => {
    rateLimiter.consume('test', 2)
      .then(() => {
        done(Error('must not resolve'));
      })
      .catch((rej) => {
        expect(rej[keyPrefix1].remainingPoints === 0).to.equal(true);
        done();
      });
  });

  it('reject both do not have enough points', (done) => {
    rateLimiter.consume('test', 3)
      .then(() => {
        done(Error('must not resolve'));
      })
      .catch((rej) => {
        expect(rej[keyPrefix1].remainingPoints === 0 && rej[keyPrefix2].remainingPoints === 0).to.equal(true);
        done();
      });
  });
});
|
||||
|
||||
82
framework/node_modules/node-rate-limiter-flexible/test/component/BlockedKeys/BlockedKeys.test.js
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
const { describe, it, beforeEach } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const BlockedKeys = require('../../../lib/component/BlockedKeys/BlockedKeys');
|
||||
|
||||
describe('BlockedKeys', () => {
  let bk;
  beforeEach(() => {
    bk = new BlockedKeys();
  });

  it('add blocked key', () => {
    bk.add('key', 5);
    bk.collectExpired();
    expect(bk.msBeforeExpire('key') > 0).to.equal(true);
  });

  it('expire blocked key', (done) => {
    bk.add('key', 1);
    // After the 1s block elapses, msBeforeExpire must report 0.
    setTimeout(() => {
      expect(bk.msBeforeExpire('key')).to.equal(0);
      done();
    }, 1001);
  });

  it('check not blocked key', () => {
    bk.add('key', 1);
    expect(bk.msBeforeExpire('key1')).to.equal(0);
  });

  it('do not collect expired on add', (done) => {
    bk.add('key', 1);
    bk.add('key1', 1);
    setTimeout(() => {
      bk.add('key2', 1);
      // Expired keys remain until an explicit collection pass.
      expect(Object.keys(bk._keys).length).to.equal(3);
      done();
    }, 1001);
  });

  it('collect expired on add if there more than 999 blocked keys', (done) => {
    for (let i = 0; i < 1000; i++) {
      bk.add(`key${i}`, 1);
    }

    setTimeout(() => {
      bk.add('key1', 1);
      expect(Object.keys(bk._keys).length === 1 && bk._addedKeysAmount === 1)
        .to.equal(true);
      done();
    }, 1001);
  });

  it('do not collect expired when key is not blocked', (done) => {
    bk.add('key', 1);
    setTimeout(() => {
      bk.msBeforeExpire('key');
      expect(Object.keys(bk._keys).length === 1 && bk._addedKeysAmount === 1)
        .to.equal(true);
      done();
    }, 1001);
  });

  it('collect expired when key is blocked', (done) => {
    bk.add('key', 1);
    bk.add('blocked', 2);
    setTimeout(() => {
      bk.msBeforeExpire('blocked');
      expect(Object.keys(bk._keys).length).to.equal(1);
      done();
    }, 1001);
  });

  it('duplicated keys do not brake collectExpired and msBeforeExpire', (done) => {
    bk.add('key', 1);
    bk.add('key', 2);
    setTimeout(() => {
      bk.add('key', 3);
      expect(bk.msBeforeExpire('key') > 2000).to.equal(true);
      done();
    }, 1001);
  });
});
|
||||
79
framework/node_modules/node-rate-limiter-flexible/test/component/MemoryStorage/MemoryStorage.test.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
const { describe, it, beforeEach } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const MemoryStorage = require('../../../lib/component/MemoryStorage/MemoryStorage');
|
||||
|
||||
describe('MemoryStorage', function MemoryStorageTest() {
  const testKey = 'test';
  const val = 34;
  let storage;

  this.timeout(5000);

  beforeEach(() => {
    storage = new MemoryStorage();
  });

  it('should set and get', (done) => {
    storage.set(testKey, val, 5);
    expect(storage.get(testKey).consumedPoints).to.equal(val);
    done();
  });

  it('should delete record on expire', (done) => {
    storage.set(testKey, val, 1);
    setTimeout(() => {
      expect(storage.get(testKey)).to.equal(null);
      done();
    }, 2000);
  });

  it('should incrby', (done) => {
    storage.set(testKey, val, 5);
    storage.incrby(testKey, 2);
    expect(storage.get(testKey).consumedPoints).to.equal(val + 2);
    done();
  });

  it('incrby should create record if it is not set', (done) => {
    storage.incrby(testKey, val, 5);
    expect(storage.get(testKey).consumedPoints).to.equal(val);
    done();
  });

  it('incrby should create record if expiresAt is not set', (done) => {
    storage.set(testKey, val); // FIX: missing semicolon in the original
    expect(storage.get(testKey).expiresAt).to.equal(undefined);
    storage.incrby(testKey, val, 5);
    // FIX: the original asserted `expiresAt !== null`, which is also true for
    // undefined and so could never fail; assert it actually received a value.
    expect(storage.get(testKey).expiresAt).to.not.equal(undefined);
    done();
  });

  it('should delete record and return true, if it was there', () => {
    storage.set(testKey, val, 10);
    expect(storage.delete(testKey)).to.equal(true);
    expect(storage.get(testKey)).to.equal(null);
  });

  it('return false, if there is no record to delete', () => {
    expect(storage.delete(testKey)).to.equal(false);
  });

  it('should not fail in the absence of Timeout::unref', (done) => {
    // Node (where we most likely be running tests) provides `Timeout.prototype.unref`, however
    // MemoryStorage should run in environments where `Timeout.prototype.unref` is not provided
    // (e.g. browsers). For this test we remove `unref` from `Timeout.prototype` only for the
    // duration of this test, to verify that MemoryStorage.prototype.set won't throw.
    const handle = setTimeout(() => {}, 0);
    const isHandleObject = typeof handle === 'object' && !!handle.constructor;
    let timeoutUnref;
    if (isHandleObject) {
      timeoutUnref = handle.constructor.prototype.unref;
      delete handle.constructor.prototype.unref;
    }
    expect(() => new MemoryStorage().set('key', 0, 0.001)).to.not.throw();
    setTimeout(done, 250);
    if (isHandleObject) {
      handle.constructor.prototype.unref = timeoutUnref;
    }
  });
});
|
||||
21
framework/node_modules/node-rate-limiter-flexible/test/component/MemoryStorage/Record.test.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
const { describe, it, beforeEach } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const Record = require('../../../lib/component/MemoryStorage/Record');
|
||||
|
||||
describe('MemoryStorage Record', () => {
  let rec;
  beforeEach(() => {
    rec = new Record();
  });

  it('value set with cast to int and get', () => {
    // String input is coerced to an integer by the setter.
    rec.value = '123';
    expect(rec.value).to.equal(123);
  });

  it('expiresAt set unix time and get Date', () => {
    const now = Date.now();
    rec.expiresAt = now;
    expect(rec.expiresAt.getTime()).to.equal(now);
  });
});
|
||||
11
framework/node_modules/node-rate-limiter-flexible/test/component/RateLimiterQueueError.test.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
const { describe, it } = require('mocha');
|
||||
const { expect } = require('chai');
|
||||
const RateLimiterQueueError = require('../../lib/component/RateLimiterQueueError');
|
||||
|
||||
describe('RateLimiterQueueError', () => {
  it('supports extra argument in constructor', (done) => {
    const queueError = new RateLimiterQueueError('test', 'extra');
    expect(queueError.extra).to.equal('extra');
    done();
  });
});
|
||||
57
framework/node_modules/node-rate-limiter-flexible/test/helper.js
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
// Mock eval function with almost the same behaviour as Lua script
// It gives 99% sure, that all work as expected
function redisEvalMock(redisMockClient) {
  return (script, numberOfKeys, rlKey, points, secDuration, callback) => {
    const multi = redisMockClient.multi();
    const hasExpire = secDuration > 0;
    if (hasExpire) {
      multi.set(rlKey, 0, 'EX', secDuration, 'NX');
    }

    multi.incrby(rlKey, points)
      .pttl(rlKey)
      .exec((err, res) => {
        // Drop the SET reply (when present) so the callback always
        // receives [incrby result, pttl result].
        callback(err, hasExpire ? res.slice(1) : res);
      });
  };
}
|
||||
|
||||
// emulate closed RedisClient
class RedisClient {
  constructor(redisMockClient) {
    this._redisMockClient = redisMockClient;
  }

  // Returns a multi whose exec always fails, as a closed connection would.
  multi() {
    const multi = this._redisMockClient.multi();
    multi.exec = (cb) => cb(new Error('closed'), []);
    return multi;
  }
}
|
||||
|
||||
function getRedisClientClosed(redisClient) {
|
||||
const redisClientClosedRaw = new RedisClient(redisClient);
|
||||
return new Proxy(redisClientClosedRaw, {
|
||||
get: (func, name) => {
|
||||
if (name === 'defineCommand') {
|
||||
return undefined;
|
||||
}
|
||||
if (name in redisClientClosedRaw) {
|
||||
return redisClientClosedRaw[name];
|
||||
}
|
||||
return function (...args) {
|
||||
const cb = args.pop();
|
||||
cb(Error('closed'));
|
||||
};
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
redisEvalMock,
|
||||
getRedisClientClosed,
|
||||
};
|
||||