First commit
7
framework/node_modules/Dependencies
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
# Unify Dependencies
|
||||
|
||||
🗸 https://sourceforge.net/projects/better-sqlite3.mirror/
|
||||
|
||||
🗸 https://sourceforge.net/projects/uwebsockets-js.mirror/
|
||||
|
||||
🗸 https://sourceforge.net/projects/esbuild.mirror/
|
||||
5
framework/node_modules/better-sqlite3/.gitattributes
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
*.lzz linguist-language=C++
|
||||
*.cpp -diff
|
||||
*.hpp -diff
|
||||
*.c -diff
|
||||
*.h -diff
|
||||
21
framework/node_modules/better-sqlite3/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2017 Joshua Wise
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
BIN
framework/node_modules/better-sqlite3/binaries/better-sqlite3-v11.7.1-node-v115-linux-x64.tar.gz
generated
vendored
Normal file
BIN
framework/node_modules/better-sqlite3/binaries/better-sqlite3-v8.1.0-node-v108-darwin-x64/Release/better_sqlite3.node
generated
vendored
Normal file
BIN
framework/node_modules/better-sqlite3/binaries/better-sqlite3-v8.1.0-node-v108-linux-arm64/Release/better_sqlite3.node
generated
vendored
Normal file
BIN
framework/node_modules/better-sqlite3/binaries/better-sqlite3-v8.1.0-node-v108-linux-x64/Release/better_sqlite3.node
generated
vendored
Normal file
BIN
framework/node_modules/better-sqlite3/binaries/better-sqlite3-v8.1.0-node-v108-win32-x64/Release/better_sqlite3.node
generated
vendored
Normal file
BIN
framework/node_modules/better-sqlite3/binaries/better-sqlite3-v8.1.0-node-v115-linux-x64/Release/better_sqlite3.node
generated
vendored
Executable file
25
framework/node_modules/better-sqlite3/getBinary.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
|
||||
|
||||
// Print the name of the prebuilt better-sqlite3 binary tarball that matches
// the current Node.js runtime (platform, ABI module version, CPU arch).
const platform = process.platform;
const version = process.versions.modules;
const architecture = process.arch;
const betterSqliteVersion = "8.1.0";

console.log(platform);
console.log(version);
console.log(architecture);

// e.g. better-sqlite3-v8.1.0-node-v108-linux-x64.tar.gz
const filename = `better-sqlite3-v${betterSqliteVersion}-node-v${version}-${platform}-${architecture}.tar.gz`;

console.log(filename);
|
||||
100
framework/node_modules/better-sqlite3/lib/database.js
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
'use strict';
const fs = require('fs');
const path = require('path');
const util = require('./util');
const SqliteError = require('./sqlite-error');

// Lazily-loaded native addon, shared by every connection that does not
// specify an explicit "nativeBinding" path.
let DEFAULT_ADDON;

/**
 * Opens (or creates) an SQLite database connection.
 * @param {string|Buffer} [filenameGiven] - database path, ':memory:', or a
 *   serialized database Buffer to deserialize in memory.
 * @param {object} [options] - { readonly, fileMustExist, timeout, verbose, nativeBinding }
 */
function Database(filenameGiven, options) {
	// Support calling without `new`.
	if (new.target == null) return new Database(filenameGiven, options);

	// Apply defaults. A Buffer argument means "deserialize this in memory".
	let buffer;
	if (Buffer.isBuffer(filenameGiven)) {
		buffer = filenameGiven;
		filenameGiven = ':memory:';
	}
	if (filenameGiven == null) filenameGiven = '';
	if (options == null) options = {};

	// Validate arguments
	if (typeof filenameGiven !== 'string') throw new TypeError('Expected first argument to be a string');
	if (typeof options !== 'object') throw new TypeError('Expected second argument to be an options object');
	if ('readOnly' in options) throw new TypeError('Misspelled option "readOnly" should be "readonly"');
	if ('memory' in options) throw new TypeError('Option "memory" was removed in v7.0.0 (use ":memory:" filename instead)');

	// Interpret options
	const filename = filenameGiven.trim();
	const anonymous = filename === '' || filename === ':memory:';
	const readonly = util.getBooleanOption(options, 'readonly');
	const fileMustExist = util.getBooleanOption(options, 'fileMustExist');
	const timeout = 'timeout' in options ? options.timeout : 5000;
	const verbose = 'verbose' in options ? options.verbose : null;
	const nativeBindingPath = 'nativeBinding' in options ? options.nativeBinding : null;

	// Validate interpreted options
	if (readonly && anonymous && !buffer) throw new TypeError('In-memory/temporary databases cannot be readonly');
	if (!Number.isInteger(timeout) || timeout < 0) throw new TypeError('Expected the "timeout" option to be a positive integer');
	if (timeout > 0x7fffffff) throw new RangeError('Option "timeout" cannot be greater than 2147483647');
	if (verbose != null && typeof verbose !== 'function') throw new TypeError('Expected the "verbose" option to be a function');
	if (nativeBindingPath != null && typeof nativeBindingPath !== 'string') throw new TypeError('Expected the "nativeBinding" option to be a string');

	// Load the native addon. Without an explicit path, pick the vendored
	// prebuilt binary matching this runtime's ABI/platform/arch and cache it.
	let addon;
	if (nativeBindingPath == null) {
		const abiVersion = process.versions.modules;
		const folderName = `better-sqlite3-v8.1.0-node-v${abiVersion}-${process.platform}-${process.arch}`;
		addon = DEFAULT_ADDON || (DEFAULT_ADDON = require(`../binaries/${folderName}/Release/better_sqlite3.node`));
	} else {
		addon = require(path.resolve(nativeBindingPath).replace(/(\.node)?$/, '.node'));
	}
	if (!addon.isInitialized) {
		addon.setErrorConstructor(SqliteError);
		addon.isInitialized = true;
	}

	// Make sure the specified directory exists
	if (!anonymous && !fs.existsSync(path.dirname(filename))) {
		throw new TypeError('Cannot open database because the directory does not exist');
	}

	Object.defineProperties(this, {
		[util.cppdb]: { value: new addon.Database(filename, filenameGiven, anonymous, readonly, fileMustExist, timeout, verbose || null, buffer || null) },
		...wrappers.getters,
	});
}
|
||||
|
||||
const wrappers = require('./methods/wrappers');

// Wire the public API onto the prototype. Each method module forwards to the
// native connection handle stored at this[util.cppdb].
Object.assign(Database.prototype, {
	prepare: wrappers.prepare,
	transaction: require('./methods/transaction'),
	pragma: require('./methods/pragma'),
	backup: require('./methods/backup'),
	serialize: require('./methods/serialize'),
	function: require('./methods/function'),
	aggregate: require('./methods/aggregate'),
	table: require('./methods/table'),
	loadExtension: wrappers.loadExtension,
	exec: wrappers.exec,
	close: wrappers.close,
	defaultSafeIntegers: wrappers.defaultSafeIntegers,
	unsafeMode: wrappers.unsafeMode,
	[util.inspect]: require('./methods/inspect'),
});

module.exports = Database;
|
||||
3
framework/node_modules/better-sqlite3/lib/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
'use strict';

// Public entry point: the Database constructor, with SqliteError attached.
const Database = require('./database');
Database.SqliteError = require('./sqlite-error');
module.exports = Database;
|
||||
43
framework/node_modules/better-sqlite3/lib/methods/aggregate.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
'use strict';
|
||||
const { getBooleanOption, cppdb } = require('../util');
|
||||
|
||||
module.exports = function defineAggregate(name, options) {
|
||||
// Validate arguments
|
||||
if (typeof name !== 'string') throw new TypeError('Expected first argument to be a string');
|
||||
if (typeof options !== 'object' || options === null) throw new TypeError('Expected second argument to be an options object');
|
||||
if (!name) throw new TypeError('User-defined function name cannot be an empty string');
|
||||
|
||||
// Interpret options
|
||||
const start = 'start' in options ? options.start : null;
|
||||
const step = getFunctionOption(options, 'step', true);
|
||||
const inverse = getFunctionOption(options, 'inverse', false);
|
||||
const result = getFunctionOption(options, 'result', false);
|
||||
const safeIntegers = 'safeIntegers' in options ? +getBooleanOption(options, 'safeIntegers') : 2;
|
||||
const deterministic = getBooleanOption(options, 'deterministic');
|
||||
const directOnly = getBooleanOption(options, 'directOnly');
|
||||
const varargs = getBooleanOption(options, 'varargs');
|
||||
let argCount = -1;
|
||||
|
||||
// Determine argument count
|
||||
if (!varargs) {
|
||||
argCount = Math.max(getLength(step), inverse ? getLength(inverse) : 0);
|
||||
if (argCount > 0) argCount -= 1;
|
||||
if (argCount > 100) throw new RangeError('User-defined functions cannot have more than 100 arguments');
|
||||
}
|
||||
|
||||
this[cppdb].aggregate(start, step, inverse, result, name, argCount, safeIntegers, deterministic, directOnly);
|
||||
return this;
|
||||
};
|
||||
|
||||
// Extract a function-valued option; `required` makes its absence an error.
const getFunctionOption = (options, key, required) => {
	const fn = key in options ? options[key] : null;
	if (typeof fn !== 'function') {
		if (fn != null) throw new TypeError(`Expected the "${key}" option to be a function`);
		if (required) throw new TypeError(`Missing required option "${key}"`);
		return null;
	}
	return fn;
};

// Validate and return fn.length (must be a non-negative integer).
const getLength = (fn) => {
	const { length } = fn;
	if (!Number.isInteger(length) || length < 0) {
		throw new TypeError('Expected function.length to be a positive integer');
	}
	return length;
};
|
||||
67
framework/node_modules/better-sqlite3/lib/methods/backup.js
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
'use strict';
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { promisify } = require('util');
|
||||
const { cppdb } = require('../util');
|
||||
const fsAccess = promisify(fs.access);
|
||||
|
||||
module.exports = async function backup(filename, options) {
|
||||
if (options == null) options = {};
|
||||
|
||||
// Validate arguments
|
||||
if (typeof filename !== 'string') throw new TypeError('Expected first argument to be a string');
|
||||
if (typeof options !== 'object') throw new TypeError('Expected second argument to be an options object');
|
||||
|
||||
// Interpret options
|
||||
filename = filename.trim();
|
||||
const attachedName = 'attached' in options ? options.attached : 'main';
|
||||
const handler = 'progress' in options ? options.progress : null;
|
||||
|
||||
// Validate interpreted options
|
||||
if (!filename) throw new TypeError('Backup filename cannot be an empty string');
|
||||
if (filename === ':memory:') throw new TypeError('Invalid backup filename ":memory:"');
|
||||
if (typeof attachedName !== 'string') throw new TypeError('Expected the "attached" option to be a string');
|
||||
if (!attachedName) throw new TypeError('The "attached" option cannot be an empty string');
|
||||
if (handler != null && typeof handler !== 'function') throw new TypeError('Expected the "progress" option to be a function');
|
||||
|
||||
// Make sure the specified directory exists
|
||||
await fsAccess(path.dirname(filename)).catch(() => {
|
||||
throw new TypeError('Cannot save backup because the directory does not exist');
|
||||
});
|
||||
|
||||
const isNewFile = await fsAccess(filename).then(() => false, () => true);
|
||||
return runBackup(this[cppdb].backup(this, attachedName, filename, isNewFile), handler || null);
|
||||
};
|
||||
|
||||
// Drive the native backup handle with setImmediate ticks until it finishes.
// `handler` (if given) is invoked with progress after each chunk and may
// return a new page rate for the next transfer.
const runBackup = (backup, handler) => {
	let rate = 0; // first transfer probes with rate 0
	let useDefault = true;

	return new Promise((resolve, reject) => {
		setImmediate(function step() {
			try {
				const progress = backup.transfer(rate);
				if (!progress.remainingPages) {
					backup.close();
					resolve(progress);
					return;
				}
				// After the probe, fall back to 100 pages per tick.
				if (useDefault) {
					useDefault = false;
					rate = 100;
				}
				if (handler) {
					const ret = handler(progress);
					if (ret !== undefined) {
						if (typeof ret === 'number' && !Number.isNaN(ret)) {
							rate = Math.max(0, Math.min(0x7fffffff, Math.round(ret)));
						} else {
							throw new TypeError('Expected progress callback to return a number or undefined');
						}
					}
				}
				setImmediate(step);
			} catch (err) {
				// Any failure (transfer error, bad callback) closes the handle.
				backup.close();
				reject(err);
			}
		});
	});
};
|
||||
31
framework/node_modules/better-sqlite3/lib/methods/function.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
'use strict';
|
||||
const { getBooleanOption, cppdb } = require('../util');
|
||||
|
||||
module.exports = function defineFunction(name, options, fn) {
|
||||
// Apply defaults
|
||||
if (options == null) options = {};
|
||||
if (typeof options === 'function') { fn = options; options = {}; }
|
||||
|
||||
// Validate arguments
|
||||
if (typeof name !== 'string') throw new TypeError('Expected first argument to be a string');
|
||||
if (typeof fn !== 'function') throw new TypeError('Expected last argument to be a function');
|
||||
if (typeof options !== 'object') throw new TypeError('Expected second argument to be an options object');
|
||||
if (!name) throw new TypeError('User-defined function name cannot be an empty string');
|
||||
|
||||
// Interpret options
|
||||
const safeIntegers = 'safeIntegers' in options ? +getBooleanOption(options, 'safeIntegers') : 2;
|
||||
const deterministic = getBooleanOption(options, 'deterministic');
|
||||
const directOnly = getBooleanOption(options, 'directOnly');
|
||||
const varargs = getBooleanOption(options, 'varargs');
|
||||
let argCount = -1;
|
||||
|
||||
// Determine argument count
|
||||
if (!varargs) {
|
||||
argCount = fn.length;
|
||||
if (!Number.isInteger(argCount) || argCount < 0) throw new TypeError('Expected function.length to be a positive integer');
|
||||
if (argCount > 100) throw new RangeError('User-defined functions cannot have more than 100 arguments');
|
||||
}
|
||||
|
||||
this[cppdb].function(fn, name, argCount, safeIntegers, deterministic, directOnly);
|
||||
return this;
|
||||
};
|
||||
7
framework/node_modules/better-sqlite3/lib/methods/inspect.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
'use strict';
|
||||
const DatabaseInspection = function Database() {};
|
||||
|
||||
module.exports = function inspect(depth, opts) {
|
||||
return Object.assign(new DatabaseInspection(), this);
|
||||
};
|
||||
|
||||
12
framework/node_modules/better-sqlite3/lib/methods/pragma.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
'use strict';
|
||||
const { getBooleanOption, cppdb } = require('../util');
|
||||
|
||||
module.exports = function pragma(source, options) {
|
||||
if (options == null) options = {};
|
||||
if (typeof source !== 'string') throw new TypeError('Expected first argument to be a string');
|
||||
if (typeof options !== 'object') throw new TypeError('Expected second argument to be an options object');
|
||||
const simple = getBooleanOption(options, 'simple');
|
||||
|
||||
const stmt = this[cppdb].prepare(`PRAGMA ${source}`, this, true);
|
||||
return simple ? stmt.pluck().get() : stmt.all();
|
||||
};
|
||||
16
framework/node_modules/better-sqlite3/lib/methods/serialize.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
'use strict';
|
||||
const { cppdb } = require('../util');
|
||||
|
||||
module.exports = function serialize(options) {
|
||||
if (options == null) options = {};
|
||||
|
||||
// Validate arguments
|
||||
if (typeof options !== 'object') throw new TypeError('Expected first argument to be an options object');
|
||||
|
||||
// Interpret and validate options
|
||||
const attachedName = 'attached' in options ? options.attached : 'main';
|
||||
if (typeof attachedName !== 'string') throw new TypeError('Expected the "attached" option to be a string');
|
||||
if (!attachedName) throw new TypeError('The "attached" option cannot be an empty string');
|
||||
|
||||
return this[cppdb].serialize(attachedName);
|
||||
};
|
||||
189
framework/node_modules/better-sqlite3/lib/methods/table.js
generated
vendored
Normal file
@@ -0,0 +1,189 @@
|
||||
'use strict';
|
||||
const { cppdb } = require('../util');
|
||||
|
||||
module.exports = function defineTable(name, factory) {
|
||||
// Validate arguments
|
||||
if (typeof name !== 'string') throw new TypeError('Expected first argument to be a string');
|
||||
if (!name) throw new TypeError('Virtual table module name cannot be an empty string');
|
||||
|
||||
// Determine whether the module is eponymous-only or not
|
||||
let eponymous = false;
|
||||
if (typeof factory === 'object' && factory !== null) {
|
||||
eponymous = true;
|
||||
factory = defer(parseTableDefinition(factory, 'used', name));
|
||||
} else {
|
||||
if (typeof factory !== 'function') throw new TypeError('Expected second argument to be a function or a table definition object');
|
||||
factory = wrapFactory(factory);
|
||||
}
|
||||
|
||||
this[cppdb].table(factory, name, eponymous);
|
||||
return this;
|
||||
};
|
||||
|
||||
// Wrap a user factory so the native layer receives a parsed definition.
function wrapFactory(factory) {
	return function virtualTableFactory(moduleName, databaseName, tableName, ...args) {
		// `this` inside the user factory describes where the table is created.
		const thisObject = {
			module: moduleName,
			database: databaseName,
			table: tableName,
		};

		// Invoke the factory to obtain a table definition.
		const def = apply.call(factory, thisObject, args);
		if (typeof def !== 'object' || def === null) {
			throw new TypeError(`Virtual table module "${moduleName}" did not return a table definition object`);
		}

		return parseTableDefinition(def, 'returned', moduleName);
	};
}
|
||||
|
||||
// Validate a table definition object and convert it into the tuple the
// native layer expects: [CREATE TABLE sql, row generator, parameters,
// safeIntegers flag, directOnly flag].
function parseTableDefinition(def, verb, moduleName) {
	// Required properties.
	if (!hasOwnProperty.call(def, 'rows')) {
		throw new TypeError(`Virtual table module "${moduleName}" ${verb} a table definition without a "rows" property`);
	}
	if (!hasOwnProperty.call(def, 'columns')) {
		throw new TypeError(`Virtual table module "${moduleName}" ${verb} a table definition without a "columns" property`);
	}

	// "rows" must be a generator function.
	const rows = def.rows;
	if (typeof rows !== 'function' || Object.getPrototypeOf(rows) !== GeneratorFunctionPrototype) {
		throw new TypeError(`Virtual table module "${moduleName}" ${verb} a table definition with an invalid "rows" property (should be a generator function)`);
	}

	// "columns" must be a non-empty array of unique strings (copied
	// defensively before validation).
	let columns = def.columns;
	if (!Array.isArray(columns) || !(columns = [...columns]).every(x => typeof x === 'string')) {
		throw new TypeError(`Virtual table module "${moduleName}" ${verb} a table definition with an invalid "columns" property (should be an array of strings)`);
	}
	if (columns.length !== new Set(columns).size) {
		throw new TypeError(`Virtual table module "${moduleName}" ${verb} a table definition with duplicate column names`);
	}
	if (!columns.length) {
		throw new RangeError(`Virtual table module "${moduleName}" ${verb} a table definition with zero columns`);
	}

	// "parameters": explicit array of unique strings, or inferred from the
	// generator's declared arity.
	let parameters;
	if (hasOwnProperty.call(def, 'parameters')) {
		parameters = def.parameters;
		if (!Array.isArray(parameters) || !(parameters = [...parameters]).every(x => typeof x === 'string')) {
			throw new TypeError(`Virtual table module "${moduleName}" ${verb} a table definition with an invalid "parameters" property (should be an array of strings)`);
		}
	} else {
		parameters = inferParameters(rows);
	}
	if (parameters.length !== new Set(parameters).size) {
		throw new TypeError(`Virtual table module "${moduleName}" ${verb} a table definition with duplicate parameter names`);
	}
	if (parameters.length > 32) {
		throw new RangeError(`Virtual table module "${moduleName}" ${verb} a table definition with more than the maximum number of 32 parameters`);
	}
	for (const parameter of parameters) {
		if (columns.includes(parameter)) {
			throw new TypeError(`Virtual table module "${moduleName}" ${verb} a table definition with column "${parameter}" which was ambiguously defined as both a column and parameter`);
		}
	}

	// Optional "safeIntegers" boolean (2 = inherit the database default).
	let safeIntegers = 2;
	if (hasOwnProperty.call(def, 'safeIntegers')) {
		const bool = def.safeIntegers;
		if (typeof bool !== 'boolean') {
			throw new TypeError(`Virtual table module "${moduleName}" ${verb} a table definition with an invalid "safeIntegers" property (should be a boolean)`);
		}
		safeIntegers = +bool;
	}

	// Optional "directOnly" boolean.
	let directOnly = false;
	if (hasOwnProperty.call(def, 'directOnly')) {
		directOnly = def.directOnly;
		if (typeof directOnly !== 'boolean') {
			throw new TypeError(`Virtual table module "${moduleName}" ${verb} a table definition with an invalid "directOnly" property (should be a boolean)`);
		}
	}

	// SQL declaration: parameters become HIDDEN columns ahead of real ones.
	const columnDefinitions = [
		...parameters.map(identifier).map(str => `${str} HIDDEN`),
		...columns.map(identifier),
	];
	return [
		`CREATE TABLE x(${columnDefinitions.join(', ')});`,
		wrapGenerator(rows, new Map(columns.map((x, i) => [x, parameters.length + i])), moduleName),
		parameters,
		safeIntegers,
		directOnly,
	];
}
|
||||
|
||||
// Adapt the user's row generator into the shape the native layer consumes:
// each yielded value is the argument list followed by one slot per column.
function wrapGenerator(generator, columnMap, moduleName) {
	return function* virtualTable(...args) {
		// Defensively clone Buffer arguments: the generator could mutate
		// them, which would make us report wrong hidden-column values and
		// potentially corrupt the database.
		const output = args.map(x => Buffer.isBuffer(x) ? Buffer.from(x) : x);
		// Pre-fill column slots with nulls to keep the array dense
		// (avoids v8 holey-array deoptimization).
		for (let i = 0; i < columnMap.size; ++i) {
			output.push(null);
		}
		for (const row of generator(...args)) {
			if (Array.isArray(row)) {
				extractRowArray(row, output, columnMap.size, moduleName);
			} else if (typeof row === 'object' && row !== null) {
				extractRowObject(row, output, columnMap, moduleName);
			} else {
				throw new TypeError(`Virtual table module "${moduleName}" yielded something that isn't a valid row object`);
			}
			yield output;
		}
	};
}
|
||||
|
||||
// Copy an array-shaped row into the trailing column slots of `output`.
function extractRowArray(row, output, columnCount, moduleName) {
	if (row.length !== columnCount) {
		throw new TypeError(`Virtual table module "${moduleName}" yielded a row with an incorrect number of columns`);
	}
	const offset = output.length - columnCount;
	for (let i = 0; i < columnCount; ++i) {
		output[offset + i] = row[i];
	}
}
|
||||
|
||||
// Copy an object-shaped row into `output` using the column-name -> index
// map; every declared column must be present, and no extras are allowed.
function extractRowObject(row, output, columnMap, moduleName) {
	let written = 0;
	for (const key of Object.keys(row)) {
		const index = columnMap.get(key);
		if (index === undefined) {
			throw new TypeError(`Virtual table module "${moduleName}" yielded a row with an undeclared column "${key}"`);
		}
		output[index] = row[key];
		written += 1;
	}
	if (written !== columnMap.size) {
		throw new TypeError(`Virtual table module "${moduleName}" yielded a row with missing columns`);
	}
}
|
||||
|
||||
// Derive positional parameter names ("$1", "$2", …) from the generator's
// declared arity.
function inferParameters({ length }) {
	if (!Number.isInteger(length) || length < 0) {
		throw new TypeError('Expected function.length to be a positive integer');
	}
	return Array.from({ length }, (_, i) => `$${i + 1}`);
}
|
||||
|
||||
// Shared helpers used throughout this module.
const { hasOwnProperty } = Object.prototype;
const { apply } = Function.prototype;
const GeneratorFunctionPrototype = Object.getPrototypeOf(function*(){});
// Quote an SQL identifier, doubling any embedded double quotes.
const identifier = (str) => '"' + str.replace(/"/g, '""') + '"';
// Wrap a value in a nullary thunk.
const defer = (x) => () => x;
|
||||
75
framework/node_modules/better-sqlite3/lib/methods/transaction.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
'use strict';
|
||||
const { cppdb } = require('../util');
|
||||
const controllers = new WeakMap();
|
||||
|
||||
module.exports = function transaction(fn) {
|
||||
if (typeof fn !== 'function') throw new TypeError('Expected first argument to be a function');
|
||||
|
||||
const db = this[cppdb];
|
||||
const controller = getController(db, this);
|
||||
const { apply } = Function.prototype;
|
||||
|
||||
// Each version of the transaction function has these same properties
|
||||
const properties = {
|
||||
default: { value: wrapTransaction(apply, fn, db, controller.default) },
|
||||
deferred: { value: wrapTransaction(apply, fn, db, controller.deferred) },
|
||||
immediate: { value: wrapTransaction(apply, fn, db, controller.immediate) },
|
||||
exclusive: { value: wrapTransaction(apply, fn, db, controller.exclusive) },
|
||||
database: { value: this, enumerable: true },
|
||||
};
|
||||
|
||||
Object.defineProperties(properties.default.value, properties);
|
||||
Object.defineProperties(properties.deferred.value, properties);
|
||||
Object.defineProperties(properties.immediate.value, properties);
|
||||
Object.defineProperties(properties.exclusive.value, properties);
|
||||
|
||||
// Return the default version of the transaction function
|
||||
return properties.default.value;
|
||||
};
|
||||
|
||||
// Return the database's cached transaction controller, or create a new one
|
||||
// Fetch the cached transaction controller for `db`, creating it on first
// use. COMMIT/ROLLBACK/SAVEPOINT statements are shared by all four flavors;
// only the BEGIN statement differs.
const getController = (db, self) => {
	let controller = controllers.get(db);
	if (!controller) {
		const shared = {
			commit: db.prepare('COMMIT', self, false),
			rollback: db.prepare('ROLLBACK', self, false),
			savepoint: db.prepare('SAVEPOINT `\t_bs3.\t`', self, false),
			release: db.prepare('RELEASE `\t_bs3.\t`', self, false),
			rollbackTo: db.prepare('ROLLBACK TO `\t_bs3.\t`', self, false),
		};
		controller = {
			default: { begin: db.prepare('BEGIN', self, false), ...shared },
			deferred: { begin: db.prepare('BEGIN DEFERRED', self, false), ...shared },
			immediate: { begin: db.prepare('BEGIN IMMEDIATE', self, false), ...shared },
			exclusive: { begin: db.prepare('BEGIN EXCLUSIVE', self, false), ...shared },
		};
		controllers.set(db, controller);
	}
	return controller;
};
|
||||
|
||||
// Return a new transaction function by wrapping the given function
|
||||
// Wrap `fn` so it runs between begin/commit (top level) or
// savepoint/release (nested), rolling back on any thrown error.
const wrapTransaction = (apply, fn, db, { begin, commit, rollback, savepoint, release, rollbackTo }) => function sqliteTransaction() {
	// Decide flavor up front: nested transactions use savepoints.
	const nested = db.inTransaction;
	const before = nested ? savepoint : begin;
	const after = nested ? release : commit;
	const undo = nested ? rollbackTo : rollback;
	before.run();
	try {
		const result = apply.call(fn, this, arguments);
		after.run();
		return result;
	} catch (ex) {
		if (db.inTransaction) {
			undo.run();
			// ROLLBACK TO leaves the savepoint open; RELEASE it as well.
			if (undo !== rollback) after.run();
		}
		throw ex;
	}
};
|
||||
54
framework/node_modules/better-sqlite3/lib/methods/wrappers.js
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
'use strict';
|
||||
const { cppdb } = require('../util');
|
||||
|
||||
exports.prepare = function prepare(sql) {
|
||||
return this[cppdb].prepare(sql, this, false);
|
||||
};
|
||||
|
||||
exports.exec = function exec(sql) {
|
||||
this[cppdb].exec(sql);
|
||||
return this;
|
||||
};
|
||||
|
||||
exports.close = function close() {
|
||||
this[cppdb].close();
|
||||
return this;
|
||||
};
|
||||
|
||||
exports.loadExtension = function loadExtension(...args) {
|
||||
this[cppdb].loadExtension(...args);
|
||||
return this;
|
||||
};
|
||||
|
||||
exports.defaultSafeIntegers = function defaultSafeIntegers(...args) {
|
||||
this[cppdb].defaultSafeIntegers(...args);
|
||||
return this;
|
||||
};
|
||||
|
||||
exports.unsafeMode = function unsafeMode(...args) {
|
||||
this[cppdb].unsafeMode(...args);
|
||||
return this;
|
||||
};
|
||||
|
||||
exports.getters = {
|
||||
name: {
|
||||
get: function name() { return this[cppdb].name; },
|
||||
enumerable: true,
|
||||
},
|
||||
open: {
|
||||
get: function open() { return this[cppdb].open; },
|
||||
enumerable: true,
|
||||
},
|
||||
inTransaction: {
|
||||
get: function inTransaction() { return this[cppdb].inTransaction; },
|
||||
enumerable: true,
|
||||
},
|
||||
readonly: {
|
||||
get: function readonly() { return this[cppdb].readonly; },
|
||||
enumerable: true,
|
||||
},
|
||||
memory: {
|
||||
get: function memory() { return this[cppdb].memory; },
|
||||
enumerable: true,
|
||||
},
|
||||
};
|
||||
20
framework/node_modules/better-sqlite3/lib/sqlite-error.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
'use strict';

// Reused property descriptor: its `value` is rewritten before each
// defineProperty call below (quirky, but avoids allocating per instance).
const descriptor = { value: 'SqliteError', writable: true, enumerable: false, configurable: true };

/**
 * Error subclass thrown by the native addon.
 * @param {*} message - error message (coerced to string).
 * @param {string} code - SQLite result-code string (e.g. 'SQLITE_BUSY').
 */
function SqliteError(message, code) {
	// Support calling without `new`.
	if (new.target !== SqliteError) return new SqliteError(message, code);
	if (typeof code !== 'string') throw new TypeError('Expected second argument to be a string');
	Error.call(this, message);
	descriptor.value = '' + message;
	Object.defineProperty(this, 'message', descriptor);
	Error.captureStackTrace(this, SqliteError);
	this.code = code;
}
Object.setPrototypeOf(SqliteError, Error);
Object.setPrototypeOf(SqliteError.prototype, Error.prototype);
// At module load, descriptor.value is still 'SqliteError' here.
Object.defineProperty(SqliteError.prototype, 'name', descriptor);
module.exports = SqliteError;
|
||||
12
framework/node_modules/better-sqlite3/lib/util.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
'use strict';
|
||||
|
||||
exports.getBooleanOption = (options, key) => {
|
||||
let value = false;
|
||||
if (key in options && typeof (value = options[key]) !== 'boolean') {
|
||||
throw new TypeError(`Expected the "${key}" option to be a boolean`);
|
||||
}
|
||||
return value;
|
||||
};
|
||||
|
||||
exports.cppdb = Symbol();
|
||||
exports.inspect = Symbol.for('nodejs.util.inspect.custom');
|
||||
4
framework/node_modules/better-sqlite3/package.json
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"name":"better-sqlite3",
|
||||
"main": "lib/index.js"
|
||||
}
|
||||
5
framework/node_modules/better-sqlite3/unify
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
// binaries 108 = v18.17.0
|
||||
|
||||
// https://sourceforge.net/projects/better-sqlite3.mirror/files/v8.1.0/
|
||||
|
||||
// https://nodejs.org/en/download/releases
|
||||
BIN
framework/node_modules/esbuild/darwin_386
generated
vendored
Normal file
BIN
framework/node_modules/esbuild/darwin_amd64
generated
vendored
Normal file
BIN
framework/node_modules/esbuild/darwin_arm
generated
vendored
Normal file
135
framework/node_modules/esbuild/getBinaryName.js
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
|
||||
|
||||
// Resolves the relative path of the bundled esbuild binary matching the
// current platform and CPU architecture, exported as the module default.
//
// The previous version duplicated an identical arch switch three times,
// gated on whether the CPU model string contained "amd"/"intel"/"apple".
// The vendor check contributed nothing to the result, and on any other CPU
// (e.g. ARM server chips, or arm64 in general) the suffix stayed undefined,
// silently exporting a path like "./.../linux_undefined". The mapping now
// depends only on `process.arch` and fails loudly when unsupported.

const version = process.versions.modules;

// Kept for reference: name of the better-sqlite3 prebuilt tarball matching
// this Node ABI (see ../better-sqlite3/binaries).
const betterSqliteVersion = "8.1.0";
const betterSqliteTarball = "better-sqlite3-v" + betterSqliteVersion + "-node-v" + version + "-" + process.platform + "-" + process.arch + ".tar.gz";

// Map Node's process.arch to the suffix used by the vendored binaries.
// arm64 is pointed at the "arm" build as the nearest available binary.
const ARCH_SUFFIX = {
	x64: "amd64",
	arm: "arm",
	arm64: "arm",
	ia32: "386",
};

const archSuffix = ARCH_SUFFIX[process.arch];
if (archSuffix === undefined) {
	throw new Error(`No bundled esbuild binary for architecture "${process.arch}"`);
}

// Windows binaries carry an .exe extension and use "windows" (not "win32")
// in their file names.
const isWindows = process.platform === "win32";
const platformName = isWindows ? "windows" : process.platform;
const extension = isWindows ? ".exe" : "";

const basePath = "./framework/node_modules/esbuild/";
const binaryPath = basePath + platformName + "_" + archSuffix + extension;

export default binaryPath;
|
||||
BIN
framework/node_modules/esbuild/linux_386
generated
vendored
Normal file
BIN
framework/node_modules/esbuild/linux_amd64
generated
vendored
Executable file
BIN
framework/node_modules/esbuild/linux_arm
generated
vendored
Normal file
8
framework/node_modules/esbuild/package.json
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"type": "module",
|
||||
"name": "Unify",
|
||||
"license-key": "",
|
||||
"dependencies": {
|
||||
"esbuild": "^0.17.8"
|
||||
}
|
||||
}
|
||||
BIN
framework/node_modules/esbuild/windows_386.exe
generated
vendored
Normal file
BIN
framework/node_modules/esbuild/windows_amd64.exe
generated
vendored
Normal file
BIN
framework/node_modules/esbuild/windows_arm.exe
generated
vendored
Normal file
142
framework/node_modules/fs-extra/index.js
generated
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
|
||||
|
||||
import fs from "fs";
|
||||
|
||||
import path from "path";
|
||||
|
||||
import tools from "../../unify/tools.js";
|
||||
|
||||
|
||||
// Recursively mirrors `src` into `dest`: directories are recreated and
// walked, files are copied byte-for-byte. Matches the original behaviour
// exactly: a missing `src` falls through to fs.copyFileSync, which throws.
function copyRecursiveSync(src, dest) {
	const isDir = fs.existsSync(src) && fs.statSync(src).isDirectory();

	if (!isDir) {
		fs.copyFileSync(src, dest);
		return;
	}

	fs.mkdirSync(dest);
	for (const entry of fs.readdirSync(src)) {
		copyRecursiveSync(path.join(src, entry), path.join(dest, entry));
	}
};
|
||||
|
||||
// Removes the directory tree rooted at `directoryPath`. A non-existent path
// is silently ignored. Entries are inspected with lstat, so symlinks are
// unlinked rather than followed.
const deleteFolderRecursive = function (directoryPath) {
	if (!fs.existsSync(directoryPath)) return;

	for (const entry of fs.readdirSync(directoryPath)) {
		const entryPath = path.join(directoryPath, entry);
		if (fs.lstatSync(entryPath).isDirectory()) {
			deleteFolderRecursive(entryPath); // recurse into subdirectory
		} else {
			fs.unlinkSync(entryPath); // plain file or symlink
		}
	}

	fs.rmdirSync(directoryPath); // directory is empty now
};
|
||||
|
||||
|
||||
// Minimal stand-in for the npm "fs-extra" package: synchronous ensure-dir
// and copy helpers layered over node's fs.
class fsExtra{

	// Walks upward from the full path in `parts`, popping one segment per
	// recursion until an existing directory is found. Returns how many
	// trailing segments were missing (0 means the full path already exists).
	// NOTE: mutates `parts` in place via pop(); `partsOriginal` is accepted
	// but never read.
	dirLower( parts, partsOriginal, level = 0 ) {

		var absolutePath = path.resolve( parts.join("/") );

		if ( !fs.existsSync( absolutePath ) ) {

			parts.pop();

			return this.dirLower( parts, partsOriginal, ++level );

		} else {

			return level;

		}

	}

	// Creates `dir` and any missing parents, one mkdirSync per missing level
	// (shallowest first). Declared async but performs only synchronous work.
	// Returns true when the directory already exists, undefined otherwise.
	// assumes tools.slash normalises separators to "/" — TODO confirm against
	// ../../unify/tools.js.
	async ensureDirSync( dir ) {

		dir = tools.slash( dir );

		var parts = dir.split("/")

		// Copy before dirLower consumes `parts` via pop().
		var partsCopy = parts.slice();

		var depth = this.dirLower( parts, parts )

		if( depth == 0 ) {

			return true;
		}

		// Recreate the missing levels from the deepest existing ancestor down.
		for (var i = 0; i < depth; i++) {

			var negative = depth - i - 1;

			var pathToDir = partsCopy.slice( 0, partsCopy.length - negative );

			fs.mkdirSync(pathToDir.join("/"));

		}

	}

	// Copies a single file. When `target` is an existing directory the file
	// is placed inside it under its original basename; otherwise `target` is
	// treated (and overwritten) as the destination file path.
	copyFileSync( source, target ) {

		var targetFile = target;

		// If target is a directory, a new file with the same name will be created
		if ( fs.existsSync( target ) ) {
			if ( fs.lstatSync( target ).isDirectory() ) {
				targetFile = path.join( target, path.basename( source ) );
			}
		}

		// Read-then-write copy (loads the whole file into memory).
		fs.writeFileSync(targetFile, fs.readFileSync(source));
	}

	// Replacing copy: any existing target *directory* is removed first
	// (deleteFolderRecursive ignores non-directories), then `source` is
	// copied — recursively when it is a directory, as a single file otherwise.
	copySync( source, target ) {

		var isDirectory = fs.lstatSync( source ).isDirectory();

		if ( fs.existsSync( target ) ) {

			deleteFolderRecursive( target );

		}

		if( isDirectory ){

			copyRecursiveSync( source, target );

		} else {

			this.copyFileSync( source, target )

		}

		// console.log("isDirectory", isDirectory);

	}

}
|
||||
|
||||
// Augment the imported `fs` object with the helpers above and re-export it
// as a drop-in "fs-extra" replacement.
const extras = new fsExtra();

// Bind each method to the fsExtra instance. The originals were assigned
// unbound, so `this` inside them resolved to `fs` when invoked as
// `fs.copySync(...)`; copySync's `this.copyFileSync` then dispatched to the
// *native* fs.copyFileSync (which rejects directory targets) instead of the
// directory-aware wrapper defined on the class.
fs.dirLower = extras.dirLower.bind(extras);
fs.ensureDirSync = extras.ensureDirSync.bind(extras);
fs.copySync = extras.copySync.bind(extras);

export default fs;
|
||||
5
framework/node_modules/fs-extra/package.json
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"type": "module",
|
||||
"name": "fs-extra",
|
||||
"main": "./index.js"
|
||||
}
|
||||
12
framework/node_modules/node-gpp/.gpp/sourceTest/test.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
LIGHT_WINDOWS
|
||||
|
||||
|
||||
|
||||
2
framework/node_modules/node-gpp/.gpp/sourceTest/test2.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
|
||||
Dit werkt ook
|
||||
0
framework/node_modules/node-gpp/api_example.data
generated
vendored
Normal file
5580
framework/node_modules/node-gpp/api_example.js
generated
vendored
Normal file
BIN
framework/node_modules/node-gpp/api_example.wasm
generated
vendored
Normal file
47
framework/node_modules/node-gpp/asset_dir/application.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
|
||||
|
||||
#define CLIENT
|
||||
|
||||
|
||||
|
||||
#define DARK
|
||||
|
||||
|
||||
|
||||
#define PC
|
||||
|
||||
|
||||
|
||||
#define WINDOWS
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import extender from '../../../../../../framework/unify/extender.js?disableCache=0974';
|
||||
|
||||
import leftSide from './leftSide/leftSide.js?disableCache=0974';
|
||||
|
||||
import rightSide from './rightSide/rightSide.js?disableCache=0974';
|
||||
|
||||
export default class application{
|
||||
|
||||
__className = "application";
|
||||
|
||||
__sourcePath = "news/application.js";
|
||||
|
||||
leftSide = new leftSide();
|
||||
|
||||
rightSide = new rightSide();
|
||||
|
||||
height = "100vh";
|
||||
|
||||
mode = "development"
|
||||
|
||||
os = "Windows";
|
||||
|
||||
device = "pc";
|
||||
|
||||
theme = "Dark";
|
||||
|
||||
}
|
||||
13
framework/node_modules/node-gpp/asset_dir/otherTest.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
#define A n /n
|
||||
|
||||
#define B
|
||||
|
||||
#ifdef A
|
||||
|
||||
#ifdef B
|
||||
|
||||
Mooi
|
||||
|
||||
#endif
|
||||
|
||||
#endif
|
||||
47
framework/node_modules/node-gpp/asset_dir/test.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
|
||||
|
||||
#define CLIENT
|
||||
|
||||
|
||||
|
||||
#define DARK
|
||||
|
||||
|
||||
|
||||
#define PC
|
||||
|
||||
|
||||
|
||||
#define WINDOWS
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import extender from '../../../../../../framework/unify/extender.js?disableCache=0974';
|
||||
|
||||
import leftSide from './leftSide/leftSide.js?disableCache=0974';
|
||||
|
||||
import rightSide from './rightSide/rightSide.js?disableCache=0974';
|
||||
|
||||
export default class application{
|
||||
|
||||
__className = "application";
|
||||
|
||||
__sourcePath = "news/application.js";
|
||||
|
||||
leftSide = new leftSide();
|
||||
|
||||
rightSide = new rightSide();
|
||||
|
||||
height = "100vh";
|
||||
|
||||
mode = "development"
|
||||
|
||||
os = "Windows";
|
||||
|
||||
device = "pc";
|
||||
|
||||
theme = "Dark";
|
||||
|
||||
}
|
||||
141
framework/node_modules/node-gpp/child.js
generated
vendored
Normal file
@@ -0,0 +1,141 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2020, 2023, The Unified Company.
|
||||
|
||||
This code is part of Unify.
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the ESA Software Community License - Strong Copyleft,
|
||||
|
||||
https://unifyjs.org
|
||||
|
||||
*/
|
||||
|
||||
|
||||
// Polyfill String#replaceAll for Node < 15 only. The previous assignment
// unconditionally clobbered the native implementation on newer Node, whose
// semantics differ from split/join for regex `search` arguments and for
// `$`-patterns in the replacement string — guard so the native method wins.
if (typeof String.prototype.replaceAll !== 'function') {
	String.prototype.replaceAll = function replaceAll(search, replace) { return this.split(search).join(replace); };
}
|
||||
|
||||
|
||||
|
||||
var execPromises = new Array();
|
||||
|
||||
|
||||
if( process.platform == "android" ) {

	// Android has no child-process / wasm toolchain available here, so the
	// exported class degrades to an identity "converter".
	exports.gpp = class gpp{

		convert( a ) {

			return a;

		}

	}

} else {

	var fs = require('fs');

	var factory = require('./api_example.js');

	var path = require('path');

	const util = require('util');

	const childProcess = require('child_process');

	const { gpp } = require('./gpp.js');

	// NOTE(review): `spawn` is captured but never used below — batches are
	// launched via exec inside gpp.js instead.
	var spawn = childProcess.spawn;

	// Files per child-process batch; must match batchSize in gpp.js.
	var batchSize = 70;

	// Shared gpp instance; its execPromises array collects the per-batch
	// child-process promises awaited at the end of convert_files().
	var gppInstance = new gpp();

	exports.gpp = class gppManager{

		// Width of the console progress indicator: one dot per batch, capped
		// at 100.
		getNumberOfDots( numberOfBatches ) {

			if( numberOfBatches < 100 ){

				return numberOfBatches;

			} else {

				return 100;

			}

		}

		// Prints the batch/file/core summary banner before conversion starts.
		logConverter( coreNumber, inputFiles, numberOfBatches ) {

			console.log( "Batches", numberOfBatches );

			console.log( "Processing", inputFiles.length, "files" );

			console.log( "Multicore support: processing batches with ", coreNumber, "cores." );

			//console.log("[");

			//process.stdout.moveCursor( numberOfDots + 2, -1);

			//console.log("]");

			console.log("");

		}

		// Reads the list of files to preprocess from the cache, splits it into
		// batches of `batchSize`, hands each batch to gppInstance.convert (which
		// spawns a child process per batch), and waits for all children.
		async convert_files() {

			var filesPath = path.resolve( "./framework/cache/platforms/files.json" );

			// NOTE(review): readFileSync is synchronous — the `await` here is a
			// no-op on a plain value.
			var sourceRaw = await fs.readFileSync( filesPath, "utf8" )

			var inputFiles = JSON.parse( sourceRaw );

			var numberOfBatches = Math.floor( inputFiles.length / batchSize );

			// NOTE(review): computed but never used.
			var totalFiles = batchSize * numberOfBatches + 1;

			var os = require('os');

			// NOTE(review): required but never used.
			var readline = require('readline');

			const coreNumber = os.cpus().length;

			var numberOfDots = this.getNumberOfDots( numberOfBatches );

			this.logConverter( coreNumber, inputFiles, numberOfBatches );

			// NOTE(review): when stdout is not a TTY (moveCursor undefined, e.g.
			// piped output or CI), this guard skips the convert() calls entirely —
			// no batches are processed at all. Confirm whether that is intended.
			if( process.stdout.moveCursor ) {

				for (var i = 0; i < numberOfBatches + 1; i++) {

					gppInstance.convert( inputFiles, i );

					// Draw one progress dot, scaled into the capped bar width.
					process.stdout.moveCursor( Math.floor( (i * numberOfDots) / numberOfBatches ) , -1 );

					console.log("·");

				}

			}

			// Wait for every spawned batch child process to finish.
			await Promise.all( gppInstance.execPromises );

			console.log("Done.");

			return true;

		}

	}

}
|
||||
1121
framework/node_modules/node-gpp/file_packager.py
generated
vendored
Normal file
3491
framework/node_modules/node-gpp/gpp.c
generated
vendored
Normal file
86
framework/node_modules/node-gpp/gpp.js
generated
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2020, 2023, The Unified Company.
|
||||
|
||||
This code is part of Unify.
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the ESA Software Community License - Strong Copyleft,
|
||||
|
||||
https://unifyjs.org
|
||||
|
||||
*/
|
||||
|
||||
|
||||
const fs = require('fs');
const path = require('path');
const util = require('util');

const exec = util.promisify( require('child_process').exec );
|
||||
|
||||
|
||||
exports.gpp = class gpp{
|
||||
|
||||
batchSize = 70;
|
||||
|
||||
execPromises = new Array();
|
||||
|
||||
|
||||
|
||||
constructor() {
|
||||
|
||||
}
|
||||
|
||||
async initialise() {
|
||||
|
||||
this.instance = await new factory();
|
||||
|
||||
}
|
||||
|
||||
createBatchesDirectory() {
|
||||
|
||||
if( !fs.existsSync( "./framework/cache/platforms/batches/" ) ) {
|
||||
|
||||
fs.mkdirSync("./framework/cache/platforms/batches/");
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
async convert( inputFiles, i ) {
|
||||
|
||||
this.createBatchesDirectory();
|
||||
|
||||
|
||||
var file = path.resolve( "./framework/cache/platforms/batches/" + i + ".json");
|
||||
|
||||
var n = i * this.batchSize;
|
||||
|
||||
var inputFilesSliced = inputFiles.slice( n, this.batchSize + n );
|
||||
|
||||
var inputFilesJoined = inputFilesSliced.join("\n");
|
||||
|
||||
|
||||
fs.writeFileSync( file, JSON.stringify( inputFilesJoined ) );
|
||||
|
||||
await this.spawnBatch( file, i );
|
||||
|
||||
return true;
|
||||
|
||||
}
|
||||
|
||||
async spawnBatch( file, i ) {
|
||||
|
||||
var cWrapper = path.resolve("./framework/node_modules/node-gpp/instanceChild.js");
|
||||
|
||||
var args = new Array( cWrapper, "file=" + file, "index=" + i );
|
||||
|
||||
var defaults = { file:file, env: process.env };
|
||||
|
||||
var execPromise = exec("node " + cWrapper + " file=" + file + " index=" + i );
|
||||
|
||||
this.execPromises.push(execPromise);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
1
framework/node_modules/node-gpp/how to compile
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
emcc -Wimplicit-function-declaration ./gpp.c -o api_example.js -sMODULARIZE -sEXPORTED_RUNTIME_METHODS=ccall,cwrap --preload-file asset_dir -sDISABLE_EXCEPTION_CATCHING=0 -sALLOW_MEMORY_GROWTH -fsanitize=undefined -lnodefs.js
|
||||
35
framework/node_modules/node-gpp/how to install
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
|
||||
# Fetch the latest version of the emsdk (not needed the first time you clone)
|
||||
git pull
|
||||
|
||||
# Download and install the latest SDK tools.
|
||||
./emsdk install latest
|
||||
|
||||
# Make the "latest" SDK "active" for the current user. (writes .emscripten file)
|
||||
./emsdk activate latest
|
||||
|
||||
# Activate PATH and other environment variables in the current terminal
|
||||
source ./emsdk_env.sh
|
||||
|
||||
|
||||
|
||||
|
||||
./emsdk install 1.38.45
|
||||
|
||||
|
||||
|
||||
# Fetch the latest registry of available tools.
|
||||
./emsdk update
|
||||
|
||||
# Download and install the latest SDK tools.
|
||||
./emsdk install latest
|
||||
|
||||
# Set up the compiler configuration to point to the "latest" SDK.
|
||||
./emsdk activate latest
|
||||
|
||||
# Activate PATH and other environment variables in the current terminal
|
||||
source ./emsdk_env.sh
|
||||
|
||||
To build
|
||||
|
||||
emcc -Wimplicit-function-declaration ./gpp.c -o api_example.js -sMODULARIZE -sEXPORTED_RUNTIME_METHODS=ccall,cwrap --preload-file asset_dir
|
||||
102
framework/node_modules/node-gpp/instanceChild.js
generated
vendored
Normal file
@@ -0,0 +1,102 @@
|
||||
/*
|
||||
|
||||
Copyright (c) 2020, 2023, The Unified Company.
|
||||
|
||||
This code is part of Unify.
|
||||
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the ESA Software Community License - Strong Copyleft,
|
||||
|
||||
https://unifyjs.org
|
||||
|
||||
*/
|
||||
|
||||
|
||||
var fs = require('fs');
|
||||
|
||||
var factory = require('./api_example.js');
|
||||
|
||||
//var parser = require('args-parser');
|
||||
|
||||
var path = require('path');
|
||||
|
||||
const util = require('util');
|
||||
|
||||
const fse = require('fs');
|
||||
|
||||
const fast = util.promisify(factory);
|
||||
|
||||
|
||||
// --- command-line argument parsing -----------------------------------------
// Expected invocation (built by gpp.js spawnBatch):
//   node instanceChild.js file=<batch.json> index=<n>
var commandLineArguments = process.argv;

for (var i = 0; i < commandLineArguments.length; i++) {

	var row = commandLineArguments[i]

	// NOTE(review): substring match — any argv entry merely *containing*
	// "file" (e.g. a script path with "file" in it) is picked up; a prefix
	// check ("file=") would be safer. Same applies to "index" below.
	if(row.includes("file")) {

		var file = row.split("=")[1];

	}

	if(row.includes("index")) {

		var index = row.split("=")[1];

	}

}

var filePath = file;

var source = fs.readFileSync(filePath, "utf8")

// The batch descriptor is written by gpp.js as JSON.stringify of a
// newline-joined string, so `inputFiles` is a single string after parsing —
// presumably consumed as such by the wasm side; verify against gpp.c.
var inputFiles = JSON.parse(source);

// NOTE(review): `index` is still a string here, and this reuses/overwrites
// the loop variable `i` declared above.
var i = index;

// Must match batchSize in gpp.js / child.js.
var batchSize = 70;
|
||||
|
||||
|
||||
// Thin wrapper around the emscripten-compiled gpp preprocessor module
// (./api_example.js), driving one batch per process invocation.
class gppWrapper{

	constructor() {

	}

	// Instantiates the wasm module via the emscripten MODULARIZE factory
	// required at the top of this file.
	async initialise() {

		this.instance = await new factory();

	}

	// Runs the preprocessor over one batch payload. `i` is the batch index
	// passed on the command line.
	async convert( inputFiles, i ) {

		// Lazily build the wasm instance on first use.
		if( !this.instance ) {

			await this.initialise();

		}

		let init_esm = this.instance.cwrap( 'init_esm', null, null );

		// NOTE(review): the cwrap signature declares four parameters
		// ('int','string','string','int') but the call below passes only two —
		// confirm against the C side (gpp.c updateCode) whether the trailing
		// arguments are optional/ignored.
		let updateCodeFunction = this.instance.cwrap( 'updateCode', null, [ 'int','string', 'string', 'int' ]);

		init_esm();

		// NOTE(review): `n` is computed but never used; presumably left over
		// from batch-offset logic that now lives in gpp.js.
		var n = i * batchSize;

		updateCodeFunction( inputFiles.length, inputFiles );

		console.log( "Processed", batchSize, "files" );

		return true;

	}

}
|
||||
|
||||
// Kick off conversion for the batch passed on the command line. The promise
// was previously left floating, so any failure inside convert() surfaced
// only as an unhandled rejection; report it and fail the process explicitly.
var gppInstance = new gppWrapper();

gppInstance.convert( inputFiles, i ).catch( ( err ) => {
	console.error( err );
	process.exitCode = 1;
} );
|
||||
18
framework/node_modules/node-gpp/package.json
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
|
||||
"name": "node-gpp",
|
||||
|
||||
"version": 0.1,
|
||||
|
||||
"dependencies": {
|
||||
|
||||
"args-parser": "^1.3.0",
|
||||
|
||||
"get-cursor-position": "^2.0.0",
|
||||
|
||||
"wasi": "^0.0.6"
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
13
framework/node_modules/node-rate-limiter-flexible/.editorconfig
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
indent_size = 2
|
||||
indent_style = space
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
|
||||
[*.md]
|
||||
insert_final_newline = false
|
||||
trim_trailing_whitespace = false
|
||||
28
framework/node_modules/node-rate-limiter-flexible/.eslintrc.json
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"plugins": [
|
||||
"node",
|
||||
"security"
|
||||
],
|
||||
"extends": [
|
||||
"plugin:node/recommended",
|
||||
"plugin:security/recommended",
|
||||
"eslint:recommended",
|
||||
"airbnb-base"
|
||||
],
|
||||
"env": {
|
||||
"node": true
|
||||
},
|
||||
"rules": {
|
||||
"no-underscore-dangle": "off",
|
||||
"no-param-reassign": "off",
|
||||
"no-plusplus": "off",
|
||||
"radix": ["error", "as-needed"],
|
||||
"consistent-return": "off",
|
||||
"class-methods-use-this": "off",
|
||||
"max-len": ["error", { "code": 140 }],
|
||||
"node/no-unpublished-require": ["error", {
|
||||
"allowModules": ["mocha", "chai", "redis-mock"]
|
||||
}],
|
||||
"node/no-unsupported-features": "off"
|
||||
}
|
||||
}
|
||||
1
framework/node_modules/node-rate-limiter-flexible/.github/FUNDING.yml
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
ko_fi: animir
|
||||
4
framework/node_modules/node-rate-limiter-flexible/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
.idea
|
||||
node_modules
|
||||
coverage
|
||||
package-lock.json
|
||||
9
framework/node_modules/node-rate-limiter-flexible/.npmignore
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
.idea
|
||||
.gitignore
|
||||
.npmignore
|
||||
.travis.yml
|
||||
.eslintrc.json
|
||||
test
|
||||
coverage
|
||||
img
|
||||
.github
|
||||
10
framework/node_modules/node-rate-limiter-flexible/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
language: node_js
|
||||
node_js:
|
||||
- "8"
|
||||
- "10"
|
||||
- "12"
|
||||
- "14"
|
||||
script:
|
||||
- npm run eslint
|
||||
- npm run test
|
||||
after_success: 'npm run coveralls'
|
||||
7
framework/node_modules/node-rate-limiter-flexible/LICENSE.md
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
## ISC License (ISC)
|
||||
|
||||
Copyright 2019 Roman Voloboev
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
239
framework/node_modules/node-rate-limiter-flexible/README.md
generated
vendored
Normal file
@@ -0,0 +1,239 @@
|
||||
[](https://coveralls.io/r/animir/node-rate-limiter-flexible?branch=master)
|
||||
[](https://www.npmjs.com/package/rate-limiter-flexible)
|
||||

|
||||
[![node version][node-image]][node-url]
|
||||
[](https://github.com/denoland/deno)
|
||||
|
||||
[node-image]: https://img.shields.io/badge/node.js-%3E=_6.0-green.svg?style=flat-square
|
||||
[node-url]: http://nodejs.org/download/
|
||||
|
||||
<img src="img/rlflx-logo-small.png" width="50" alt="Logo"/>
|
||||
|
||||
## node-rate-limiter-flexible
|
||||
|
||||
**rate-limiter-flexible** counts and limits number of actions by key and protects from DDoS and brute force attacks at any scale.
|
||||
|
||||
It works with _Redis_, process _Memory_, _Cluster_ or _PM2_, _Memcached_, _MongoDB_, _MySQL_, _PostgreSQL_ and allows you to control the request rate in a single process or a distributed environment.
|
||||
|
||||
Memory limiter also works in browser.
|
||||
|
||||
**Atomic increments.** All operations in memory or distributed environment use atomic increments against race conditions.
|
||||
|
||||
Allow **traffic bursts** with [BurstyRateLimiter](https://github.com/animir/node-rate-limiter-flexible/wiki/BurstyRateLimiter).
|
||||
|
||||
**Fast.** Average request takes `0.7ms` in Cluster and `2.5ms` in Distributed application. See [benchmarks](https://github.com/animir/node-rate-limiter-flexible#benchmark).
|
||||
|
||||
**Flexible.** Combine limiters, block key for some duration, delay actions, manage failover with insurance options, configure smart key blocking in memory and many others.
|
||||
|
||||
**Ready for growth.** It provides unified API for all limiters. Whenever your application grows, it is ready. Prepare your limiters in minutes.
|
||||
|
||||
**Friendly.** No matter which node package you prefer: `redis` or `ioredis`, `sequelize`/`typeorm` or `knex`, `memcached`, native driver or `mongoose`. It works with all of them.
|
||||
|
||||
**In memory blocks.** Avoid extra requests to store with [inMemoryBlockOnConsumed](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#inmemoryblockonconsumed).
|
||||
|
||||
**Deno compatible** See [this example](https://gist.github.com/animir/d06ca92931677f330d3f2d4c6c3108e4)
|
||||
|
||||
It uses **fixed window** as it is much faster than rolling window.
|
||||
[See comparative benchmarks with other libraries here](https://github.com/animir/node-rate-limiter-flexible/wiki/Comparative-benchmarks)
|
||||
|
||||
## Installation
|
||||
|
||||
`npm i --save rate-limiter-flexible`
|
||||
|
||||
`yarn add rate-limiter-flexible`
|
||||
|
||||
## Basic Example
|
||||
|
||||
Points can be consumed by IP address, user ID, authorisation token, API route or any other string.
|
||||
|
||||
```javascript
|
||||
const opts = {
|
||||
points: 6, // 6 points
|
||||
duration: 1, // Per second
|
||||
};
|
||||
|
||||
const rateLimiter = new RateLimiterMemory(opts);
|
||||
|
||||
rateLimiter.consume(remoteAddress, 2) // consume 2 points
|
||||
.then((rateLimiterRes) => {
|
||||
// 2 points consumed
|
||||
})
|
||||
.catch((rateLimiterRes) => {
|
||||
// Not enough points to consume
|
||||
});
|
||||
```
|
||||
|
||||
#### RateLimiterRes object
|
||||
|
||||
Both Promise resolve and reject return an object of the `RateLimiterRes` class if there is no error.
|
||||
Object attributes:
|
||||
```javascript
|
||||
RateLimiterRes = {
|
||||
msBeforeNext: 250, // Number of milliseconds before next action can be done
|
||||
remainingPoints: 0, // Number of remaining points in current duration
|
||||
consumedPoints: 5, // Number of consumed points in current duration
|
||||
isFirstInDuration: false, // action is first in current duration
|
||||
}
|
||||
```
|
||||
|
||||
You may want to set next HTTP headers to response:
|
||||
```javascript
|
||||
const headers = {
|
||||
"Retry-After": rateLimiterRes.msBeforeNext / 1000,
|
||||
"X-RateLimit-Limit": opts.points,
|
||||
"X-RateLimit-Remaining": rateLimiterRes.remainingPoints,
|
||||
"X-RateLimit-Reset": new Date(Date.now() + rateLimiterRes.msBeforeNext)
|
||||
}
|
||||
```
|
||||
|
||||
### Advantages:
|
||||
* no race conditions
|
||||
* no production dependencies
|
||||
* TypeScript declaration bundled
|
||||
* allow traffic burst with [BurstyRateLimiter](https://github.com/animir/node-rate-limiter-flexible/wiki/BurstyRateLimiter)
|
||||
* Block Strategy against really powerful DDoS attacks (like 100k requests per sec) [Read about it and benchmarking here](https://github.com/animir/node-rate-limiter-flexible/wiki/In-memory-Block-Strategy)
|
||||
* Insurance Strategy as emergency solution if database / store is down [Read about Insurance Strategy here](https://github.com/animir/node-rate-limiter-flexible/wiki/Insurance-Strategy)
|
||||
* works in Cluster or PM2 without additional software [See RateLimiterCluster benchmark and detailed description here](https://github.com/animir/node-rate-limiter-flexible/wiki/Cluster)
|
||||
* useful `get`, `set`, `block`, `delete`, `penalty` and `reward` methods
|
||||
|
||||
### Middlewares, plugins and other packages
|
||||
* [Express middleware](https://github.com/animir/node-rate-limiter-flexible/wiki/Express-Middleware)
|
||||
* [Koa middleware](https://github.com/animir/node-rate-limiter-flexible/wiki/Koa-Middleware)
|
||||
* [Hapi plugin](https://github.com/animir/node-rate-limiter-flexible/wiki/Hapi-plugin)
|
||||
* GraphQL [graphql-rate-limit-directive](https://www.npmjs.com/package/graphql-rate-limit-directive)
|
||||
* NestJS try [nestjs-rate-limiter](https://www.npmjs.com/package/nestjs-rate-limiter)
|
||||
* Fastify based NestJS app try [nestjs-fastify-rate-limiter](https://www.npmjs.com/package/nestjs-fastify-rate-limiter)
|
||||
|
||||
Some copy/paste examples on Wiki:
|
||||
* [Minimal protection against password brute-force](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#minimal-protection-against-password-brute-force)
|
||||
* [Login endpoint protection](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#login-endpoint-protection)
|
||||
* [Websocket connection prevent flooding](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#websocket-single-connection-prevent-flooding)
|
||||
* [Dynamic block duration](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#dynamic-block-duration)
|
||||
* [Authorized users specific limits](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#authorized-and-not-authorized-users)
|
||||
* [Different limits for different parts of application](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#different-limits-for-different-parts-of-application)
|
||||
* [Apply Block Strategy](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#apply-in-memory-block-strategy-to-avoid-extra-requests-to-store)
|
||||
* [Setup Insurance Strategy](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#setup-insurance-strategy-for-store-limiters)
|
||||
* [Third-party API, crawler, bot rate limiting](https://github.com/animir/node-rate-limiter-flexible/wiki/Overall-example#third-party-api-crawler-bot-rate-limiting)
|
||||
|
||||
### Migration from other packages
|
||||
* [express-brute](https://github.com/animir/node-rate-limiter-flexible/wiki/ExpressBrute-migration) Bonus: race conditions fixed, prod deps removed
|
||||
* [limiter](https://github.com/animir/node-rate-limiter-flexible/wiki/RateLimiterQueue#migration-from-limiter) Bonus: multi-server support, respects queue order, native promises
|
||||
|
||||
### Docs and Examples
|
||||
|
||||
* [Options](https://github.com/animir/node-rate-limiter-flexible/wiki/Options)
|
||||
* [API methods](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods)
|
||||
* [BurstyRateLimiter](https://github.com/animir/node-rate-limiter-flexible/wiki/BurstyRateLimiter) Traffic burst support
|
||||
* [RateLimiterRedis](https://github.com/animir/node-rate-limiter-flexible/wiki/Redis)
|
||||
* [RateLimiterMemcache](https://github.com/animir/node-rate-limiter-flexible/wiki/Memcache)
|
||||
* [RateLimiterMongo](https://github.com/animir/node-rate-limiter-flexible/wiki/Mongo) (with [sharding support](https://github.com/animir/node-rate-limiter-flexible/wiki/Mongo#mongodb-sharding-options))
|
||||
* [RateLimiterMySQL](https://github.com/animir/node-rate-limiter-flexible/wiki/MySQL) (support Sequelize and Knex)
|
||||
* [RateLimiterPostgres](https://github.com/animir/node-rate-limiter-flexible/wiki/PostgreSQL) (support Sequelize, TypeORM and Knex)
|
||||
* [RateLimiterCluster](https://github.com/animir/node-rate-limiter-flexible/wiki/Cluster) ([PM2 cluster docs read here](https://github.com/animir/node-rate-limiter-flexible/wiki/PM2-cluster))
|
||||
* [RateLimiterMemory](https://github.com/animir/node-rate-limiter-flexible/wiki/Memory)
|
||||
* [RateLimiterUnion](https://github.com/animir/node-rate-limiter-flexible/wiki/RateLimiterUnion) Combine 2 or more limiters to act as single
|
||||
* [RLWrapperBlackAndWhite](https://github.com/animir/node-rate-limiter-flexible/wiki/Black-and-White-lists) Black and White lists
|
||||
* [RateLimiterQueue](https://github.com/animir/node-rate-limiter-flexible/wiki/RateLimiterQueue) Rate limiter with FIFO queue
|
||||
|
||||
### Changelog
|
||||
|
||||
See [releases](https://github.com/animir/node-rate-limiter-flexible/releases) for detailed changelog.
|
||||
|
||||
## Basic Options
|
||||
|
||||
* **points**
|
||||
|
||||
`Default: 4`
|
||||
|
||||
Maximum number of points can be consumed over duration
|
||||
|
||||
* **duration**
|
||||
|
||||
`Default: 1`
|
||||
|
||||
Number of seconds before consumed points are reset.
|
||||
|
||||
Never reset points, if `duration` is set to 0.
|
||||
|
||||
* **storeClient**
|
||||
|
||||
`Required for store limiters`
|
||||
|
||||
Have to be `redis`, `ioredis`, `memcached`, `mongodb`, `pg`, `mysql2`, `mysql` or any other related pool or connection.
|
||||
|
||||
### Other options on Wiki:
|
||||
* [keyPrefix](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#keyprefix) Make keys unique among different limiters.
|
||||
* [blockDuration](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#blockduration) Block for N seconds, if consumed more than points.
|
||||
* [inMemoryBlockOnConsumed](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#inmemoryblockonconsumed) Avoid extra requests to store.
|
||||
* [inMemoryBlockDuration](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#inmemoryblockduration)
|
||||
* [insuranceLimiter](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#insurancelimiter) Make it more stable with less efforts.
|
||||
* [storeType](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#storetype) Have to be set to `knex`, if you use it.
|
||||
* [dbName](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#dbname) Where to store points.
|
||||
* [tableName](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#tablename) Table/collection.
|
||||
* [tableCreated](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#tablecreated) Is table already created in MySQL or PostgreSQL.
|
||||
* [clearExpiredByTimeout](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#clearexpiredbytimeout) For MySQL and PostgreSQL.
|
||||
|
||||
Smooth out traffic peaks:
|
||||
* [execEvenly](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#execevenly)
|
||||
* [execEvenlyMinDelayMs](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#execevenlymindelayms)
|
||||
|
||||
Specific:
|
||||
* [indexKeyPrefix](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#indexkeyprefix) Combined indexes of MongoDB.
|
||||
* [timeoutMs](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#timeoutms) For Cluster.
|
||||
* [rejectIfRedisNotReady](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#rejectifredisnotready)
|
||||
|
||||
## API
|
||||
|
||||
Read detailed description on Wiki.
|
||||
|
||||
* [consume(key, points = 1)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterconsumekey-points--1) Consume points by key.
|
||||
* [get(key)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimitergetkey) Get `RateLimiterRes` or `null`.
|
||||
* [set(key, points, secDuration)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimitersetkey-points-secduration) Set points by key.
|
||||
* [block(key, secDuration)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterblockkey-secduration) Block key for `secDuration` seconds.
|
||||
* [delete(key)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterdeletekey) Reset consumed points.
|
||||
* [deleteInMemoryBlockedAll](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterdeleteinmemoryblockedall)
|
||||
* [penalty(key, points = 1)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterpenaltykey-points--1) Increase number of consumed points in current duration.
|
||||
* [reward(key, points = 1)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimiterrewardkey-points--1) Decrease number of consumed points in current duration.
|
||||
* [getKey(key)](https://github.com/animir/node-rate-limiter-flexible/wiki/API-methods#ratelimitergetkeykey) Get internal prefixed key.
|
||||
|
||||
## Benchmark
|
||||
|
||||
Average latency while testing a pure Node.js endpoint in a cluster of 4 workers, with everything set up on one server.
|
||||
|
||||
1000 concurrent clients with maximum 2000 requests per sec during 30 seconds.
|
||||
|
||||
```text
|
||||
1. Memory 0.34 ms
|
||||
2. Cluster 0.69 ms
|
||||
3. Redis 2.45 ms
|
||||
4. Memcached 3.89 ms
|
||||
5. Mongo 4.75 ms
|
||||
```
|
||||
|
||||
500 concurrent clients with maximum 1000 req per sec during 30 seconds
|
||||
```text
|
||||
6. PostgreSQL 7.48 ms (with connection pool max 100)
|
||||
7. MySQL 14.59 ms (with connection pool 100)
|
||||
```
|
||||
|
||||
Note, you can speed up limiters with [inMemoryBlockOnConsumed](https://github.com/animir/node-rate-limiter-flexible/wiki/Options#inmemoryblockonconsumed) option.
|
||||
|
||||
## Contribution
|
||||
|
||||
Appreciated, feel free!
|
||||
|
||||
Make sure you've launched `npm run eslint` before creating PR, all errors have to be fixed.
|
||||
|
||||
You can try to run `npm run eslint-fix` to fix some issues.
|
||||
|
||||
Any new limiter with storage has to extend `RateLimiterStoreAbstract`.
|
||||
It has to implement 4 methods:
|
||||
* `_getRateLimiterRes` parses raw data from store to `RateLimiterRes` object.
|
||||
* `_upsert` must be atomic. It inserts or updates a value by key and returns raw data. It must support `forceExpire` mode
|
||||
to overwrite key expiration time.
|
||||
* `_get` returns raw data by key or `null` if there is no key.
|
||||
* `_delete` deletes all key related data and returns `true` on deleted, `false` if key is not found.
|
||||
|
||||
All other methods depend on the store. See `RateLimiterRedis` or `RateLimiterPostgres` for example.
|
||||
|
||||
Note: all changes should be covered by tests.
|
||||
BIN
framework/node_modules/node-rate-limiter-flexible/img/chart-exec-evenly-10r-end.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 12 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/chart-exec-evenly-10r-start.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 13 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/express-brute-example.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 153 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-cluster-master.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-cluster-worker.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-memcache.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-memory.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 51 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-mongo.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-mysql.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-postgres.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/heap-redis.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
framework/node_modules/node-rate-limiter-flexible/img/rlflx-logo-small.png
generated
vendored
Normal file
|
After Width: | Height: | Size: 9.1 KiB |
29
framework/node_modules/node-rate-limiter-flexible/index.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
// Public entry point of rate-limiter-flexible: re-exports every limiter
// implementation and helper under a single flat namespace so consumers can
// `require('rate-limiter-flexible')` and destructure what they need.
const RateLimiterRedis = require('./lib/RateLimiterRedis');
const RateLimiterMongo = require('./lib/RateLimiterMongo');
const RateLimiterMySQL = require('./lib/RateLimiterMySQL');
const RateLimiterPostgres = require('./lib/RateLimiterPostgres');
const {RateLimiterClusterMaster, RateLimiterClusterMasterPM2, RateLimiterCluster} = require('./lib/RateLimiterCluster');
const RateLimiterMemory = require('./lib/RateLimiterMemory');
const RateLimiterMemcache = require('./lib/RateLimiterMemcache');
const RLWrapperBlackAndWhite = require('./lib/RLWrapperBlackAndWhite');
const RateLimiterUnion = require('./lib/RateLimiterUnion');
const RateLimiterQueue = require('./lib/RateLimiterQueue');
const BurstyRateLimiter = require('./lib/BurstyRateLimiter');
const RateLimiterRes = require('./lib/RateLimiterRes');

module.exports = {
  // Store-backed limiters
  RateLimiterRedis,
  RateLimiterMongo,
  RateLimiterMySQL,
  RateLimiterPostgres,
  // In-process limiters
  RateLimiterMemory,
  RateLimiterMemcache,
  // Cluster / PM2 coordination
  RateLimiterClusterMaster,
  RateLimiterClusterMasterPM2,
  RateLimiterCluster,
  // Wrappers and composites
  RLWrapperBlackAndWhite,
  RateLimiterUnion,
  RateLimiterQueue,
  BurstyRateLimiter,
  // Result object returned/rejected by limiter methods
  RateLimiterRes,
};
|
||||
74
framework/node_modules/node-rate-limiter-flexible/lib/BurstyRateLimiter.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
const RateLimiterRes = require("./RateLimiterRes");
|
||||
|
||||
/**
|
||||
* Bursty rate limiter exposes only msBeforeNext time and doesn't expose points from bursty limiter by default
|
||||
* @type {BurstyRateLimiter}
|
||||
*/
|
||||
module.exports = class BurstyRateLimiter {
|
||||
constructor(rateLimiter, burstLimiter) {
|
||||
this._rateLimiter = rateLimiter;
|
||||
this._burstLimiter = burstLimiter
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge rate limiter response objects. Responses can be null
|
||||
*
|
||||
* @param {RateLimiterRes} [rlRes] Rate limiter response
|
||||
* @param {RateLimiterRes} [blRes] Bursty limiter response
|
||||
*/
|
||||
_combineRes(rlRes, blRes) {
|
||||
return new RateLimiterRes(
|
||||
rlRes.remainingPoints,
|
||||
Math.min(rlRes.msBeforeNext, blRes.msBeforeNext),
|
||||
rlRes.consumedPoints,
|
||||
rlRes.isFirstInDuration
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* @param key
|
||||
* @param pointsToConsume
|
||||
* @param options
|
||||
* @returns {Promise<any>}
|
||||
*/
|
||||
consume(key, pointsToConsume = 1, options = {}) {
|
||||
return this._rateLimiter.consume(key, pointsToConsume, options)
|
||||
.catch((rlRej) => {
|
||||
if (rlRej instanceof RateLimiterRes) {
|
||||
return this._burstLimiter.consume(key, pointsToConsume, options)
|
||||
.then((blRes) => {
|
||||
return Promise.resolve(this._combineRes(rlRej, blRes))
|
||||
})
|
||||
.catch((blRej) => {
|
||||
if (blRej instanceof RateLimiterRes) {
|
||||
return Promise.reject(this._combineRes(rlRej, blRej))
|
||||
} else {
|
||||
return Promise.reject(blRej)
|
||||
}
|
||||
}
|
||||
)
|
||||
} else {
|
||||
return Promise.reject(rlRej)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* It doesn't expose available points from burstLimiter
|
||||
*
|
||||
* @param key
|
||||
* @returns {Promise<RateLimiterRes>}
|
||||
*/
|
||||
get(key) {
|
||||
return Promise.all([
|
||||
this._rateLimiter.get(key),
|
||||
this._burstLimiter.get(key),
|
||||
]).then(([rlRes, blRes]) => {
|
||||
return this._combineRes(rlRes, blRes);
|
||||
});
|
||||
}
|
||||
|
||||
get points() {
|
||||
return this._rateLimiter.points;
|
||||
}
|
||||
};
|
||||
347
framework/node_modules/node-rate-limiter-flexible/lib/ExpressBruteFlexible.js
generated
vendored
Normal file
@@ -0,0 +1,347 @@
|
||||
const {
|
||||
LIMITER_TYPES,
|
||||
ERR_UNKNOWN_LIMITER_TYPE_MESSAGE,
|
||||
} = require('./constants');
|
||||
const crypto = require('crypto');
|
||||
const {
|
||||
RateLimiterMemory,
|
||||
RateLimiterCluster,
|
||||
RateLimiterMemcache,
|
||||
RateLimiterMongo,
|
||||
RateLimiterMySQL,
|
||||
RateLimiterPostgres,
|
||||
RateLimiterRedis,
|
||||
} = require('../index');
|
||||
|
||||
/**
 * Pick the backoff delay for the Nth failed attempt.
 *
 * @param {number} count   1-based failure count.
 * @param {number[]} delays Precomputed delay schedule in milliseconds.
 * @param {number} maxWait  Cap returned when `count` falls outside the schedule.
 * @returns {number} Delay in milliseconds.
 */
function getDelayMs(count, delays, maxWait) {
  const idx = count - 1;
  const withinSchedule = idx >= 0 && idx < delays.length;
  return withinSchedule ? delays[idx] : maxWait;
}
|
||||
|
||||
/**
 * Express-brute-compatible middleware factory built on rate-limiter-flexible.
 *
 * @param {string} limiterType One of ExpressBruteFlexible.LIMITER_TYPES values.
 * @param {Object} [options]   Overrides for ExpressBruteFlexible.defaults.
 * @throws {Error} When `limiterType` is not a known limiter type.
 */
const ExpressBruteFlexible = function (limiterType, options) {
  ExpressBruteFlexible.instanceCount++;
  this.name = `brute${ExpressBruteFlexible.instanceCount}`;

  this.options = Object.assign({}, ExpressBruteFlexible.defaults, options);
  if (this.options.minWait < 1) {
    this.options.minWait = 1;
  }

  const knownTypes = Object.keys(ExpressBruteFlexible.LIMITER_TYPES)
    .map(name => ExpressBruteFlexible.LIMITER_TYPES[name]);
  if (!knownTypes.includes(limiterType)) {
    throw new Error(ERR_UNKNOWN_LIMITER_TYPE_MESSAGE);
  }
  this.limiterType = limiterType;

  // Build a Fibonacci-like backoff schedule starting at minWait and capped
  // at maxWait (the last entry is clamped to exactly maxWait).
  const schedule = [this.options.minWait];
  while (schedule[schedule.length - 1] < this.options.maxWait) {
    const previous = schedule[schedule.length - 1];
    const beforePrevious = schedule.length > 1 ? schedule[schedule.length - 2] : 0;
    schedule.push(previous + beforePrevious);
  }
  schedule[schedule.length - 1] = this.options.maxWait;
  this.delays = schedule;

  // Default record lifetime: long enough to walk the whole schedule plus
  // the free retries, expressed in seconds.
  if (typeof this.options.lifetime === 'undefined') {
    this.options.lifetime = Math.ceil((this.options.maxWait / 1000) * (this.delays.length + this.options.freeRetries));
  }

  this.prevent = this.getMiddleware({ prefix: this.options.prefix });
};
|
||||
|
||||
/**
 * Build an Express middleware that enforces brute-force protection.
 *
 * Creates three limiters sharing the configured store:
 *  - freeLimiter:    grants `freeRetries` requests per `lifetime` without delay;
 *  - blockLimiter:   short-lived record that marks a key as currently delayed;
 *  - counterLimiter: counts delayed attempts over `lifetime` to pick the next
 *                    delay from `this.delays`.
 *
 * @param {Object} [options] - `prefix` (key prefix), `key` (custom key function
 *   or constant), `failCallback` override, `ignoreIP` to exclude req.ip from keys.
 * @returns {Function} Express middleware (req, res, next).
 * @throws {Error} When `this.limiterType` is not a known limiter type.
 */
ExpressBruteFlexible.prototype.getMiddleware = function (options) {
  const opts = Object.assign({}, options);
  const commonKeyPrefix = opts.prefix || '';
  // Free attempts: note points is freeRetries - 1 because the consume that
  // rejects is itself the first non-free attempt.
  const freeLimiterOptions = {
    storeClient: this.options.storeClient,
    storeType: this.options.storeType,
    keyPrefix: `${commonKeyPrefix}free`,
    dbName: this.options.dbName,
    tableName: this.options.tableName,
    points: this.options.freeRetries > 0 ? this.options.freeRetries - 1 : 0,
    duration: this.options.lifetime,
  };

  // Block marker: its duration is overridden per-penalty with the computed
  // delay (customDuration below); base duration is capped by maxWait.
  const blockLimiterOptions = {
    storeClient: this.options.storeClient,
    storeType: this.options.storeType,
    keyPrefix: `${commonKeyPrefix}block`,
    dbName: this.options.dbName,
    tableName: this.options.tableName,
    points: 1,
    duration: Math.min(this.options.lifetime, Math.ceil((this.options.maxWait / 1000))),
  };

  // Counts how many delayed attempts happened within `lifetime`; its value
  // indexes into the delay schedule.
  const counterLimiterOptions = {
    storeClient: this.options.storeClient,
    storeType: this.options.storeType,
    keyPrefix: `${commonKeyPrefix}counter`,
    dbName: this.options.dbName,
    tableName: this.options.tableName,
    points: 1,
    duration: this.options.lifetime,
  };

  // Instantiate the three limiters with the backend chosen at construction.
  switch (this.limiterType) {
    case 'memory':
      this.freeLimiter = new RateLimiterMemory(freeLimiterOptions);
      this.blockLimiter = new RateLimiterMemory(blockLimiterOptions);
      this.counterLimiter = new RateLimiterMemory(counterLimiterOptions);
      break;
    case 'cluster':
      this.freeLimiter = new RateLimiterCluster(freeLimiterOptions);
      this.blockLimiter = new RateLimiterCluster(blockLimiterOptions);
      this.counterLimiter = new RateLimiterCluster(counterLimiterOptions);
      break;
    case 'memcache':
      this.freeLimiter = new RateLimiterMemcache(freeLimiterOptions);
      this.blockLimiter = new RateLimiterMemcache(blockLimiterOptions);
      this.counterLimiter = new RateLimiterMemcache(counterLimiterOptions);
      break;
    case 'mongo':
      this.freeLimiter = new RateLimiterMongo(freeLimiterOptions);
      this.blockLimiter = new RateLimiterMongo(blockLimiterOptions);
      this.counterLimiter = new RateLimiterMongo(counterLimiterOptions);
      break;
    case 'mysql':
      this.freeLimiter = new RateLimiterMySQL(freeLimiterOptions);
      this.blockLimiter = new RateLimiterMySQL(blockLimiterOptions);
      this.counterLimiter = new RateLimiterMySQL(counterLimiterOptions);
      break;
    case 'postgres':
      this.freeLimiter = new RateLimiterPostgres(freeLimiterOptions);
      this.blockLimiter = new RateLimiterPostgres(blockLimiterOptions);
      this.counterLimiter = new RateLimiterPostgres(counterLimiterOptions);
      break;
    case 'redis':
      this.freeLimiter = new RateLimiterRedis(freeLimiterOptions);
      this.blockLimiter = new RateLimiterRedis(blockLimiterOptions);
      this.counterLimiter = new RateLimiterRedis(counterLimiterOptions);
      break;
    default:
      throw new Error(ERR_UNKNOWN_LIMITER_TYPE_MESSAGE);
  }

  // `opts.key` may be a function (req, res, next) => next(key), or a
  // constant; wrap constants in a callback with the same shape.
  let keyFunc = opts.key;
  if (typeof keyFunc !== 'function') {
    keyFunc = function (req, res, next) {
      next(opts.key);
    };
  }

  // Resolved lazily so a per-middleware failCallback override wins over
  // the instance-level default.
  const getFailCallback = (() => (typeof opts.failCallback === 'undefined' ? this.options.failCallback : opts.failCallback));

  return (req, res, next) => {
    // Template for the error object passed to handleStoreError on any
    // store failure; `parent` is attached per-failure below.
    const cannotIncrementErrorObjectBase = {
      req,
      res,
      next,
      message: 'Cannot increment request count',
    };

    keyFunc(req, res, (key) => {
      // Hash the caller key (optionally with req.ip) into a fixed-size key.
      if (!opts.ignoreIP) {
        key = ExpressBruteFlexible._getKey([req.ip, this.name, key]);
      } else {
        key = ExpressBruteFlexible._getKey([this.name, key]);
      }

      // attach a simpler "reset" function to req.brute.reset
      if (this.options.attachResetToRequest) {
        let reset = ((callback) => {
          // Clear all three limiter records for this key.
          Promise.all([
            this.freeLimiter.delete(key),
            this.blockLimiter.delete(key),
            this.counterLimiter.delete(key),
          ]).then(() => {
            if (typeof callback === 'function') {
              process.nextTick(() => {
                callback();
              });
            }
          }).catch((err) => {
            if (typeof callback === 'function') {
              process.nextTick(() => {
                callback(err);
              });
            }
          });
        });

        if (req.brute && req.brute.reset) {
          // wrap existing reset if one exists
          const oldReset = req.brute.reset;
          const newReset = reset;
          reset = function (callback) {
            oldReset(() => {
              newReset(callback);
            });
          };
        }
        req.brute = {
          reset,
        };
      }

      // Fast path: a free retry is still available — pass through.
      this.freeLimiter.consume(key)
        .then(() => {
          if (typeof next === 'function') {
            next();
          }
        })
        .catch(() => {
          // Free retries exhausted: check whether the key is currently
          // blocked and how many delayed attempts it has accumulated.
          Promise.all([
            this.blockLimiter.get(key),
            this.counterLimiter.get(key),
          ])
            .then((allRes) => {
              const [blockRes, counterRes] = allRes;

              if (blockRes === null) {
                // Not currently blocked: start a new delay window sized by
                // the attempt count (counter + 1 since this attempt counts).
                const msDelay = getDelayMs(
                  counterRes ? counterRes.consumedPoints + 1 : 1,
                  this.delays,
                  // eslint-disable-next-line
                  this.options.maxWait
                );

                this.blockLimiter.penalty(key, 1, { customDuration: Math.ceil(msDelay / 1000) })
                  .then((blockPenaltyRes) => {
                    if (blockPenaltyRes.consumedPoints === 1) {
                      // We won the race to create the block record: count
                      // this delayed attempt and let the request through.
                      this.counterLimiter.penalty(key)
                        .then(() => {
                          if (typeof next === 'function') {
                            next();
                          }
                        })
                        .catch((err) => {
                          this.options.handleStoreError(Object.assign({}, cannotIncrementErrorObjectBase, { parent: err }));
                        });
                    } else {
                      // Another request created the block first: reject with
                      // the time when the block expires.
                      const nextValidDate = new Date(Date.now() + blockPenaltyRes.msBeforeNext);

                      const failCallback = getFailCallback();
                      if (typeof failCallback === 'function') {
                        failCallback(req, res, next, nextValidDate);
                      }
                    }
                  })
                  .catch((err) => {
                    this.options.handleStoreError(Object.assign({}, cannotIncrementErrorObjectBase, { parent: err }));
                  });
              } else {
                // Already blocked: reject with the remaining block time.
                const nextValidDate = new Date(Date.now() + blockRes.msBeforeNext);

                const failCallback = getFailCallback();
                if (typeof failCallback === 'function') {
                  failCallback(req, res, next, nextValidDate);
                }
              }
            })
            .catch((err) => {
              this.options.handleStoreError(Object.assign({}, cannotIncrementErrorObjectBase, { parent: err }));
            });
        });
    });
  };
};
|
||||
|
||||
/**
 * Clear all stored state (free, block and counter records) for a key.
 *
 * @param {string} [ip]      Optional IP included in the hashed key.
 * @param {string} key       The caller-supplied key.
 * @param {Function} [callback] Invoked on next tick after successful deletion.
 */
ExpressBruteFlexible.prototype.reset = function (ip, key, callback) {
  // Key must be assembled exactly like the middleware does: [ip?, name, key].
  const keyParts = ip ? [ip, this.name, key] : [this.name, key];
  const ebKey = ExpressBruteFlexible._getKey(keyParts);

  Promise.all([
    this.freeLimiter.delete(ebKey),
    this.blockLimiter.delete(ebKey),
    this.counterLimiter.delete(ebKey),
  ]).then(() => {
    if (typeof callback === 'function') {
      process.nextTick(() => {
        callback();
      });
    }
  }).catch((err) => {
    // Store failures are routed through the configured error handler
    // rather than the callback.
    this.options.handleStoreError({
      message: 'Cannot reset request count',
      parent: err,
      key,
      ip,
    });
  });
};
|
||||
|
||||
/**
 * Derive a fixed-size storage key from an array of key parts.
 * Each truthy part is SHA-256 hashed (base64) and the concatenation is
 * hashed once more, so arbitrary-length inputs map to a bounded key.
 *
 * @param {Array<string>} arr Key parts; falsy entries are skipped.
 * @returns {string} Base64-encoded SHA-256 digest.
 */
ExpressBruteFlexible._getKey = function (arr) {
  const sha256b64 = value => crypto.createHash('sha256').update(value).digest('base64');
  const combined = arr
    .filter(part => Boolean(part))
    .map(sha256b64)
    .join('');
  return sha256b64(combined);
};
|
||||
|
||||
/**
 * Set the standard Retry-After header (whole seconds, rounded up) from
 * the date when the next request becomes valid.
 *
 * @param res Express-style response with a `header(name, value)` method.
 * @param {Date} nextValidRequestDate When the client may retry.
 */
const setRetryAfter = function (res, nextValidRequestDate) {
  const msRemaining = nextValidRequestDate.getTime() - Date.now();
  res.header('Retry-After', Math.ceil(msRemaining / 1000));
};
|
||||
/**
 * Default fail callback: respond 429 with a Retry-After header and a JSON
 * body describing when the next request becomes valid.
 */
ExpressBruteFlexible.FailTooManyRequests = function (req, res, next, nextValidRequestDate) {
  setRetryAfter(res, nextValidRequestDate);
  const payload = {
    error: {
      text: 'Too many requests in this time frame.',
      nextValidRequestDate,
    },
  };
  res.status(429);
  res.send(payload);
};
|
||||
/**
 * Alternative fail callback: same body and Retry-After header as
 * FailTooManyRequests but with a 403 Forbidden status.
 */
ExpressBruteFlexible.FailForbidden = function (req, res, next, nextValidRequestDate) {
  setRetryAfter(res, nextValidRequestDate);
  const payload = {
    error: {
      text: 'Too many requests in this time frame.',
      nextValidRequestDate,
    },
  };
  res.status(403);
  res.send(payload);
};
|
||||
/**
 * Fail callback that marks the response (429 + Retry-After) but does not
 * send a body; it calls next() so downstream middleware can render its own
 * response, reading `res.nextValidRequestDate`.
 */
ExpressBruteFlexible.FailMark = function (req, res, next, nextValidRequestDate) {
  res.status(429);
  setRetryAfter(res, nextValidRequestDate);
  // Expose the date on the response so downstream handlers can display it.
  res.nextValidRequestDate = nextValidRequestDate;
  next();
};
|
||||
|
||||
// Default options merged under user-supplied options in the constructor.
ExpressBruteFlexible.defaults = {
  freeRetries: 2,            // attempts allowed per lifetime before delays start
  attachResetToRequest: true, // expose req.brute.reset in the middleware
  minWait: 500,              // first delay in ms (clamped to >= 1 by constructor)
  maxWait: 1000 * 60 * 15,   // delay cap: 15 minutes
  failCallback: ExpressBruteFlexible.FailTooManyRequests,
  // Called on any store failure; default rethrows a plain object
  // (kept for express-brute compatibility — intentionally not an Error).
  handleStoreError(err) {
    // eslint-disable-next-line
    throw {
      message: err.message,
      parent: err.parent,
    };
  },
};

// Valid values for the constructor's `limiterType` argument.
ExpressBruteFlexible.LIMITER_TYPES = LIMITER_TYPES;

// Incremented per instance to build unique middleware names ("brute1", ...).
ExpressBruteFlexible.instanceCount = 0;


module.exports = ExpressBruteFlexible;
|
||||
195
framework/node_modules/node-rate-limiter-flexible/lib/RLWrapperBlackAndWhite.js
generated
vendored
Normal file
@@ -0,0 +1,195 @@
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
module.exports = class RLWrapperBlackAndWhite {
|
||||
constructor(opts = {}) {
|
||||
this.limiter = opts.limiter;
|
||||
this.blackList = opts.blackList;
|
||||
this.whiteList = opts.whiteList;
|
||||
this.isBlackListed = opts.isBlackListed;
|
||||
this.isWhiteListed = opts.isWhiteListed;
|
||||
this.runActionAnyway = opts.runActionAnyway;
|
||||
}
|
||||
|
||||
get limiter() {
|
||||
return this._limiter;
|
||||
}
|
||||
|
||||
set limiter(value) {
|
||||
if (typeof value === 'undefined') {
|
||||
throw new Error('limiter is not set');
|
||||
}
|
||||
|
||||
this._limiter = value;
|
||||
}
|
||||
|
||||
get runActionAnyway() {
|
||||
return this._runActionAnyway;
|
||||
}
|
||||
|
||||
set runActionAnyway(value) {
|
||||
this._runActionAnyway = typeof value === 'undefined' ? false : value;
|
||||
}
|
||||
|
||||
get blackList() {
|
||||
return this._blackList;
|
||||
}
|
||||
|
||||
set blackList(value) {
|
||||
this._blackList = Array.isArray(value) ? value : [];
|
||||
}
|
||||
|
||||
get isBlackListed() {
|
||||
return this._isBlackListed;
|
||||
}
|
||||
|
||||
set isBlackListed(func) {
|
||||
if (typeof func === 'undefined') {
|
||||
func = () => false;
|
||||
}
|
||||
if (typeof func !== 'function') {
|
||||
throw new Error('isBlackListed must be function');
|
||||
}
|
||||
this._isBlackListed = func;
|
||||
}
|
||||
|
||||
get whiteList() {
|
||||
return this._whiteList;
|
||||
}
|
||||
|
||||
set whiteList(value) {
|
||||
this._whiteList = Array.isArray(value) ? value : [];
|
||||
}
|
||||
|
||||
get isWhiteListed() {
|
||||
return this._isWhiteListed;
|
||||
}
|
||||
|
||||
set isWhiteListed(func) {
|
||||
if (typeof func === 'undefined') {
|
||||
func = () => false;
|
||||
}
|
||||
if (typeof func !== 'function') {
|
||||
throw new Error('isWhiteListed must be function');
|
||||
}
|
||||
this._isWhiteListed = func;
|
||||
}
|
||||
|
||||
isBlackListedSomewhere(key) {
|
||||
return this.blackList.indexOf(key) >= 0 || this.isBlackListed(key);
|
||||
}
|
||||
|
||||
isWhiteListedSomewhere(key) {
|
||||
return this.whiteList.indexOf(key) >= 0 || this.isWhiteListed(key);
|
||||
}
|
||||
|
||||
getBlackRes() {
|
||||
return new RateLimiterRes(0, Number.MAX_SAFE_INTEGER, 0, false);
|
||||
}
|
||||
|
||||
getWhiteRes() {
|
||||
return new RateLimiterRes(Number.MAX_SAFE_INTEGER, 0, 0, false);
|
||||
}
|
||||
|
||||
rejectBlack() {
|
||||
return Promise.reject(this.getBlackRes());
|
||||
}
|
||||
|
||||
resolveBlack() {
|
||||
return Promise.resolve(this.getBlackRes());
|
||||
}
|
||||
|
||||
resolveWhite() {
|
||||
return Promise.resolve(this.getWhiteRes());
|
||||
}
|
||||
|
||||
consume(key, pointsToConsume = 1) {
|
||||
let res;
|
||||
if (this.isWhiteListedSomewhere(key)) {
|
||||
res = this.resolveWhite();
|
||||
} else if (this.isBlackListedSomewhere(key)) {
|
||||
res = this.rejectBlack();
|
||||
}
|
||||
|
||||
if (typeof res === 'undefined') {
|
||||
return this.limiter.consume(key, pointsToConsume);
|
||||
}
|
||||
|
||||
if (this.runActionAnyway) {
|
||||
this.limiter.consume(key, pointsToConsume).catch(() => {});
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
block(key, secDuration) {
|
||||
let res;
|
||||
if (this.isWhiteListedSomewhere(key)) {
|
||||
res = this.resolveWhite();
|
||||
} else if (this.isBlackListedSomewhere(key)) {
|
||||
res = this.resolveBlack();
|
||||
}
|
||||
|
||||
if (typeof res === 'undefined') {
|
||||
return this.limiter.block(key, secDuration);
|
||||
}
|
||||
|
||||
if (this.runActionAnyway) {
|
||||
this.limiter.block(key, secDuration).catch(() => {});
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
penalty(key, points) {
|
||||
let res;
|
||||
if (this.isWhiteListedSomewhere(key)) {
|
||||
res = this.resolveWhite();
|
||||
} else if (this.isBlackListedSomewhere(key)) {
|
||||
res = this.resolveBlack();
|
||||
}
|
||||
|
||||
if (typeof res === 'undefined') {
|
||||
return this.limiter.penalty(key, points);
|
||||
}
|
||||
|
||||
if (this.runActionAnyway) {
|
||||
this.limiter.penalty(key, points).catch(() => {});
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
reward(key, points) {
|
||||
let res;
|
||||
if (this.isWhiteListedSomewhere(key)) {
|
||||
res = this.resolveWhite();
|
||||
} else if (this.isBlackListedSomewhere(key)) {
|
||||
res = this.resolveBlack();
|
||||
}
|
||||
|
||||
if (typeof res === 'undefined') {
|
||||
return this.limiter.reward(key, points);
|
||||
}
|
||||
|
||||
if (this.runActionAnyway) {
|
||||
this.limiter.reward(key, points).catch(() => {});
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
get(key) {
|
||||
let res;
|
||||
if (this.isWhiteListedSomewhere(key)) {
|
||||
res = this.resolveWhite();
|
||||
} else if (this.isBlackListedSomewhere(key)) {
|
||||
res = this.resolveBlack();
|
||||
}
|
||||
|
||||
if (typeof res === 'undefined' || this.runActionAnyway) {
|
||||
return this.limiter.get(key);
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
// Deletion is always delegated; list membership does not shield a key from it.
delete(key) {
  return this.limiter.delete(key);
}
|
||||
};
|
||||
125
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterAbstract.js
generated
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
module.exports = class RateLimiterAbstract {
|
||||
/**
|
||||
*
|
||||
* @param opts Object Defaults {
|
||||
* points: 4, // Number of points
|
||||
* duration: 1, // Per seconds
|
||||
* blockDuration: 0, // Block if consumed more than points in current duration for blockDuration seconds
|
||||
* execEvenly: false, // Execute allowed actions evenly over duration
|
||||
* execEvenlyMinDelayMs: duration * 1000 / points, // ms, works with execEvenly=true option
|
||||
* keyPrefix: 'rlflx',
|
||||
* }
|
||||
*/
|
||||
constructor(opts = {}) {
|
||||
this.points = opts.points;
|
||||
this.duration = opts.duration;
|
||||
this.blockDuration = opts.blockDuration;
|
||||
this.execEvenly = opts.execEvenly;
|
||||
this.execEvenlyMinDelayMs = opts.execEvenlyMinDelayMs;
|
||||
this.keyPrefix = opts.keyPrefix;
|
||||
}
|
||||
|
||||
get points() {
|
||||
return this._points;
|
||||
}
|
||||
|
||||
set points(value) {
|
||||
this._points = value >= 0 ? value : 4;
|
||||
}
|
||||
|
||||
get duration() {
|
||||
return this._duration;
|
||||
}
|
||||
|
||||
set duration(value) {
|
||||
this._duration = typeof value === 'undefined' ? 1 : value;
|
||||
}
|
||||
|
||||
get msDuration() {
|
||||
return this.duration * 1000;
|
||||
}
|
||||
|
||||
get blockDuration() {
|
||||
return this._blockDuration;
|
||||
}
|
||||
|
||||
set blockDuration(value) {
|
||||
this._blockDuration = typeof value === 'undefined' ? 0 : value;
|
||||
}
|
||||
|
||||
get msBlockDuration() {
|
||||
return this.blockDuration * 1000;
|
||||
}
|
||||
|
||||
get execEvenly() {
|
||||
return this._execEvenly;
|
||||
}
|
||||
|
||||
set execEvenly(value) {
|
||||
this._execEvenly = typeof value === 'undefined' ? false : Boolean(value);
|
||||
}
|
||||
|
||||
get execEvenlyMinDelayMs() {
|
||||
return this._execEvenlyMinDelayMs;
|
||||
}
|
||||
|
||||
set execEvenlyMinDelayMs(value) {
|
||||
this._execEvenlyMinDelayMs = typeof value === 'undefined' ? Math.ceil(this.msDuration / this.points) : value;
|
||||
}
|
||||
|
||||
get keyPrefix() {
|
||||
return this._keyPrefix;
|
||||
}
|
||||
|
||||
set keyPrefix(value) {
|
||||
if (typeof value === 'undefined') {
|
||||
value = 'rlflx';
|
||||
}
|
||||
if (typeof value !== 'string') {
|
||||
throw new Error('keyPrefix must be string');
|
||||
}
|
||||
this._keyPrefix = value;
|
||||
}
|
||||
|
||||
_getKeySecDuration(options = {}) {
|
||||
return options && options.customDuration >= 0
|
||||
? options.customDuration
|
||||
: this.duration;
|
||||
}
|
||||
|
||||
getKey(key) {
|
||||
return this.keyPrefix.length > 0 ? `${this.keyPrefix}:${key}` : key;
|
||||
}
|
||||
|
||||
parseKey(rlKey) {
|
||||
return rlKey.substring(this.keyPrefix.length);
|
||||
}
|
||||
|
||||
consume() {
|
||||
throw new Error("You have to implement the method 'consume'!");
|
||||
}
|
||||
|
||||
penalty() {
|
||||
throw new Error("You have to implement the method 'penalty'!");
|
||||
}
|
||||
|
||||
reward() {
|
||||
throw new Error("You have to implement the method 'reward'!");
|
||||
}
|
||||
|
||||
get() {
|
||||
throw new Error("You have to implement the method 'get'!");
|
||||
}
|
||||
|
||||
set() {
|
||||
throw new Error("You have to implement the method 'set'!");
|
||||
}
|
||||
|
||||
block() {
|
||||
throw new Error("You have to implement the method 'block'!");
|
||||
}
|
||||
|
||||
delete() {
|
||||
throw new Error("You have to implement the method 'delete'!");
|
||||
}
|
||||
};
|
||||
367
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterCluster.js
generated
vendored
Normal file
@@ -0,0 +1,367 @@
|
||||
/**
|
||||
* Implements rate limiting in cluster using built-in IPC
|
||||
*
|
||||
* Two classes are described here: master and worker
|
||||
* Master have to be create in the master process without any options.
|
||||
* Any number of rate limiters can be created in workers, but each rate limiter must be with unique keyPrefix
|
||||
*
|
||||
* Workflow:
|
||||
* 1. master rate limiter created in master process
|
||||
* 2. worker rate limiter sends 'init' message with necessary options during creating
|
||||
* 3. master receives options and adds new rate limiter by keyPrefix if it isn't created yet
|
||||
* 4. master sends 'init' back to worker's rate limiter
|
||||
* 5. worker can process requests immediately,
|
||||
* but they will be postponed by 'workerWaitInit' until master sends 'init' to worker
|
||||
* 6. every request to worker rate limiter creates a promise
|
||||
* 7. if master doesn't response for 'timeout', promise is rejected
|
||||
* 8. master sends 'resolve' or 'reject' command to worker
|
||||
* 9. worker resolves or rejects promise depending on message from master
|
||||
*
|
||||
*/
|
||||
|
||||
const cluster = require('cluster');
|
||||
const crypto = require('crypto');
|
||||
const RateLimiterAbstract = require('./RateLimiterAbstract');
|
||||
const RateLimiterMemory = require('./RateLimiterMemory');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
const channel = 'rate_limiter_flexible';
|
||||
let masterInstance = null;
|
||||
|
||||
// Send a resolve/reject answer from the master back to a worker.
// Primitive results (null / booleans) pass through as-is; RateLimiterRes-like
// objects are flattened to a plain serializable payload.
const masterSendToWorker = function (worker, msg, type, res) {
  const data = (res === null || typeof res === 'boolean')
    ? res
    : {
      remainingPoints: res.remainingPoints,
      msBeforeNext: res.msBeforeNext,
      consumedPoints: res.consumedPoints,
      isFirstInDuration: res.isFirstInDuration,
    };

  worker.send({
    channel,
    keyPrefix: msg.keyPrefix, // which rate limiter exactly
    promiseId: msg.promiseId,
    type,
    data,
  });
};
|
||||
|
||||
// Poll every 30ms until the master acknowledged this limiter's 'init',
// then deliver the queued payload. Stops silently once the pending promise
// has been reaped by its timeout.
const workerWaitInit = function (payload) {
  setTimeout(() => {
    if (this._initiated) {
      process.send(payload);
    } else if (this._promises[payload.promiseId] !== undefined) {
      workerWaitInit.call(this, payload);
    }
  }, 30);
};
|
||||
|
||||
// Forward one limiter operation from a worker to the master over IPC.
const workerSendToMaster = function (func, promiseId, key, arg, opts) {
  const payload = {
    channel,
    keyPrefix: this.keyPrefix,
    func,
    promiseId,
    data: { key, arg, opts },
  };

  if (this._initiated) {
    process.send(payload);
  } else {
    // Hold the message until the master confirms our 'init'.
    workerWaitInit.call(this, payload);
  }
};
|
||||
|
||||
// Dispatch a worker request to the matching limiter on the master and send
// the outcome back. Returns false for messages we do not recognize.
const masterProcessMsg = function (worker, msg) {
  if (!msg || msg.channel !== channel || typeof this._rateLimiters[msg.keyPrefix] === 'undefined') {
    return false;
  }

  const limiter = this._rateLimiters[msg.keyPrefix];
  const { key, arg, opts } = msg.data;

  let promise;
  if (msg.func === 'consume' || msg.func === 'penalty' || msg.func === 'reward' || msg.func === 'block') {
    // These four take (key, amount, opts).
    promise = limiter[msg.func](key, arg, opts);
  } else if (msg.func === 'get' || msg.func === 'delete') {
    // These two take (key, opts) only.
    promise = limiter[msg.func](key, opts);
  } else {
    return false;
  }

  if (promise) {
    promise
      .then(res => masterSendToWorker(worker, msg, 'resolve', res))
      .catch(rejRes => masterSendToWorker(worker, msg, 'reject', rejRes));
  }
};
|
||||
|
||||
// Settle the pending worker promise matching a master answer.
// Primitive payloads pass through; object payloads are revived as RateLimiterRes.
const workerProcessMsg = function (msg) {
  if (!msg || msg.channel !== channel || msg.keyPrefix !== this.keyPrefix) {
    return false;
  }

  const pending = this._promises[msg.promiseId];
  if (!pending) {
    return;
  }

  clearTimeout(pending.timeoutId);

  const payload = (msg.data === null || typeof msg.data === 'boolean')
    ? msg.data
    : new RateLimiterRes(
      msg.data.remainingPoints,
      msg.data.msBeforeNext,
      msg.data.consumedPoints,
      msg.data.isFirstInDuration // eslint-disable-line comma-dangle
    );

  if (msg.type === 'resolve') {
    pending.resolve(payload);
  } else if (msg.type === 'reject') {
    pending.reject(payload);
  } else {
    throw new Error(`RateLimiterCluster: no such message type '${msg.type}'`);
  }

  delete this._promises[msg.promiseId];
};
|
||||
/**
|
||||
* Prepare options to send to master
|
||||
* Master will create rate limiter depending on options
|
||||
*
|
||||
* @returns {{points: *, duration: *, blockDuration: *, execEvenly: *, execEvenlyMinDelayMs: *, keyPrefix: *}}
|
||||
*/
|
||||
/**
 * Prepare options to send to master
 * Master will create rate limiter depending on options
 *
 * @returns {{points: *, duration: *, blockDuration: *, execEvenly: *, execEvenlyMinDelayMs: *, keyPrefix: *}}
 */
const getOpts = function () {
  const {
    points, duration, blockDuration, execEvenly, execEvenlyMinDelayMs, keyPrefix,
  } = this;

  return {
    points, duration, blockDuration, execEvenly, execEvenlyMinDelayMs, keyPrefix,
  };
};
|
||||
|
||||
// Register a pending promise keyed by a quasi-unique id and arm its timeout.
// Returns the id so the caller can reference it in the IPC message.
const savePromise = function (resolve, reject) {
  const [sec, nsec] = process.hrtime();
  let promiseId = `${sec}${nsec}`;

  if (this._promises[promiseId] !== undefined) {
    // Extremely unlikely hrtime collision — append randomness.
    promiseId += crypto.randomBytes(12).toString('base64');
  }

  this._promises[promiseId] = {
    resolve,
    reject,
    timeoutId: setTimeout(() => {
      delete this._promises[promiseId];
      reject(new Error('RateLimiterCluster timeout: no answer from master in time'));
    }, this.timeoutMs),
  };

  return promiseId;
};
|
||||
|
||||
class RateLimiterClusterMaster {
  constructor() {
    // Singleton: a second construction returns the first instance.
    if (masterInstance) {
      return masterInstance;
    }

    this._rateLimiters = {};

    cluster.setMaxListeners(0);

    cluster.on('message', (worker, msg) => {
      const isInit = msg && msg.channel === channel && msg.type === 'init';

      if (!isInit) {
        masterProcessMsg.call(this, worker, msg);
        return;
      }

      // Init request: lazily create one in-memory limiter per keyPrefix,
      // then acknowledge so the worker can start sending requests.
      if (typeof this._rateLimiters[msg.opts.keyPrefix] === 'undefined') {
        this._rateLimiters[msg.opts.keyPrefix] = new RateLimiterMemory(msg.opts);
      }

      worker.send({
        channel,
        type: 'init',
        keyPrefix: msg.opts.keyPrefix,
      });
    });

    masterInstance = this;
  }
}
|
||||
|
||||
class RateLimiterClusterMasterPM2 {
  constructor(pm2) {
    // Singleton: a second construction returns the first instance.
    if (masterInstance) {
      return masterInstance;
    }

    this._rateLimiters = {};

    pm2.launchBus((err, pm2Bus) => {
      pm2Bus.on('process:msg', (packet) => {
        const msg = packet.raw;
        const isInit = msg && msg.channel === channel && msg.type === 'init';

        if (isInit) {
          // Init request: lazily create one in-memory limiter per keyPrefix,
          // then acknowledge via the pm2 bus.
          if (typeof this._rateLimiters[msg.opts.keyPrefix] === 'undefined') {
            this._rateLimiters[msg.opts.keyPrefix] = new RateLimiterMemory(msg.opts);
          }

          pm2.sendDataToProcessId(packet.process.pm_id, {
            data: {},
            topic: channel,
            channel,
            type: 'init',
            keyPrefix: msg.opts.keyPrefix,
          }, (sendErr, res) => {
            if (sendErr) {
              console.log(sendErr, res);
            }
          });
          return;
        }

        // Adapt the pm2 bus to the worker.send(...) shape masterProcessMsg expects.
        const workerAdapter = {
          send: (msgData) => {
            const pm2Message = msgData;
            pm2Message.topic = channel;
            if (typeof pm2Message.data === 'undefined') {
              pm2Message.data = {};
            }
            pm2.sendDataToProcessId(packet.process.pm_id, pm2Message, (sendErr, res) => {
              if (sendErr) {
                console.log(sendErr, res);
              }
            });
          },
        };
        masterProcessMsg.call(this, workerAdapter, msg);
      });
    });

    masterInstance = this;
  }
}
|
||||
|
||||
class RateLimiterClusterWorker extends RateLimiterAbstract {
  // Milliseconds to wait for the master's answer before rejecting a request.
  get timeoutMs() {
    return this._timeoutMs;
  }

  // Defaults to 5000ms. Fix: parseInt now gets an explicit radix of 10 so
  // string values like "08" or "0x10" are parsed as decimal consistently.
  set timeoutMs(value) {
    this._timeoutMs = typeof value === 'undefined' ? 5000 : Math.abs(parseInt(value, 10));
  }

  /**
   * Worker-side limiter facade: every operation is proxied to the master
   * over IPC and settled when the master answers (or the timeout fires).
   *
   * @param opts same as RateLimiterAbstract, plus timeoutMs.
   */
  constructor(opts = {}) {
    super(opts);

    process.setMaxListeners(0);

    this.timeoutMs = opts.timeoutMs;

    this._initiated = false;

    process.on('message', (msg) => {
      if (msg && msg.channel === channel && msg.type === 'init' && msg.keyPrefix === this.keyPrefix) {
        // Master confirmed our limiter exists; queued requests may flow now.
        this._initiated = true;
      } else {
        workerProcessMsg.call(this, msg);
      }
    });

    // Create limiter on master with specific options
    process.send({
      channel,
      type: 'init',
      opts: getOpts.call(this),
    });

    // Pending request promises keyed by promiseId.
    this._promises = {};
  }

  consume(key, pointsToConsume = 1, options = {}) {
    return new Promise((resolve, reject) => {
      const promiseId = savePromise.call(this, resolve, reject);

      workerSendToMaster.call(this, 'consume', promiseId, key, pointsToConsume, options);
    });
  }

  penalty(key, points = 1, options = {}) {
    return new Promise((resolve, reject) => {
      const promiseId = savePromise.call(this, resolve, reject);

      workerSendToMaster.call(this, 'penalty', promiseId, key, points, options);
    });
  }

  reward(key, points = 1, options = {}) {
    return new Promise((resolve, reject) => {
      const promiseId = savePromise.call(this, resolve, reject);

      workerSendToMaster.call(this, 'reward', promiseId, key, points, options);
    });
  }

  block(key, secDuration, options = {}) {
    return new Promise((resolve, reject) => {
      const promiseId = savePromise.call(this, resolve, reject);

      workerSendToMaster.call(this, 'block', promiseId, key, secDuration, options);
    });
  }

  get(key, options = {}) {
    return new Promise((resolve, reject) => {
      const promiseId = savePromise.call(this, resolve, reject);

      workerSendToMaster.call(this, 'get', promiseId, key, options);
    });
  }

  delete(key, options = {}) {
    return new Promise((resolve, reject) => {
      const promiseId = savePromise.call(this, resolve, reject);

      workerSendToMaster.call(this, 'delete', promiseId, key, options);
    });
  }
}
|
||||
|
||||
// Public API: the master classes run in the cluster primary / PM2 god process;
// RateLimiterCluster is the worker-side facade.
module.exports = {
  RateLimiterClusterMaster,
  RateLimiterClusterMasterPM2,
  RateLimiterCluster: RateLimiterClusterWorker,
};
|
||||
150
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterMemcache.js
generated
vendored
Normal file
@@ -0,0 +1,150 @@
|
||||
const RateLimiterStoreAbstract = require('./RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
class RateLimiterMemcache extends RateLimiterStoreAbstract {
  /**
   *
   * @param {Object} opts
   * Defaults {
   *   ... see other in RateLimiterStoreAbstract
   *
   *   storeClient: memcacheClient
   * }
   */
  constructor(opts) {
    super(opts);

    this.client = opts.storeClient;
  }

  // Build a RateLimiterRes from the plain { consumedPoints, msBeforeNext }
  // object produced by _upsert/_get.
  _getRateLimiterRes(rlKey, changedPoints, result) {
    const res = new RateLimiterRes();
    res.consumedPoints = parseInt(result.consumedPoints);
    // First hit in the window iff this increment created the whole total.
    res.isFirstInDuration = result.consumedPoints === changedPoints;
    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    res.msBeforeNext = result.msBeforeNext;

    return res;
  }

  // Increment (or create) the counter for rlKey. Memcache has no way to read
  // a key's TTL, so a companion `<rlKey>_expire` key stores the absolute
  // expiry timestamp (ms), or -1 for "never expires".
  _upsert(rlKey, points, msDuration, forceExpire = false, options = {}) {
    return new Promise((resolve, reject) => {
      const nowMs = Date.now();
      const secDuration = Math.floor(msDuration / 1000);

      if (forceExpire) {
        // Overwrite unconditionally (used by block/set).
        this.client.set(rlKey, points, secDuration, (err) => {
          if (!err) {
            this.client.set(
              `${rlKey}_expire`,
              secDuration > 0 ? nowMs + (secDuration * 1000) : -1,
              secDuration,
              () => {
                const res = {
                  consumedPoints: points,
                  msBeforeNext: secDuration > 0 ? secDuration * 1000 : -1,
                };
                resolve(res);
              }
            );
          } else {
            reject(err);
          }
        });
      } else {
        // Normal path: atomic increment; incr fails if the key doesn't exist.
        this.client.incr(rlKey, points, (err, consumedPoints) => {
          if (err || consumedPoints === false) {
            // Key missing: `add` creates it only if still absent (atomic).
            this.client.add(rlKey, points, secDuration, (errAddKey, createdNew) => {
              if (errAddKey || !createdNew) {
                // Try to upsert again in case of race condition
                if (typeof options.attemptNumber === 'undefined' || options.attemptNumber < 3) {
                  const nextOptions = Object.assign({}, options);
                  nextOptions.attemptNumber = nextOptions.attemptNumber ? (nextOptions.attemptNumber + 1) : 1;

                  this._upsert(rlKey, points, msDuration, forceExpire, nextOptions)
                    .then(resUpsert => resolve(resUpsert))
                    .catch(errUpsert => reject(errUpsert));
                } else {
                  reject(new Error('Can not add key'));
                }
              } else {
                // We created the key; record its absolute expiry alongside it.
                this.client.add(
                  `${rlKey}_expire`,
                  secDuration > 0 ? nowMs + (secDuration * 1000) : -1,
                  secDuration,
                  () => {
                    const res = {
                      consumedPoints: points,
                      msBeforeNext: secDuration > 0 ? secDuration * 1000 : -1,
                    };
                    resolve(res);
                  }
                );
              }
            });
          } else {
            // Incremented an existing key: read its expiry to compute msBeforeNext.
            this.client.get(`${rlKey}_expire`, (errGetExpire, resGetExpireMs) => {
              if (errGetExpire) {
                reject(errGetExpire);
              } else {
                const expireMs = resGetExpireMs === false ? 0 : resGetExpireMs;
                const res = {
                  consumedPoints,
                  msBeforeNext: expireMs >= 0 ? Math.max(expireMs - nowMs, 0) : -1,
                };
                resolve(res);
              }
            });
          }
        });
      }
    });
  }

  // Read the current counter; resolves null when the key is absent/expired.
  _get(rlKey) {
    return new Promise((resolve, reject) => {
      const nowMs = Date.now();

      this.client.get(rlKey, (err, consumedPoints) => {
        if (!consumedPoints) {
          resolve(null);
        } else {
          this.client.get(`${rlKey}_expire`, (errGetExpire, resGetExpireMs) => {
            if (errGetExpire) {
              reject(errGetExpire);
            } else {
              const expireMs = resGetExpireMs === false ? 0 : resGetExpireMs;
              const res = {
                consumedPoints,
                msBeforeNext: expireMs >= 0 ? Math.max(expireMs - nowMs, 0) : -1,
              };
              resolve(res);
            }
          });
        }
      });
    });
  }

  // Delete both the counter and its companion expiry key.
  // Resolves false if the counter key did not exist.
  _delete(rlKey) {
    return new Promise((resolve, reject) => {
      this.client.del(rlKey, (err, res) => {
        if (err) {
          reject(err);
        } else if (res === false) {
          resolve(res);
        } else {
          this.client.del(`${rlKey}_expire`, (errDelExpire) => {
            if (errDelExpire) {
              reject(errDelExpire);
            } else {
              resolve(res);
            }
          });
        }
      });
    });
  }
}

module.exports = RateLimiterMemcache;
|
||||
106
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterMemory.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
|
||||
const RateLimiterAbstract = require('./RateLimiterAbstract');
|
||||
const MemoryStorage = require('./component/MemoryStorage/MemoryStorage');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
class RateLimiterMemory extends RateLimiterAbstract {
  constructor(opts = {}) {
    super(opts);

    this._memoryStorage = new MemoryStorage();
  }

  /**
   * Consume points for a key; rejects with the limiter result when over limit.
   *
   * @param key
   * @param pointsToConsume
   * @param {Object} options
   * @returns {Promise<RateLimiterRes>}
   */
  consume(key, pointsToConsume = 1, options = {}) {
    return new Promise((resolve, reject) => {
      const storageKey = this.getKey(key);
      const windowSec = this._getKeySecDuration(options);
      let result = this._memoryStorage.incrby(storageKey, pointsToConsume, windowSec);
      result.remainingPoints = Math.max(this.points - result.consumedPoints, 0);

      if (result.consumedPoints > this.points) {
        // Block only first time when consumed more than points
        if (this.blockDuration > 0 && result.consumedPoints <= (this.points + pointsToConsume)) {
          result = this._memoryStorage.set(storageKey, result.consumedPoints, this.blockDuration);
        }
        reject(result);
        return;
      }

      if (this.execEvenly && result.msBeforeNext > 0 && !result.isFirstInDuration) {
        // Spread allowed actions across the remainder of the window.
        let delayMs = Math.ceil(result.msBeforeNext / (result.remainingPoints + 2));
        if (delayMs < this.execEvenlyMinDelayMs) {
          delayMs = result.consumedPoints * this.execEvenlyMinDelayMs;
        }
        setTimeout(resolve, delayMs, result);
        return;
      }

      resolve(result);
    });
  }

  // Add penalty points without limit checks.
  penalty(key, points = 1, options = {}) {
    const storageKey = this.getKey(key);
    return new Promise((resolve) => {
      const windowSec = this._getKeySecDuration(options);
      const result = this._memoryStorage.incrby(storageKey, points, windowSec);
      result.remainingPoints = Math.max(this.points - result.consumedPoints, 0);
      resolve(result);
    });
  }

  // Give points back (negative increment).
  reward(key, points = 1, options = {}) {
    const storageKey = this.getKey(key);
    return new Promise((resolve) => {
      const windowSec = this._getKeySecDuration(options);
      const result = this._memoryStorage.incrby(storageKey, -points, windowSec);
      result.remainingPoints = Math.max(this.points - result.consumedPoints, 0);
      resolve(result);
    });
  }

  /**
   * Block any key for secDuration seconds
   *
   * @param key
   * @param secDuration
   */
  block(key, secDuration) {
    const msDuration = secDuration * 1000;
    // points + 1 guarantees the key reads as over-limit for the whole block.
    const initPoints = this.points + 1;

    this._memoryStorage.set(this.getKey(key), initPoints, secDuration);
    return Promise.resolve(
      new RateLimiterRes(0, msDuration === 0 ? -1 : msDuration, initPoints)
    );
  }

  // Set an exact consumed-points value for a key.
  set(key, points, secDuration) {
    const msDuration = (secDuration >= 0 ? secDuration : this.duration) * 1000;

    this._memoryStorage.set(this.getKey(key), points, secDuration);
    return Promise.resolve(
      new RateLimiterRes(0, msDuration === 0 ? -1 : msDuration, points)
    );
  }

  // Read current state; resolves null for unknown/expired keys.
  get(key) {
    const result = this._memoryStorage.get(this.getKey(key));
    if (result !== null) {
      result.remainingPoints = Math.max(this.points - result.consumedPoints, 0);
    }

    return Promise.resolve(result);
  }

  delete(key) {
    return Promise.resolve(this._memoryStorage.delete(this.getKey(key)));
  }
}

module.exports = RateLimiterMemory;
|
||||
|
||||
273
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterMongo.js
generated
vendored
Normal file
@@ -0,0 +1,273 @@
|
||||
const RateLimiterStoreAbstract = require('./RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
/**
|
||||
* Get MongoDB driver version as upsert options differ
|
||||
* @params {Object} Client instance
|
||||
* @returns {Object} Version Object containing major, feature & minor versions.
|
||||
*/
|
||||
/**
 * Get MongoDB driver version as upsert options differ
 * @params {Object} Client instance
 * @returns {Object} Version Object containing major, feature & minor versions.
 *   Falls back to { major: 0, feature: 0, patch: 0 } when the metadata is
 *   missing or has an unexpected shape.
 */
function getDriverVersion(client) {
  try {
    // Some wrappers expose the real client under `.client`.
    const realClient = client.client ? client.client : client;

    const { version } = realClient.topology.s.options.metadata.driver;
    // Fix: explicit radix 10 — bare parseInt could misread unusual segments.
    const [major, feature, patch] = version.split('.').map(part => parseInt(part, 10));

    return { major, feature, patch };
  } catch (err) {
    return { major: 0, feature: 0, patch: 0 };
  }
}
|
||||
|
||||
class RateLimiterMongo extends RateLimiterStoreAbstract {
  /**
   *
   * @param {Object} opts
   * Defaults {
   *   indexKeyPrefix: {attr1: 1, attr2: 1}
   *   ... see other in RateLimiterStoreAbstract
   *
   *   mongo: MongoClient
   * }
   */
  constructor(opts) {
    super(opts);

    this.dbName = opts.dbName;
    this.tableName = opts.tableName;
    this.indexKeyPrefix = opts.indexKeyPrefix;

    // `mongo` takes precedence over the generic `storeClient` option.
    if (opts.mongo) {
      this.client = opts.mongo;
    } else {
      this.client = opts.storeClient;
    }
    if (typeof this.client.then === 'function') {
      // Client is a promise of a connection: defer collection/index setup.
      this.client
        .then((conn) => {
          this.client = conn;
          this._initCollection();
          this._driverVersion = getDriverVersion(this.client);
        });
    } else {
      this._initCollection();
      this._driverVersion = getDriverVersion(this.client);
    }
  }

  get dbName() {
    return this._dbName;
  }

  set dbName(value) {
    this._dbName = typeof value === 'undefined' ? RateLimiterMongo.getDbName() : value;
  }

  static getDbName() {
    return 'node-rate-limiter-flexible';
  }

  // Collection name; defaults to this limiter's keyPrefix.
  get tableName() {
    return this._tableName;
  }

  set tableName(value) {
    this._tableName = typeof value === 'undefined' ? this.keyPrefix : value;
  }

  get client() {
    return this._client;
  }

  set client(value) {
    if (typeof value === 'undefined') {
      throw new Error('mongo is not set');
    }
    this._client = value;
  }

  // Extra attributes included in the unique index alongside `key`.
  get indexKeyPrefix() {
    return this._indexKeyPrefix;
  }

  set indexKeyPrefix(obj) {
    this._indexKeyPrefix = obj || {};
  }

  // Create the collection handle plus TTL and uniqueness indexes.
  _initCollection() {
    const db = typeof this.client.db === 'function'
      ? this.client.db(this.dbName)
      : this.client;

    const collection = db.collection(this.tableName);
    // TTL index: Mongo reaps docs once `expire` passes.
    collection.createIndex({ expire: -1 }, { expireAfterSeconds: 0 });
    collection.createIndex(Object.assign({}, this.indexKeyPrefix, { key: 1 }), { unique: true });

    this._collection = collection;
  }

  // Build a RateLimiterRes from a findOneAndUpdate result (old drivers wrap
  // the document in `.value`; new ones return it directly).
  _getRateLimiterRes(rlKey, changedPoints, result) {
    const res = new RateLimiterRes();

    let doc;
    if (typeof result.value === 'undefined') {
      doc = result;
    } else {
      doc = result.value;
    }

    res.isFirstInDuration = doc.points === changedPoints;
    res.consumedPoints = doc.points;

    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    res.msBeforeNext = doc.expire !== null
      ? Math.max(new Date(doc.expire).getTime() - Date.now(), 0)
      : -1;

    return res;
  }

  _upsert(key, points, msDuration, forceExpire = false, options = {}) {
    if (!this._collection) {
      return Promise.reject(Error('Mongo connection is not established'));
    }

    const docAttrs = options.attrs || {};

    let where;
    let upsertData;
    if (forceExpire) {
      // Unconditional overwrite (used by block/set).
      where = { key };
      where = Object.assign(where, docAttrs);
      upsertData = {
        $set: {
          key,
          points,
          expire: msDuration > 0 ? new Date(Date.now() + msDuration) : null,
        },
      };
      upsertData.$set = Object.assign(upsertData.$set, docAttrs);
    } else {
      // Increment the active (non-expired) limit doc, creating it if needed.
      where = {
        $or: [
          { expire: { $gt: new Date() } },
          { expire: { $eq: null } },
        ],
        key,
      };
      where = Object.assign(where, docAttrs);
      upsertData = {
        $setOnInsert: {
          key,
          expire: msDuration > 0 ? new Date(Date.now() + msDuration) : null,
        },
        $inc: { points },
      };
      upsertData.$setOnInsert = Object.assign(upsertData.$setOnInsert, docAttrs);
    }

    // Options for collection updates differ between driver versions:
    // `returnDocument: 'after'` is supported by driver >= 4.0.0 and >= 3.6.7;
    // older drivers need `returnOriginal: false`.
    // Fix: the original condition was mis-parenthesized — `a || (b && c || d)`
    // parses as `a || (b && c) || d`, so e.g. a 2.6.7 driver wrongly matched.
    const upsertOptions = {
      upsert: true,
    };
    const v = this._driverVersion;
    if (v.major >= 4 ||
        (v.major === 3 &&
          (v.feature > 6 || (v.feature === 6 && v.patch >= 7)))) {
      upsertOptions.returnDocument = 'after';
    } else {
      upsertOptions.returnOriginal = false;
    }

    /*
     * 1. Find actual limit and increment points
     * 2. If limit expired, but Mongo doesn't clean doc by TTL yet, try to replace limit doc completely
     * 3. If 2 or more Mongo threads try to insert the new limit doc, only the first succeed
     * 4. Try to upsert from step 1. Actual limit is created now, points are incremented without problems
     */
    return new Promise((resolve, reject) => {
      this._collection.findOneAndUpdate(
        where,
        upsertData,
        upsertOptions
      ).then((res) => {
        resolve(res);
      }).catch((errUpsert) => {
        if (errUpsert && errUpsert.code === 11000) { // E11000 duplicate key error collection
          const replaceWhere = Object.assign({ // try to replace OLD limit doc
            $or: [
              { expire: { $lte: new Date() } },
              { expire: { $eq: null } },
            ],
            key,
          }, docAttrs);

          const replaceTo = {
            $set: Object.assign({
              key,
              points,
              expire: msDuration > 0 ? new Date(Date.now() + msDuration) : null,
            }, docAttrs)
          };

          this._collection.findOneAndUpdate(
            replaceWhere,
            replaceTo,
            upsertOptions
          ).then((res) => {
            resolve(res);
          }).catch((errReplace) => {
            if (errReplace && errReplace.code === 11000) { // E11000 duplicate key error collection
              // Another process inserted the fresh doc first: retry step 1.
              this._upsert(key, points, msDuration, forceExpire)
                .then(res => resolve(res))
                .catch(err => reject(err));
            } else {
              reject(errReplace);
            }
          });
        } else {
          reject(errUpsert);
        }
      });
    });
  }

  // Fetch the active (non-expired) limit doc for a key, or null.
  _get(rlKey, options = {}) {
    if (!this._collection) {
      return Promise.reject(Error('Mongo connection is not established'));
    }

    const docAttrs = options.attrs || {};

    const where = Object.assign({
      key: rlKey,
      $or: [
        { expire: { $gt: new Date() } },
        { expire: { $eq: null } },
      ],
    }, docAttrs);

    return this._collection.findOne(where);
  }

  // Delete the limit doc; resolves true iff something was removed.
  _delete(rlKey, options = {}) {
    if (!this._collection) {
      return Promise.reject(Error('Mongo connection is not established'));
    }

    const docAttrs = options.attrs || {};
    const where = Object.assign({ key: rlKey }, docAttrs);

    return this._collection.deleteOne(where)
      .then(res => res.deletedCount > 0);
  }
}

module.exports = RateLimiterMongo;
|
||||
379
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterMySQL.js
generated
vendored
Normal file
@@ -0,0 +1,379 @@
|
||||
const RateLimiterStoreAbstract = require('./RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
/**
 * Rate limiter backed by a MySQL table.
 *
 * Works with a raw mysql/mysql2 connection or pool, a Sequelize instance,
 * or a Knex instance (`storeType: 'knex'` required for Knex).
 */
class RateLimiterMySQL extends RateLimiterStoreAbstract {
  /**
   * @callback callback
   * @param {Object} err
   *
   * @param {Object} opts
   * @param {callback} cb invoked once the backing table is ready (or with the creation error)
   * Defaults {
   *   ... see other in RateLimiterStoreAbstract
   *
   *   storeClient: anySqlClient,
   *   storeType: 'knex', // required only for Knex instance
   *   dbName: 'string',
   *   tableName: 'string',
   * }
   */
  constructor(opts, cb = null) {
    super(opts);

    this.client = opts.storeClient;
    this.clientType = opts.storeType;

    this.dbName = opts.dbName;
    this.tableName = opts.tableName;

    this.clearExpiredByTimeout = opts.clearExpiredByTimeout;

    this.tableCreated = opts.tableCreated;
    if (!this.tableCreated) {
      // Table must exist before any consume; creation is async, so readiness
      // is signalled through `cb` (or by throwing when no callback is given).
      this._createDbAndTable()
        .then(() => {
          this.tableCreated = true;
          if (this.clearExpiredByTimeout) {
            this._clearExpiredHourAgo();
          }
          if (typeof cb === 'function') {
            cb();
          }
        })
        .catch((err) => {
          if (typeof cb === 'function') {
            cb(err);
          } else {
            // NOTE(review): throwing inside .catch produces an unhandled
            // rejection rather than a synchronous throw — confirm callers
            // listen for it.
            throw err;
          }
        });
    } else {
      if (this.clearExpiredByTimeout) {
        this._clearExpiredHourAgo();
      }
      if (typeof cb === 'function') {
        cb();
      }
    }
  }

  /**
   * Delete rows whose `expire` is before the given timestamp.
   * Never rejects: cleanup is best-effort.
   *
   * @param {number} expire ms timestamp cut-off
   * @returns {Promise<void>}
   */
  clearExpired(expire) {
    return new Promise((resolve) => {
      this._getConnection()
        .then((conn) => {
          conn.query(`DELETE FROM ??.?? WHERE expire < ?`, [this.dbName, this.tableName, expire], () => {
            this._releaseConnection(conn);
            resolve();
          });
        })
        .catch(() => {
          // Connection failure is ignored: expired-row cleanup is not critical.
          resolve();
        });
    });
  }

  // Delete rows expired over an hour ago, re-arming itself every 5 minutes.
  // unref() keeps this timer from holding the process open.
  _clearExpiredHourAgo() {
    if (this._clearExpiredTimeoutId) {
      clearTimeout(this._clearExpiredTimeoutId);
    }
    this._clearExpiredTimeoutId = setTimeout(() => {
      this.clearExpired(Date.now() - 3600000) // Never rejected
        .then(() => {
          this._clearExpiredHourAgo();
        });
    }, 300000);
    this._clearExpiredTimeoutId.unref();
  }

  /**
   * Acquire a connection appropriate for the configured client type.
   *
   * @return Promise<any>
   * @private
   */
  _getConnection() {
    switch (this.clientType) {
      case 'pool':
        return new Promise((resolve, reject) => {
          this.client.getConnection((errConn, conn) => {
            if (errConn) {
              return reject(errConn);
            }

            resolve(conn);
          });
        });
      case 'sequelize':
        return this.client.connectionManager.getConnection();
      case 'knex':
        return this.client.client.acquireConnection();
      default:
        // Plain single connection: reuse the client itself.
        return Promise.resolve(this.client);
    }
  }

  // Return a connection to its pool/manager; no-op for plain connections.
  _releaseConnection(conn) {
    switch (this.clientType) {
      case 'pool':
        return conn.release();
      case 'sequelize':
        return this.client.connectionManager.releaseConnection(conn);
      case 'knex':
        return this.client.client.releaseConnection(conn);
      default:
        return true;
    }
  }

  /**
   * Create the database (if missing) and then the limiter table.
   *
   * @returns {Promise<any>}
   * @private
   */
  _createDbAndTable() {
    return new Promise((resolve, reject) => {
      this._getConnection()
        .then((conn) => {
          conn.query(`CREATE DATABASE IF NOT EXISTS \`${this.dbName}\`;`, (errDb) => {
            if (errDb) {
              this._releaseConnection(conn);
              return reject(errDb);
            }
            conn.query(this._getCreateTableStmt(), (err) => {
              if (err) {
                this._releaseConnection(conn);
                return reject(err);
              }
              this._releaseConnection(conn);
              resolve();
            });
          });
        })
        .catch((err) => {
          reject(err);
        });
    });
  }

  // DDL for the limiter table: key → consumed points + expiry (ms timestamp).
  _getCreateTableStmt() {
    return `CREATE TABLE IF NOT EXISTS \`${this.dbName}\`.\`${this.tableName}\` (` +
      '`key` VARCHAR(255) CHARACTER SET utf8 NOT NULL,' +
      '`points` INT(9) NOT NULL default 0,' +
      '`expire` BIGINT UNSIGNED,' +
      'PRIMARY KEY (`key`)' +
      ') ENGINE = INNODB;';
  }

  get clientType() {
    return this._clientType;
  }

  // Infer the client type from the constructor name when not given explicitly.
  // Knex cannot be auto-detected, hence the explicit storeType requirement.
  set clientType(value) {
    if (typeof value === 'undefined') {
      if (this.client.constructor.name === 'Connection') {
        value = 'connection';
      } else if (this.client.constructor.name === 'Pool') {
        value = 'pool';
      } else if (this.client.constructor.name === 'Sequelize') {
        value = 'sequelize';
      } else {
        throw new Error('storeType is not defined');
      }
    }
    this._clientType = value.toLowerCase();
  }

  get dbName() {
    return this._dbName;
  }

  // Default database name when none is provided.
  set dbName(value) {
    this._dbName = typeof value === 'undefined' ? 'rtlmtrflx' : value;
  }

  get tableName() {
    return this._tableName;
  }

  // Table defaults to the limiter's keyPrefix.
  set tableName(value) {
    this._tableName = typeof value === 'undefined' ? this.keyPrefix : value;
  }

  get tableCreated() {
    return this._tableCreated
  }

  set tableCreated(value) {
    this._tableCreated = typeof value === 'undefined' ? false : !!value;
  }

  get clearExpiredByTimeout() {
    return this._clearExpiredByTimeout;
  }

  // Background cleanup is on by default.
  set clearExpiredByTimeout(value) {
    this._clearExpiredByTimeout = typeof value === 'undefined' ? true : Boolean(value);
  }

  /**
   * Translate a `[rows]` result from the upsert transaction into a
   * RateLimiterRes.
   *
   * @param {string} rlKey
   * @param {number} changedPoints points added by the current operation
   * @param {Array} result rows from the post-upsert SELECT
   * @private
   */
  _getRateLimiterRes(rlKey, changedPoints, result) {
    const res = new RateLimiterRes();
    const [row] = result;

    // If the stored total equals what we just added, this call created the window.
    res.isFirstInDuration = changedPoints === row.points;
    res.consumedPoints = res.isFirstInDuration ? changedPoints : row.points;

    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    // -1 signals "never expires" (expire is NULL).
    res.msBeforeNext = row.expire
      ? Math.max(row.expire - Date.now(), 0)
      : -1;

    return res;
  }

  /**
   * Atomically insert-or-update the key inside a transaction and read back
   * the resulting row. Rolls back and rejects on any step's failure.
   *
   * @private
   */
  _upsertTransaction(conn, key, points, msDuration, forceExpire) {
    return new Promise((resolve, reject) => {
      conn.query('BEGIN', (errBegin) => {
        if (errBegin) {
          conn.rollback();

          return reject(errBegin);
        }

        const dateNow = Date.now();
        const newExpire = msDuration > 0 ? dateNow + msDuration : null;

        let q;
        let values;
        if (forceExpire) {
          // Overwrite points and expiry unconditionally (used by set/block).
          q = `INSERT INTO ??.?? VALUES (?, ?, ?)
            ON DUPLICATE KEY UPDATE
            points = ?,
            expire = ?;`;
          values = [
            this.dbName, this.tableName, key, points, newExpire,
            points,
            newExpire,
          ];
        } else {
          // Reset the row when its window expired, otherwise accumulate points.
          q = `INSERT INTO ??.?? VALUES (?, ?, ?)
            ON DUPLICATE KEY UPDATE
            points = IF(expire <= ?, ?, points + (?)),
            expire = IF(expire <= ?, ?, expire);`;
          values = [
            this.dbName, this.tableName, key, points, newExpire,
            dateNow, points, points,
            dateNow, newExpire,
          ];
        }

        conn.query(q, values, (errUpsert) => {
          if (errUpsert) {
            conn.rollback();

            return reject(errUpsert);
          }
          // Read the row back within the same transaction for a consistent view.
          conn.query('SELECT points, expire FROM ??.?? WHERE `key` = ?;', [this.dbName, this.tableName, key], (errSelect, res) => {
            if (errSelect) {
              conn.rollback();

              return reject(errSelect);
            }

            conn.query('COMMIT', (err) => {
              if (err) {
                conn.rollback();

                return reject(err);
              }

              resolve(res);
            });
          });
        });
      });
    });
  }

  /**
   * Acquire a connection, run the upsert transaction, always release.
   *
   * @private
   */
  _upsert(key, points, msDuration, forceExpire = false) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    return new Promise((resolve, reject) => {
      this._getConnection()
        .then((conn) => {
          this._upsertTransaction(conn, key, points, msDuration, forceExpire)
            .then((res) => {
              resolve(res);
              this._releaseConnection(conn);
            })
            .catch((err) => {
              reject(err);
              this._releaseConnection(conn);
            });
        })
        .catch((err) => {
          reject(err);
        });
    });
  }

  /**
   * Fetch the current row for a key; resolves null when absent or expired.
   *
   * @private
   */
  _get(rlKey) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    return new Promise((resolve, reject) => {
      this._getConnection()
        .then((conn) => {
          conn.query(
            'SELECT points, expire FROM ??.?? WHERE `key` = ? AND (`expire` > ? OR `expire` IS NULL)',
            [this.dbName, this.tableName, rlKey, Date.now()],
            (err, res) => {
              if (err) {
                reject(err);
              } else if (res.length === 0) {
                resolve(null);
              } else {
                resolve(res);
              }

              this._releaseConnection(conn);
            } // eslint-disable-line
          );
        })
        .catch((err) => {
          reject(err);
        });
    });
  }

  /**
   * Delete a key's row; resolves true when a row was removed.
   *
   * @private
   */
  _delete(rlKey) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    return new Promise((resolve, reject) => {
      this._getConnection()
        .then((conn) => {
          conn.query(
            'DELETE FROM ??.?? WHERE `key` = ?',
            [this.dbName, this.tableName, rlKey],
            (err, res) => {
              if (err) {
                reject(err);
              } else {
                resolve(res.affectedRows > 0);
              }

              this._releaseConnection(conn);
            } // eslint-disable-line
          );
        })
        .catch((err) => {
          reject(err);
        });
    });
  }
}

module.exports = RateLimiterMySQL;
|
||||
312
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterPostgres.js
generated
vendored
Normal file
@@ -0,0 +1,312 @@
|
||||
const RateLimiterStoreAbstract = require('./RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
/**
 * Rate limiter backed by a PostgreSQL table.
 *
 * Works with a pg Client/Pool, a Sequelize instance, a TypeORM connection,
 * or a Knex instance (`storeType: 'knex'` required for Knex).
 */
class RateLimiterPostgres extends RateLimiterStoreAbstract {
  /**
   * @callback callback
   * @param {Object} err
   *
   * @param {Object} opts
   * @param {callback} cb invoked once the backing table is ready (or with the creation error)
   * Defaults {
   *   ... see other in RateLimiterStoreAbstract
   *
   *   storeClient: postgresClient,
   *   storeType: 'knex', // required only for Knex instance
   *   tableName: 'string',
   * }
   */
  constructor(opts, cb = null) {
    super(opts);

    this.client = opts.storeClient;
    this.clientType = opts.storeType;

    this.tableName = opts.tableName;

    this.clearExpiredByTimeout = opts.clearExpiredByTimeout;

    this.tableCreated = opts.tableCreated;
    if (!this.tableCreated) {
      // Table must exist before any consume; readiness is signalled via `cb`.
      this._createTable()
        .then(() => {
          this.tableCreated = true;
          if (this.clearExpiredByTimeout) {
            this._clearExpiredHourAgo();
          }
          if (typeof cb === 'function') {
            cb();
          }
        })
        .catch((err) => {
          if (typeof cb === 'function') {
            cb(err);
          } else {
            // NOTE: throwing inside .catch surfaces as an unhandled rejection.
            throw err;
          }
        });
    } else {
      // Fix: start background cleanup in the pre-created-table path too,
      // consistent with the MySQL implementation; previously expired rows
      // were never cleaned when opts.tableCreated was true.
      if (this.clearExpiredByTimeout) {
        this._clearExpiredHourAgo();
      }
      if (typeof cb === 'function') {
        cb();
      }
    }
  }

  /**
   * Delete rows whose `expire` is before the given timestamp.
   * Never rejects: cleanup is best-effort.
   *
   * @param {number} expire ms timestamp cut-off
   * @returns {Promise<void>}
   */
  clearExpired(expire) {
    const q = {
      name: 'rlflx-clear-expired',
      text: `DELETE FROM ${this.tableName} WHERE expire < $1`,
      values: [expire],
    };
    return this._query(q)
      .then(() => undefined)
      .catch(() => {
        // Deleting expired rows is not critical
        return undefined;
      });
  }

  /**
   * Delete all rows expired 1 hour ago once per 5 minutes.
   * unref() keeps this timer from holding the process open.
   *
   * @private
   */
  _clearExpiredHourAgo() {
    if (this._clearExpiredTimeoutId) {
      clearTimeout(this._clearExpiredTimeoutId);
    }
    this._clearExpiredTimeoutId = setTimeout(() => {
      this.clearExpired(Date.now() - 3600000) // Never rejected
        .then(() => {
          this._clearExpiredHourAgo();
        });
    }, 300000);
    this._clearExpiredTimeoutId.unref();
  }

  /**
   * Acquire a connection appropriate for the configured client type.
   *
   * @return Promise<any>
   * @private
   */
  _getConnection() {
    switch (this.clientType) {
      case 'pool':
        // pg Pool handles checkout internally on query().
        return Promise.resolve(this.client);
      case 'sequelize':
        return this.client.connectionManager.getConnection();
      case 'knex':
        return this.client.client.acquireConnection();
      case 'typeorm':
        return Promise.resolve(this.client.driver.master);
      default:
        return Promise.resolve(this.client);
    }
  }

  // Return a connection to its manager; no-op for pool/typeorm/plain clients.
  _releaseConnection(conn) {
    switch (this.clientType) {
      case 'pool':
        return true;
      case 'sequelize':
        return this.client.connectionManager.releaseConnection(conn);
      case 'knex':
        return this.client.client.releaseConnection(conn);
      case 'typeorm':
        return true;
      default:
        return true;
    }
  }

  /**
   * Create the limiter table if it does not exist yet.
   *
   * @returns {Promise<any>}
   * @private
   */
  _createTable() {
    return this._query({
      text: this._getCreateTableStmt(),
    })
      .then(() => undefined)
      .catch((err) => {
        if (err.code === '23505') {
          // Error: duplicate key value violates unique constraint "pg_type_typname_nsp_index"
          // Postgres doesn't handle concurrent table creation
          // It is supposed, that table is created by another worker
          return undefined;
        }
        throw err;
      });
  }

  // DDL for the limiter table: key → consumed points + expiry (ms timestamp).
  _getCreateTableStmt() {
    return `CREATE TABLE IF NOT EXISTS ${this.tableName} (
      key varchar(255) PRIMARY KEY,
      points integer NOT NULL DEFAULT 0,
      expire bigint
    );`;
  }

  get clientType() {
    return this._clientType;
  }

  // Infer the client type from the constructor name when not given explicitly.
  // Knex/TypeORM cannot be auto-detected, hence the explicit storeType option.
  set clientType(value) {
    const constructorName = this.client.constructor.name;

    if (typeof value === 'undefined') {
      if (constructorName === 'Client') {
        value = 'client';
      } else if (
        constructorName === 'Pool' ||
        constructorName === 'BoundPool'
      ) {
        value = 'pool';
      } else if (constructorName === 'Sequelize') {
        value = 'sequelize';
      } else {
        throw new Error('storeType is not defined');
      }
    }

    this._clientType = value.toLowerCase();
  }

  get tableName() {
    return this._tableName;
  }

  // Table defaults to the limiter's keyPrefix.
  set tableName(value) {
    this._tableName = typeof value === 'undefined' ? this.keyPrefix : value;
  }

  get tableCreated() {
    return this._tableCreated;
  }

  set tableCreated(value) {
    this._tableCreated = typeof value === 'undefined' ? false : !!value;
  }

  get clearExpiredByTimeout() {
    return this._clearExpiredByTimeout;
  }

  // Background cleanup is on by default.
  set clearExpiredByTimeout(value) {
    this._clearExpiredByTimeout = typeof value === 'undefined' ? true : Boolean(value);
  }

  /**
   * Translate a pg result into a RateLimiterRes.
   *
   * @param {string} rlKey
   * @param {number} changedPoints points added by the current operation
   * @param {Object} result pg query result (rows[0] holds points/expire)
   * @private
   */
  _getRateLimiterRes(rlKey, changedPoints, result) {
    const res = new RateLimiterRes();
    const row = result.rows[0];

    // If the stored total equals what we just added, this call created the window.
    res.isFirstInDuration = changedPoints === row.points;
    res.consumedPoints = res.isFirstInDuration ? changedPoints : row.points;

    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    // -1 signals "never expires" (expire is NULL).
    res.msBeforeNext = row.expire
      ? Math.max(row.expire - Date.now(), 0)
      : -1;

    return res;
  }

  /**
   * Run a (named, prepared) query on an acquired connection, always releasing
   * the connection afterwards.
   *
   * @private
   */
  _query(q) {
    // Prepared-statement names are prefixed per table so several limiters can
    // share one connection without name collisions.
    const prefix = this.tableName.toLowerCase();
    const queryObj = { name: `${prefix}:${q.name}`, text: q.text, values: q.values };
    return this._getConnection()
      .then(conn => conn.query(queryObj)
        .then((res) => {
          this._releaseConnection(conn);
          return res;
        })
        .catch((err) => {
          this._releaseConnection(conn);
          throw err;
        }));
  }

  /**
   * Atomic upsert: insert the key or, on conflict, either reset (expired
   * window or forceExpire) or accumulate points. Returns points + expire.
   *
   * @private
   */
  _upsert(key, points, msDuration, forceExpire = false) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    const newExpire = msDuration > 0 ? Date.now() + msDuration : null;
    const expireQ = forceExpire
      ? ' $3 '
      : ` CASE
          WHEN ${this.tableName}.expire <= $4 THEN $3
          ELSE ${this.tableName}.expire
         END `;

    return this._query({
      // Distinct statement names: the two variants have different SQL text.
      name: forceExpire ? 'rlflx-upsert-force' : 'rlflx-upsert',
      text: `
        INSERT INTO ${this.tableName} VALUES ($1, $2, $3)
          ON CONFLICT(key) DO UPDATE SET
            points = CASE
                      WHEN (${this.tableName}.expire <= $4 OR 1=${forceExpire ? 1 : 0}) THEN $2
                      ELSE ${this.tableName}.points + ($2)
                     END,
            expire = ${expireQ}
          RETURNING points, expire;`,
      values: [key, points, newExpire, Date.now()],
    });
  }

  /**
   * Fetch the current row for a key; resolves null when absent or expired.
   *
   * @private
   */
  _get(rlKey) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    return this._query({
      name: 'rlflx-get',
      text: `
        SELECT points, expire FROM ${this.tableName} WHERE key = $1 AND (expire > $2 OR expire IS NULL);`,
      values: [rlKey, Date.now()],
    })
      .then(res => (res.rowCount === 0 ? null : res));
  }

  /**
   * Delete a key's row; resolves true when a row was removed.
   *
   * @private
   */
  _delete(rlKey) {
    if (!this.tableCreated) {
      return Promise.reject(Error('Table is not created yet'));
    }

    return this._query({
      name: 'rlflx-delete',
      text: `DELETE FROM ${this.tableName} WHERE key = $1`,
      values: [rlKey],
    })
      .then(res => res.rowCount > 0);
  }
}

module.exports = RateLimiterPostgres;
|
||||
127
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterQueue.js
generated
vendored
Normal file
@@ -0,0 +1,127 @@
|
||||
const RateLimiterQueueError = require('./component/RateLimiterQueueError')
|
||||
// 2^32 - 1: effectively unbounded queue unless the caller sets maxQueueSize.
const MAX_QUEUE_SIZE = 4294967295;
// Key used when the caller does not pass one explicitly.
const KEY_DEFAULT = 'limiter';
|
||||
|
||||
module.exports = class RateLimiterQueue {
|
||||
constructor(limiterFlexible, opts = {
|
||||
maxQueueSize: MAX_QUEUE_SIZE,
|
||||
}) {
|
||||
this._queueLimiters = {
|
||||
KEY_DEFAULT: new RateLimiterQueueInternal(limiterFlexible, opts)
|
||||
};
|
||||
this._limiterFlexible = limiterFlexible;
|
||||
this._maxQueueSize = opts.maxQueueSize
|
||||
}
|
||||
|
||||
getTokensRemaining(key = KEY_DEFAULT) {
|
||||
if (this._queueLimiters[key]) {
|
||||
return this._queueLimiters[key].getTokensRemaining()
|
||||
} else {
|
||||
return Promise.resolve(this._limiterFlexible.points)
|
||||
}
|
||||
}
|
||||
|
||||
removeTokens(tokens, key = KEY_DEFAULT) {
|
||||
if (!this._queueLimiters[key]) {
|
||||
this._queueLimiters[key] = new RateLimiterQueueInternal(
|
||||
this._limiterFlexible, {
|
||||
key,
|
||||
maxQueueSize: this._maxQueueSize,
|
||||
})
|
||||
}
|
||||
|
||||
return this._queueLimiters[key].removeTokens(tokens)
|
||||
}
|
||||
};
|
||||
|
||||
/**
 * Per-key FIFO queue over a rate-limiter-flexible limiter. Requests that the
 * limiter rejects (window exhausted) are parked and retried when the window
 * rolls over (driven by a single setTimeout armed with msBeforeNext).
 */
class RateLimiterQueueInternal {

  constructor(limiterFlexible, opts = {
    maxQueueSize: MAX_QUEUE_SIZE,
    key: KEY_DEFAULT,
  }) {
    this._key = opts.key;
    // Timer handle for the next queue drain; null while no retry is pending.
    this._waitTimeout = null;
    // Parked requests: { resolve, reject, tokens } in arrival order.
    this._queue = [];
    this._limiterFlexible = limiterFlexible;

    this._maxQueueSize = opts.maxQueueSize
  }

  /**
   * Remaining points for this queue's key, or the full quota when the key
   * has no record in the store.
   *
   * @returns {Promise<number>}
   */
  getTokensRemaining() {
    return this._limiterFlexible.get(this._key)
      .then((rlRes) => {
        return rlRes !== null ? rlRes.remainingPoints : this._limiterFlexible.points;
      })
  }

  /**
   * Consume `tokens`, queueing the request when the current window is
   * exhausted. Rejects with RateLimiterQueueError when the request can never
   * succeed (tokens > quota) or when the queue is full.
   *
   * @returns {Promise<number>} remaining points once granted
   */
  removeTokens(tokens) {
    const _this = this;

    return new Promise((resolve, reject) => {
      // A request larger than the whole quota could never be granted.
      if (tokens > _this._limiterFlexible.points) {
        reject(new RateLimiterQueueError(`Requested tokens ${tokens} exceeds maximum ${_this._limiterFlexible.points} tokens per interval`));
        return
      }

      if (_this._queue.length > 0) {
        // Preserve FIFO ordering: never overtake already-waiting requests.
        _this._queueRequest.call(_this, resolve, reject, tokens);
      } else {
        _this._limiterFlexible.consume(_this._key, tokens)
          .then((res) => {
            resolve(res.remainingPoints);
          })
          .catch((rej) => {
            if (rej instanceof Error) {
              // Store/driver failure, not a rate-limit rejection.
              reject(rej);
            } else {
              // Rate-limited: park the request and arm one retry timer for
              // when the current window ends.
              _this._queueRequest.call(_this, resolve, reject, tokens);
              if (_this._waitTimeout === null) {
                _this._waitTimeout = setTimeout(_this._processFIFO.bind(_this), rej.msBeforeNext);
              }
            }
          });
      }
    })
  }

  // Park a request, or reject it when the queue is at capacity.
  _queueRequest(resolve, reject, tokens) {
    const _this = this;
    if (_this._queue.length < _this._maxQueueSize) {
      _this._queue.push({resolve, reject, tokens});
    } else {
      reject(new RateLimiterQueueError(`Number of requests reached it's maximum ${_this._maxQueueSize}`))
    }
  }

  // Drain the queue head-first; on a rate-limit rejection the item is put
  // back at the head and the timer is re-armed for the next window.
  _processFIFO() {
    const _this = this;

    if (_this._waitTimeout !== null) {
      clearTimeout(_this._waitTimeout);
      _this._waitTimeout = null;
    }

    if (_this._queue.length === 0) {
      return;
    }

    const item = _this._queue.shift();
    _this._limiterFlexible.consume(_this._key, item.tokens)
      .then((res) => {
        item.resolve(res.remainingPoints);
        // Keep draining until a rejection or the queue empties.
        _this._processFIFO.call(_this);
      })
      .catch((rej) => {
        if (rej instanceof Error) {
          item.reject(rej);
          _this._processFIFO.call(_this);
        } else {
          _this._queue.unshift(item);
          if (_this._waitTimeout === null) {
            _this._waitTimeout = setTimeout(_this._processFIFO.bind(_this), rej.msBeforeNext);
          }
        }
      });
  }
}
|
||||
173
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterRedis.js
generated
vendored
Normal file
@@ -0,0 +1,173 @@
|
||||
const RateLimiterStoreAbstract = require('./RateLimiterStoreAbstract');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
const incrTtlLuaScript = `redis.call('set', KEYS[1], 0, 'EX', ARGV[2], 'NX') \
|
||||
local consumed = redis.call('incrby', KEYS[1], ARGV[1]) \
|
||||
local ttl = redis.call('pttl', KEYS[1]) \
|
||||
if ttl == -1 then \
|
||||
redis.call('expire', KEYS[1], ARGV[2]) \
|
||||
ttl = 1000 * ARGV[2] \
|
||||
end \
|
||||
return {consumed, ttl} \
|
||||
`;
|
||||
|
||||
/**
 * Rate limiter backed by Redis. Supports both ioredis (via defineCommand)
 * and node-redis style clients (via eval / multi-exec).
 */
class RateLimiterRedis extends RateLimiterStoreAbstract {
  /**
   *
   * @param {Object} opts
   * Defaults {
   *   ... see other in RateLimiterStoreAbstract
   *
   *   redis: RedisClient
   *   rejectIfRedisNotReady: boolean = false - reject / invoke insuranceLimiter immediately when redis connection is not "ready"
   * }
   */
  constructor(opts) {
    super(opts);
    // `redis` is the legacy option name; `storeClient` is the generic one.
    if (opts.redis) {
      this.client = opts.redis;
    } else {
      this.client = opts.storeClient;
    }

    this._rejectIfRedisNotReady = !!opts.rejectIfRedisNotReady;

    // ioredis: register the Lua script once so later calls use EVALSHA.
    if (typeof this.client.defineCommand === 'function') {
      this.client.defineCommand("rlflxIncr", {
        numberOfKeys: 1,
        lua: incrTtlLuaScript,
      });
    }
  }

  /**
   * Prevent actual redis call if redis connection is not ready
   * Because of different connection state checks for ioredis and node-redis, only this clients would be actually checked.
   * For any other clients all the requests would be passed directly to redis client
   * @return {boolean}
   * @private
   */
  _isRedisReady() {
    if (!this._rejectIfRedisNotReady) {
      return true;
    }
    // ioredis client
    if (this.client.status && this.client.status !== 'ready') {
      return false;
    }
    // node-redis client
    if (typeof this.client.isReady === 'function' && !this.client.isReady()) {
      return false;
    }
    return true;
  }

  /**
   * Translate a `[consumed, ttlMs]` script/multi result into a RateLimiterRes.
   * ioredis wraps each reply as [err, value]; that shape is unwrapped first.
   *
   * @private
   */
  _getRateLimiterRes(rlKey, changedPoints, result) {
    let [consumed, resTtlMs] = result;
    // Support ioredis results format
    if (Array.isArray(consumed)) {
      [, consumed] = consumed;
      [, resTtlMs] = resTtlMs;
    }

    const res = new RateLimiterRes();
    // Redis replies are strings/integers; values are decimal, so the missing
    // radix on parseInt is safe here.
    res.consumedPoints = parseInt(consumed);
    res.isFirstInDuration = res.consumedPoints === changedPoints;
    res.remainingPoints = Math.max(this.points - res.consumedPoints, 0);
    res.msBeforeNext = resTtlMs;

    return res;
  }

  /**
   * Add points for a key atomically. forceExpire overwrites the value and
   * TTL; otherwise the Lua script increments while preserving/setting TTL.
   * msDuration <= 0 means the key never expires.
   *
   * @private
   */
  _upsert(rlKey, points, msDuration, forceExpire = false) {
    return new Promise((resolve, reject) => {
      if (!this._isRedisReady()) {
        return reject(new Error('Redis connection is not ready'));
      }

      const secDuration = Math.floor(msDuration / 1000);
      const multi = this.client.multi();
      if (forceExpire) {
        if (secDuration > 0) {
          multi.set(rlKey, points, 'EX', secDuration);
        } else {
          multi.set(rlKey, points);
        }

        multi.pttl(rlKey)
          .exec((err, res) => {
            if (err) {
              return reject(err);
            }

            return resolve(res);
          });
      } else {
        if (secDuration > 0) {
          const incrCallback = function(err, result) {
            if (err) {
              return reject(err);
            }

            return resolve(result);
          };

          // Prefer the pre-registered ioredis command (EVALSHA); fall back to
          // a plain EVAL for other clients.
          if (typeof this.client.rlflxIncr === 'function') {
            this.client.rlflxIncr(rlKey, points, secDuration, incrCallback);
          } else {
            this.client.eval(incrTtlLuaScript, 1, rlKey, points, secDuration, incrCallback);
          }
        } else {
          // No expiry requested: simple INCRBY + PTTL.
          multi.incrby(rlKey, points)
            .pttl(rlKey)
            .exec((err, res) => {
              if (err) {
                return reject(err);
              }

              return resolve(res);
            });
        }
      }
    });
  }

  /**
   * Read `[points, pttl]` for a key; resolves null when the key is absent.
   *
   * @private
   */
  _get(rlKey) {
    return new Promise((resolve, reject) => {
      if (!this._isRedisReady()) {
        return reject(new Error('Redis connection is not ready'));
      }

      this.client
        .multi()
        .get(rlKey)
        .pttl(rlKey)
        .exec((err, res) => {
          if (err) {
            reject(err);
          } else {
            const [points] = res;
            if (points === null) {
              return resolve(null)
            }

            resolve(res);
          }
        });
    });
  }

  /**
   * Delete a key; resolves true when a key was removed.
   *
   * @private
   */
  _delete(rlKey) {
    return new Promise((resolve, reject) => {
      this.client.del(rlKey, (err, res) => {
        if (err) {
          reject(err);
        } else {
          resolve(res > 0);
        }
      });
    });
  }
}

module.exports = RateLimiterRedis;
|
||||
64
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterRes.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
module.exports = class RateLimiterRes {
|
||||
constructor(remainingPoints, msBeforeNext, consumedPoints, isFirstInDuration) {
|
||||
this.remainingPoints = typeof remainingPoints === 'undefined' ? 0 : remainingPoints; // Remaining points in current duration
|
||||
this.msBeforeNext = typeof msBeforeNext === 'undefined' ? 0 : msBeforeNext; // Milliseconds before next action
|
||||
this.consumedPoints = typeof consumedPoints === 'undefined' ? 0 : consumedPoints; // Consumed points in current duration
|
||||
this.isFirstInDuration = typeof isFirstInDuration === 'undefined' ? false : isFirstInDuration;
|
||||
}
|
||||
|
||||
get msBeforeNext() {
|
||||
return this._msBeforeNext;
|
||||
}
|
||||
|
||||
set msBeforeNext(ms) {
|
||||
this._msBeforeNext = ms;
|
||||
return this;
|
||||
}
|
||||
|
||||
get remainingPoints() {
|
||||
return this._remainingPoints;
|
||||
}
|
||||
|
||||
set remainingPoints(p) {
|
||||
this._remainingPoints = p;
|
||||
return this;
|
||||
}
|
||||
|
||||
get consumedPoints() {
|
||||
return this._consumedPoints;
|
||||
}
|
||||
|
||||
set consumedPoints(p) {
|
||||
this._consumedPoints = p;
|
||||
return this;
|
||||
}
|
||||
|
||||
get isFirstInDuration() {
|
||||
return this._isFirstInDuration;
|
||||
}
|
||||
|
||||
set isFirstInDuration(value) {
|
||||
this._isFirstInDuration = Boolean(value);
|
||||
}
|
||||
|
||||
_getDecoratedProperties() {
|
||||
return {
|
||||
remainingPoints: this.remainingPoints,
|
||||
msBeforeNext: this.msBeforeNext,
|
||||
consumedPoints: this.consumedPoints,
|
||||
isFirstInDuration: this.isFirstInDuration,
|
||||
};
|
||||
}
|
||||
|
||||
[Symbol.for("nodejs.util.inspect.custom")]() {
|
||||
return this._getDecoratedProperties();
|
||||
}
|
||||
|
||||
toString() {
|
||||
return JSON.stringify(this._getDecoratedProperties());
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
return this._getDecoratedProperties();
|
||||
}
|
||||
};
|
||||
442
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterStoreAbstract.js
generated
vendored
Normal file
@@ -0,0 +1,442 @@
|
||||
const RateLimiterAbstract = require('./RateLimiterAbstract');
|
||||
const BlockedKeys = require('./component/BlockedKeys');
|
||||
const RateLimiterRes = require('./RateLimiterRes');
|
||||
|
||||
module.exports = class RateLimiterStoreAbstract extends RateLimiterAbstract {
|
||||
/**
|
||||
*
|
||||
* @param opts Object Defaults {
|
||||
* ... see other in RateLimiterAbstract
|
||||
*
|
||||
* inMemoryBlockOnConsumed: 40, // Number of points when key is blocked
|
||||
* inMemoryBlockDuration: 10, // Block duration in seconds
|
||||
* insuranceLimiter: RateLimiterAbstract
|
||||
* }
|
||||
*/
|
||||
constructor(opts = {}) {
|
||||
super(opts);
|
||||
|
||||
this.inMemoryBlockOnConsumed = opts.inMemoryBlockOnConsumed || opts.inmemoryBlockOnConsumed;
|
||||
this.inMemoryBlockDuration = opts.inMemoryBlockDuration || opts.inmemoryBlockDuration;
|
||||
this.insuranceLimiter = opts.insuranceLimiter;
|
||||
this._inMemoryBlockedKeys = new BlockedKeys();
|
||||
}
|
||||
|
||||
get client() {
|
||||
return this._client;
|
||||
}
|
||||
|
||||
set client(value) {
|
||||
if (typeof value === 'undefined') {
|
||||
throw new Error('storeClient is not set');
|
||||
}
|
||||
this._client = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Have to be launched after consume
|
||||
* It blocks key and execute evenly depending on result from store
|
||||
*
|
||||
* It uses _getRateLimiterRes function to prepare RateLimiterRes from store result
|
||||
*
|
||||
* @param resolve
|
||||
* @param reject
|
||||
* @param rlKey
|
||||
* @param changedPoints
|
||||
* @param storeResult
|
||||
* @param {Object} options
|
||||
* @private
|
||||
*/
|
||||
_afterConsume(resolve, reject, rlKey, changedPoints, storeResult, options = {}) {
|
||||
const res = this._getRateLimiterRes(rlKey, changedPoints, storeResult);
|
||||
|
||||
if (this.inMemoryBlockOnConsumed > 0 && !(this.inMemoryBlockDuration > 0)
|
||||
&& res.consumedPoints >= this.inMemoryBlockOnConsumed
|
||||
) {
|
||||
this._inMemoryBlockedKeys.addMs(rlKey, res.msBeforeNext);
|
||||
if (res.consumedPoints > this.points) {
|
||||
return reject(res);
|
||||
} else {
|
||||
return resolve(res)
|
||||
}
|
||||
} else if (res.consumedPoints > this.points) {
|
||||
let blockPromise = Promise.resolve();
|
||||
// Block only first time when consumed more than points
|
||||
if (this.blockDuration > 0 && res.consumedPoints <= (this.points + changedPoints)) {
|
||||
res.msBeforeNext = this.msBlockDuration;
|
||||
blockPromise = this._block(rlKey, res.consumedPoints, this.msBlockDuration, options);
|
||||
}
|
||||
|
||||
if (this.inMemoryBlockOnConsumed > 0 && res.consumedPoints >= this.inMemoryBlockOnConsumed) {
|
||||
// Block key for this.inMemoryBlockDuration seconds
|
||||
this._inMemoryBlockedKeys.add(rlKey, this.inMemoryBlockDuration);
|
||||
res.msBeforeNext = this.msInMemoryBlockDuration;
|
||||
}
|
||||
|
||||
blockPromise
|
||||
.then(() => {
|
||||
reject(res);
|
||||
})
|
||||
.catch((err) => {
|
||||
reject(err);
|
||||
});
|
||||
} else if (this.execEvenly && res.msBeforeNext > 0 && !res.isFirstInDuration) {
|
||||
let delay = Math.ceil(res.msBeforeNext / (res.remainingPoints + 2));
|
||||
if (delay < this.execEvenlyMinDelayMs) {
|
||||
delay = res.consumedPoints * this.execEvenlyMinDelayMs;
|
||||
}
|
||||
|
||||
setTimeout(resolve, delay, res);
|
||||
} else {
|
||||
resolve(res);
|
||||
}
|
||||
}
|
||||
|
||||
_handleError(err, funcName, resolve, reject, key, data = false, options = {}) {
|
||||
if (!(this.insuranceLimiter instanceof RateLimiterAbstract)) {
|
||||
reject(err);
|
||||
} else {
|
||||
this.insuranceLimiter[funcName](key, data, options)
|
||||
.then((res) => {
|
||||
resolve(res);
|
||||
})
|
||||
.catch((res) => {
|
||||
reject(res);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use camelCase version
|
||||
* @returns {BlockedKeys}
|
||||
* @private
|
||||
*/
|
||||
get _inmemoryBlockedKeys() {
|
||||
return this._inMemoryBlockedKeys
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use camelCase version
|
||||
* @param rlKey
|
||||
* @returns {number}
|
||||
*/
|
||||
getInmemoryBlockMsBeforeExpire(rlKey) {
|
||||
return this.getInMemoryBlockMsBeforeExpire(rlKey)
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use camelCase version
|
||||
* @returns {number|number}
|
||||
*/
|
||||
get inmemoryBlockOnConsumed() {
|
||||
return this.inMemoryBlockOnConsumed;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use camelCase version
|
||||
* @param value
|
||||
*/
|
||||
set inmemoryBlockOnConsumed(value) {
|
||||
this.inMemoryBlockOnConsumed = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use camelCase version
|
||||
* @returns {number|number}
|
||||
*/
|
||||
get inmemoryBlockDuration() {
|
||||
return this.inMemoryBlockDuration;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use camelCase version
|
||||
* @param value
|
||||
*/
|
||||
set inmemoryBlockDuration(value) {
|
||||
this.inMemoryBlockDuration = value
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated Use camelCase version
|
||||
* @returns {number}
|
||||
*/
|
||||
get msInmemoryBlockDuration() {
|
||||
return this.inMemoryBlockDuration * 1000;
|
||||
}
|
||||
|
||||
getInMemoryBlockMsBeforeExpire(rlKey) {
|
||||
if (this.inMemoryBlockOnConsumed > 0) {
|
||||
return this._inMemoryBlockedKeys.msBeforeExpire(rlKey);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
get inMemoryBlockOnConsumed() {
|
||||
return this._inMemoryBlockOnConsumed;
|
||||
}
|
||||
|
||||
set inMemoryBlockOnConsumed(value) {
|
||||
this._inMemoryBlockOnConsumed = value ? parseInt(value) : 0;
|
||||
if (this.inMemoryBlockOnConsumed > 0 && this.points > this.inMemoryBlockOnConsumed) {
|
||||
throw new Error('inMemoryBlockOnConsumed option must be greater or equal "points" option');
|
||||
}
|
||||
}
|
||||
|
||||
get inMemoryBlockDuration() {
|
||||
return this._inMemoryBlockDuration;
|
||||
}
|
||||
|
||||
set inMemoryBlockDuration(value) {
|
||||
this._inMemoryBlockDuration = value ? parseInt(value) : 0;
|
||||
if (this.inMemoryBlockDuration > 0 && this.inMemoryBlockOnConsumed === 0) {
|
||||
throw new Error('inMemoryBlockOnConsumed option must be set up');
|
||||
}
|
||||
}
|
||||
|
||||
get msInMemoryBlockDuration() {
|
||||
return this._inMemoryBlockDuration * 1000;
|
||||
}
|
||||
|
||||
get insuranceLimiter() {
|
||||
return this._insuranceLimiter;
|
||||
}
|
||||
|
||||
set insuranceLimiter(value) {
|
||||
if (typeof value !== 'undefined' && !(value instanceof RateLimiterAbstract)) {
|
||||
throw new Error('insuranceLimiter must be instance of RateLimiterAbstract');
|
||||
}
|
||||
this._insuranceLimiter = value;
|
||||
if (this._insuranceLimiter) {
|
||||
this._insuranceLimiter.blockDuration = this.blockDuration;
|
||||
this._insuranceLimiter.execEvenly = this.execEvenly;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Block any key for secDuration seconds
|
||||
*
|
||||
* @param key
|
||||
* @param secDuration
|
||||
* @param {Object} options
|
||||
*
|
||||
* @return Promise<RateLimiterRes>
|
||||
*/
|
||||
block(key, secDuration, options = {}) {
|
||||
const msDuration = secDuration * 1000;
|
||||
return this._block(this.getKey(key), this.points + 1, msDuration, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set points by key for any duration
|
||||
*
|
||||
* @param key
|
||||
* @param points
|
||||
* @param secDuration
|
||||
* @param {Object} options
|
||||
*
|
||||
* @return Promise<RateLimiterRes>
|
||||
*/
|
||||
set(key, points, secDuration, options = {}) {
|
||||
const msDuration = (secDuration >= 0 ? secDuration : this.duration) * 1000;
|
||||
return this._block(this.getKey(key), points, msDuration, options);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param key
|
||||
* @param pointsToConsume
|
||||
* @param {Object} options
|
||||
* @returns Promise<RateLimiterRes>
|
||||
*/
|
||||
consume(key, pointsToConsume = 1, options = {}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const rlKey = this.getKey(key);
|
||||
|
||||
const inMemoryBlockMsBeforeExpire = this.getInMemoryBlockMsBeforeExpire(rlKey);
|
||||
if (inMemoryBlockMsBeforeExpire > 0) {
|
||||
return reject(new RateLimiterRes(0, inMemoryBlockMsBeforeExpire));
|
||||
}
|
||||
|
||||
this._upsert(rlKey, pointsToConsume, this._getKeySecDuration(options) * 1000, false, options)
|
||||
.then((res) => {
|
||||
this._afterConsume(resolve, reject, rlKey, pointsToConsume, res);
|
||||
})
|
||||
.catch((err) => {
|
||||
this._handleError(err, 'consume', resolve, reject, key, pointsToConsume, options);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param key
|
||||
* @param points
|
||||
* @param {Object} options
|
||||
* @returns Promise<RateLimiterRes>
|
||||
*/
|
||||
penalty(key, points = 1, options = {}) {
|
||||
const rlKey = this.getKey(key);
|
||||
return new Promise((resolve, reject) => {
|
||||
this._upsert(rlKey, points, this._getKeySecDuration(options) * 1000, false, options)
|
||||
.then((res) => {
|
||||
resolve(this._getRateLimiterRes(rlKey, points, res));
|
||||
})
|
||||
.catch((err) => {
|
||||
this._handleError(err, 'penalty', resolve, reject, key, points, options);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param key
|
||||
* @param points
|
||||
* @param {Object} options
|
||||
* @returns Promise<RateLimiterRes>
|
||||
*/
|
||||
reward(key, points = 1, options = {}) {
|
||||
const rlKey = this.getKey(key);
|
||||
return new Promise((resolve, reject) => {
|
||||
this._upsert(rlKey, -points, this._getKeySecDuration(options) * 1000, false, options)
|
||||
.then((res) => {
|
||||
resolve(this._getRateLimiterRes(rlKey, -points, res));
|
||||
})
|
||||
.catch((err) => {
|
||||
this._handleError(err, 'reward', resolve, reject, key, points, options);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param key
|
||||
* @param {Object} options
|
||||
* @returns Promise<RateLimiterRes>|null
|
||||
*/
|
||||
get(key, options = {}) {
|
||||
const rlKey = this.getKey(key);
|
||||
return new Promise((resolve, reject) => {
|
||||
this._get(rlKey, options)
|
||||
.then((res) => {
|
||||
if (res === null || typeof res === 'undefined') {
|
||||
resolve(null);
|
||||
} else {
|
||||
resolve(this._getRateLimiterRes(rlKey, 0, res));
|
||||
}
|
||||
})
|
||||
.catch((err) => {
|
||||
this._handleError(err, 'get', resolve, reject, key, options);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param key
|
||||
* @param {Object} options
|
||||
* @returns Promise<boolean>
|
||||
*/
|
||||
delete(key, options = {}) {
|
||||
const rlKey = this.getKey(key);
|
||||
return new Promise((resolve, reject) => {
|
||||
this._delete(rlKey, options)
|
||||
.then((res) => {
|
||||
this._inMemoryBlockedKeys.delete(rlKey);
|
||||
resolve(res);
|
||||
})
|
||||
.catch((err) => {
|
||||
this._handleError(err, 'delete', resolve, reject, key, options);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup keys no-matter expired or not.
|
||||
*/
|
||||
deleteInMemoryBlockedAll() {
|
||||
this._inMemoryBlockedKeys.delete();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get RateLimiterRes object filled depending on storeResult, which specific for exact store
|
||||
*
|
||||
* @param rlKey
|
||||
* @param changedPoints
|
||||
* @param storeResult
|
||||
* @private
|
||||
*/
|
||||
_getRateLimiterRes(rlKey, changedPoints, storeResult) { // eslint-disable-line no-unused-vars
|
||||
throw new Error("You have to implement the method '_getRateLimiterRes'!");
|
||||
}
|
||||
|
||||
/**
|
||||
* Block key for this.msBlockDuration milliseconds
|
||||
* Usually, it just prolongs lifetime of key
|
||||
*
|
||||
* @param rlKey
|
||||
* @param initPoints
|
||||
* @param msDuration
|
||||
* @param {Object} options
|
||||
*
|
||||
* @return Promise<any>
|
||||
*/
|
||||
_block(rlKey, initPoints, msDuration, options = {}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
this._upsert(rlKey, initPoints, msDuration, true, options)
|
||||
.then(() => {
|
||||
resolve(new RateLimiterRes(0, msDuration > 0 ? msDuration : -1, initPoints));
|
||||
})
|
||||
.catch((err) => {
|
||||
this._handleError(err, 'block', resolve, reject, this.parseKey(rlKey), msDuration / 1000, options);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Have to be implemented in every limiter
|
||||
* Resolve with raw result from Store OR null if rlKey is not set
|
||||
* or Reject with error
|
||||
*
|
||||
* @param rlKey
|
||||
* @param {Object} options
|
||||
* @private
|
||||
*
|
||||
* @return Promise<any>
|
||||
*/
|
||||
_get(rlKey, options = {}) { // eslint-disable-line no-unused-vars
|
||||
throw new Error("You have to implement the method '_get'!");
|
||||
}
|
||||
|
||||
/**
|
||||
* Have to be implemented
|
||||
* Resolve with true OR false if rlKey doesn't exist
|
||||
* or Reject with error
|
||||
*
|
||||
* @param rlKey
|
||||
* @param {Object} options
|
||||
* @private
|
||||
*
|
||||
* @return Promise<any>
|
||||
*/
|
||||
_delete(rlKey, options = {}) { // eslint-disable-line no-unused-vars
|
||||
throw new Error("You have to implement the method '_delete'!");
|
||||
}
|
||||
|
||||
/**
|
||||
* Have to be implemented
|
||||
* Resolve with object used for {@link _getRateLimiterRes} to generate {@link RateLimiterRes}
|
||||
*
|
||||
* @param {string} rlKey
|
||||
* @param {number} points
|
||||
* @param {number} msDuration
|
||||
* @param {boolean} forceExpire
|
||||
* @param {Object} options
|
||||
* @abstract
|
||||
*
|
||||
* @return Promise<Object>
|
||||
*/
|
||||
_upsert(rlKey, points, msDuration, forceExpire = false, options = {}) {
|
||||
throw new Error("You have to implement the method '_upsert'!");
|
||||
}
|
||||
};
|
||||
51
framework/node_modules/node-rate-limiter-flexible/lib/RateLimiterUnion.js
generated
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
const RateLimiterAbstract = require('./RateLimiterAbstract');
|
||||
|
||||
module.exports = class RateLimiterUnion {
|
||||
constructor(...limiters) {
|
||||
if (limiters.length < 1) {
|
||||
throw new Error('RateLimiterUnion: at least one limiter have to be passed');
|
||||
}
|
||||
limiters.forEach((limiter) => {
|
||||
if (!(limiter instanceof RateLimiterAbstract)) {
|
||||
throw new Error('RateLimiterUnion: all limiters have to be instance of RateLimiterAbstract');
|
||||
}
|
||||
});
|
||||
|
||||
this._limiters = limiters;
|
||||
}
|
||||
|
||||
consume(key, points = 1) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const promises = [];
|
||||
this._limiters.forEach((limiter) => {
|
||||
promises.push(limiter.consume(key, points).catch(rej => ({ rejected: true, rej })));
|
||||
});
|
||||
|
||||
Promise.all(promises)
|
||||
.then((res) => {
|
||||
const resObj = {};
|
||||
let rejected = false;
|
||||
|
||||
res.forEach((item) => {
|
||||
if (item.rejected === true) {
|
||||
rejected = true;
|
||||
}
|
||||
});
|
||||
|
||||
for (let i = 0; i < res.length; i++) {
|
||||
if (rejected && res[i].rejected === true) {
|
||||
resObj[this._limiters[i].keyPrefix] = res[i].rej;
|
||||
} else if (!rejected) {
|
||||
resObj[this._limiters[i].keyPrefix] = res[i];
|
||||
}
|
||||
}
|
||||
|
||||
if (rejected) {
|
||||
reject(resObj);
|
||||
} else {
|
||||
resolve(resObj);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
75
framework/node_modules/node-rate-limiter-flexible/lib/component/BlockedKeys/BlockedKeys.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
module.exports = class BlockedKeys {
|
||||
constructor() {
|
||||
this._keys = {}; // {'key': 1526279430331}
|
||||
this._addedKeysAmount = 0;
|
||||
}
|
||||
|
||||
collectExpired() {
|
||||
const now = Date.now();
|
||||
|
||||
Object.keys(this._keys).forEach((key) => {
|
||||
if (this._keys[key] <= now) {
|
||||
delete this._keys[key];
|
||||
}
|
||||
});
|
||||
|
||||
this._addedKeysAmount = Object.keys(this._keys).length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add new blocked key
|
||||
*
|
||||
* @param key String
|
||||
* @param sec Number
|
||||
*/
|
||||
add(key, sec) {
|
||||
this.addMs(key, sec * 1000);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add new blocked key for ms
|
||||
*
|
||||
* @param key String
|
||||
* @param ms Number
|
||||
*/
|
||||
addMs(key, ms) {
|
||||
this._keys[key] = Date.now() + ms;
|
||||
this._addedKeysAmount++;
|
||||
if (this._addedKeysAmount > 999) {
|
||||
this.collectExpired();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 0 means not blocked
|
||||
*
|
||||
* @param key
|
||||
* @returns {number}
|
||||
*/
|
||||
msBeforeExpire(key) {
|
||||
const expire = this._keys[key];
|
||||
|
||||
if (expire && expire >= Date.now()) {
|
||||
this.collectExpired();
|
||||
const now = Date.now();
|
||||
return expire >= now ? expire - now : 0;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* If key is not given, delete all data in memory
|
||||
*
|
||||
* @param {string|undefined} key
|
||||
*/
|
||||
delete(key) {
|
||||
if (key) {
|
||||
delete this._keys[key];
|
||||
} else {
|
||||
Object.keys(this._keys).forEach((key) => {
|
||||
delete this._keys[key];
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
3
framework/node_modules/node-rate-limiter-flexible/lib/component/BlockedKeys/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
const BlockedKeys = require('./BlockedKeys');
|
||||
|
||||
module.exports = BlockedKeys;
|
||||
83
framework/node_modules/node-rate-limiter-flexible/lib/component/MemoryStorage/MemoryStorage.js
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
const Record = require('./Record');
|
||||
const RateLimiterRes = require('../../RateLimiterRes');
|
||||
|
||||
module.exports = class MemoryStorage {
|
||||
constructor() {
|
||||
/**
|
||||
* @type {Object.<string, Record>}
|
||||
* @private
|
||||
*/
|
||||
this._storage = {};
|
||||
}
|
||||
|
||||
incrby(key, value, durationSec) {
|
||||
if (this._storage[key]) {
|
||||
const msBeforeExpires = this._storage[key].expiresAt
|
||||
? this._storage[key].expiresAt.getTime() - new Date().getTime()
|
||||
: -1;
|
||||
if (msBeforeExpires !== 0) {
|
||||
// Change value
|
||||
this._storage[key].value = this._storage[key].value + value;
|
||||
|
||||
return new RateLimiterRes(0, msBeforeExpires, this._storage[key].value, false);
|
||||
}
|
||||
|
||||
return this.set(key, value, durationSec);
|
||||
}
|
||||
return this.set(key, value, durationSec);
|
||||
}
|
||||
|
||||
set(key, value, durationSec) {
|
||||
const durationMs = durationSec * 1000;
|
||||
|
||||
if (this._storage[key] && this._storage[key].timeoutId) {
|
||||
clearTimeout(this._storage[key].timeoutId);
|
||||
}
|
||||
|
||||
this._storage[key] = new Record(
|
||||
value,
|
||||
durationMs > 0 ? new Date(Date.now() + durationMs) : null
|
||||
);
|
||||
if (durationMs > 0) {
|
||||
this._storage[key].timeoutId = setTimeout(() => {
|
||||
delete this._storage[key];
|
||||
}, durationMs);
|
||||
if (this._storage[key].timeoutId.unref) {
|
||||
this._storage[key].timeoutId.unref();
|
||||
}
|
||||
}
|
||||
|
||||
return new RateLimiterRes(0, durationMs === 0 ? -1 : durationMs, this._storage[key].value, true);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param key
|
||||
* @returns {*}
|
||||
*/
|
||||
get(key) {
|
||||
if (this._storage[key]) {
|
||||
const msBeforeExpires = this._storage[key].expiresAt
|
||||
? this._storage[key].expiresAt.getTime() - new Date().getTime()
|
||||
: -1;
|
||||
return new RateLimiterRes(0, msBeforeExpires, this._storage[key].value, false);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param key
|
||||
* @returns {boolean}
|
||||
*/
|
||||
delete(key) {
|
||||
if (this._storage[key]) {
|
||||
if (this._storage[key].timeoutId) {
|
||||
clearTimeout(this._storage[key].timeoutId);
|
||||
}
|
||||
delete this._storage[key];
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
};
|
||||
40
framework/node_modules/node-rate-limiter-flexible/lib/component/MemoryStorage/Record.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
module.exports = class Record {
|
||||
/**
|
||||
*
|
||||
* @param value int
|
||||
* @param expiresAt Date|int
|
||||
* @param timeoutId
|
||||
*/
|
||||
constructor(value, expiresAt, timeoutId = null) {
|
||||
this.value = value;
|
||||
this.expiresAt = expiresAt;
|
||||
this.timeoutId = timeoutId;
|
||||
}
|
||||
|
||||
get value() {
|
||||
return this._value;
|
||||
}
|
||||
|
||||
set value(value) {
|
||||
this._value = parseInt(value);
|
||||
}
|
||||
|
||||
get expiresAt() {
|
||||
return this._expiresAt;
|
||||
}
|
||||
|
||||
set expiresAt(value) {
|
||||
if (!(value instanceof Date) && Number.isInteger(value)) {
|
||||
value = new Date(value);
|
||||
}
|
||||
this._expiresAt = value;
|
||||
}
|
||||
|
||||
get timeoutId() {
|
||||
return this._timeoutId;
|
||||
}
|
||||
|
||||
set timeoutId(value) {
|
||||
this._timeoutId = value;
|
||||
}
|
||||
};
|
||||
3
framework/node_modules/node-rate-limiter-flexible/lib/component/MemoryStorage/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
const MemoryStorage = require('./MemoryStorage');
|
||||
|
||||
module.exports = MemoryStorage;
|
||||
13
framework/node_modules/node-rate-limiter-flexible/lib/component/RateLimiterQueueError.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
module.exports = class RateLimiterQueueError extends Error {
|
||||
constructor(message, extra) {
|
||||
super();
|
||||
if (Error.captureStackTrace) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
}
|
||||
this.name = 'CustomError';
|
||||
this.message = message;
|
||||
if (extra) {
|
||||
this.extra = extra;
|
||||
}
|
||||
}
|
||||
};
|
||||
9
framework/node_modules/node-rate-limiter-flexible/lib/component/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
export class RateLimiterQueueError extends Error {
|
||||
|
||||
constructor(message?: string, extra?: string);
|
||||
|
||||
readonly name: string;
|
||||
readonly message: string;
|
||||
readonly extra: string;
|
||||
|
||||
}
|
||||
16
framework/node_modules/node-rate-limiter-flexible/lib/constants.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
const LIMITER_TYPES = {
|
||||
MEMORY: 'memory',
|
||||
CLUSTER: 'cluster',
|
||||
MEMCACHE: 'memcache',
|
||||
MONGO: 'mongo',
|
||||
REDIS: 'redis',
|
||||
MYSQL: 'mysql',
|
||||
POSTGRES: 'postgres',
|
||||
};
|
||||
|
||||
const ERR_UNKNOWN_LIMITER_TYPE_MESSAGE = 'Unknown limiter type. Use one of LIMITER_TYPES constants.';
|
||||
|
||||
module.exports = {
|
||||
LIMITER_TYPES,
|
||||
ERR_UNKNOWN_LIMITER_TYPE_MESSAGE,
|
||||
};
|
||||
392
framework/node_modules/node-rate-limiter-flexible/lib/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,392 @@
|
||||
export interface IRateLimiterRes {
|
||||
msBeforeNext?: number;
|
||||
remainingPoints?: number;
|
||||
consumedPoints?: number;
|
||||
isFirstInDuration?: boolean;
|
||||
}
|
||||
|
||||
export class RateLimiterRes {
|
||||
constructor(
|
||||
remainingPoints?: number,
|
||||
msBeforeNext?: number,
|
||||
consumedPoints?: number,
|
||||
isFirstInDuration?: boolean
|
||||
);
|
||||
|
||||
readonly msBeforeNext: number;
|
||||
readonly remainingPoints: number;
|
||||
readonly consumedPoints: number;
|
||||
readonly isFirstInDuration: boolean;
|
||||
|
||||
toString(): string;
|
||||
toJSON(): {
|
||||
remainingPoints: number;
|
||||
msBeforeNext: number;
|
||||
consumedPoints: number;
|
||||
isFirstInDuration: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
export class RateLimiterAbstract {
|
||||
constructor(opts: IRateLimiterOptions);
|
||||
|
||||
/**
|
||||
* Maximum number of points can be consumed over duration. Limiter compares this number with
|
||||
* number of consumed points by key to decide if an operation should be rejected or resolved.
|
||||
*/
|
||||
points: number;
|
||||
|
||||
/**
|
||||
* Number of seconds before consumed points are reset.
|
||||
* Keys never expire, if duration is 0.
|
||||
*/
|
||||
duration: number;
|
||||
|
||||
/**
|
||||
* duration in milliseconds
|
||||
*/
|
||||
get msDuration(): number;
|
||||
|
||||
/**
|
||||
* If positive number and consumed more than points in current duration, block for blockDuration
|
||||
* seconds.
|
||||
*/
|
||||
blockDuration: number;
|
||||
|
||||
/**
|
||||
* blockDuration in milliseconds
|
||||
*/
|
||||
get msBlockDuration(): number;
|
||||
|
||||
/**
|
||||
* Delay action to be executed evenly over duration First action in duration is executed without
|
||||
* delay. All next allowed actions in current duration are delayed by formula
|
||||
* msBeforeDurationEnd / (remainingPoints + 2) with minimum delay of duration * 1000 / points.
|
||||
* It allows to cut off load peaks similar way to Leaky Bucket.
|
||||
*
|
||||
* Note: it isn't recommended to use it for long duration and few points, as it may delay action
|
||||
* for too long with default execEvenlyMinDelayMs.
|
||||
*/
|
||||
execEvenly: boolean;
|
||||
|
||||
/**
|
||||
* Sets minimum delay in milliseconds, when action is delayed with execEvenly
|
||||
*/
|
||||
execEvenlyMinDelayMs: number;
|
||||
|
||||
/**
|
||||
* If you need to create several limiters for different purpose.
|
||||
* Set to empty string '', if keys should be stored without prefix.
|
||||
*/
|
||||
keyPrefix: string;
|
||||
|
||||
/**
|
||||
* Returns internal key prefixed with keyPrefix option as it is saved in store.
|
||||
*/
|
||||
getKey(key: string | number): string;
|
||||
|
||||
/**
|
||||
* Returns internal key without the keyPrefix.
|
||||
*/
|
||||
parseKey(rlKey: string): string;
|
||||
|
||||
/**
|
||||
* @param key is usually IP address or some unique client id
|
||||
* @param pointsToConsume number of points consumed. default: 1
|
||||
* @param options is object with additional settings:
|
||||
* - customDuration expire in seconds for this operation only overwrites limiter's duration. It doesn't work, if key already created.
|
||||
* @returns Returns Promise, which:
|
||||
* - `resolved` with `RateLimiterRes` when point(s) is consumed, so action can be done
|
||||
* - `rejected` only for store and database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
|
||||
* - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
|
||||
* - `rejected` when there is no points to be consumed, where reject reason `rejRes` is `RateLimiterRes` object
|
||||
* - `rejected` when key is blocked (if block strategy is set up), where reject reason `rejRes` is `RateLimiterRes` object
|
||||
*/
|
||||
consume(
|
||||
key: string | number,
|
||||
pointsToConsume?: number,
|
||||
options?: { [key: string]: any }
|
||||
): Promise<RateLimiterRes>;
|
||||
|
||||
/**
|
||||
* Fine key by points number of points for one duration.
|
||||
*
|
||||
* Note: Depending on time penalty may go to next durations
|
||||
*
|
||||
* @returns Returns Promise, which:
|
||||
* - `resolved` with RateLimiterRes
|
||||
* - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
|
||||
* - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
|
||||
*/
|
||||
penalty(
|
||||
key: string | number,
|
||||
points?: number,
|
||||
options?: { [key: string]: any }
|
||||
): Promise<RateLimiterRes>;
|
||||
|
||||
/**
|
||||
* Reward key by points number of points for one duration.
|
||||
* Note: Depending on time reward may go to next durations
|
||||
* @returns Promise, which:
|
||||
* - `resolved` with RateLimiterRes
|
||||
* - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
|
||||
* - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
|
||||
*/
|
||||
reward(
|
||||
key: string | number,
|
||||
points?: number,
|
||||
options?: { [key: string]: any }
|
||||
): Promise<RateLimiterRes>;
|
||||
|
||||
/**
|
||||
* Get RateLimiterRes in current duration. It always returns RateLimiterRes.isFirstInDuration=false.
|
||||
* @param key is usually IP address or some unique client id
|
||||
* @param options
|
||||
* @returns Promise, which:
|
||||
* - `resolved` with RateLimiterRes if key is set
|
||||
* - `resolved` with null if key is NOT set or expired
|
||||
* - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
|
||||
* - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
|
||||
*/
|
||||
get(
|
||||
key: string | number,
|
||||
options?: { [key: string]: any }
|
||||
): Promise<RateLimiterRes | null>;
|
||||
|
||||
/**
|
||||
* Set points to key for secDuration seconds.
|
||||
* Store it forever, if secDuration is 0.
|
||||
* @param key
|
||||
* @param points
|
||||
* @param secDuration
|
||||
* @param options
|
||||
* @returns Promise, which:
|
||||
* - `resolved` with RateLimiterRes
|
||||
* - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
|
||||
* - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
|
||||
*/
|
||||
set(
|
||||
key: string | number,
|
||||
points: number,
|
||||
secDuration: number,
|
||||
options?: { [key: string]: any }
|
||||
): Promise<RateLimiterRes>;
|
||||
|
||||
/**
|
||||
* Block key by setting consumed points to points + 1 for secDuration seconds.
|
||||
*
|
||||
* It force updates expire, if there is already key.
|
||||
*
|
||||
* Blocked key never expires, if secDuration is 0.
|
||||
* @returns Promise, which:
|
||||
* - `resolved` with RateLimiterRes
|
||||
* - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
|
||||
* - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
|
||||
*/
|
||||
block(
|
||||
key: string | number,
|
||||
secDuration: number,
|
||||
options?: { [key: string]: any }
|
||||
): Promise<RateLimiterRes>;
|
||||
|
||||
/**
|
||||
* Delete all data related to key.
|
||||
*
|
||||
* For example, previously blocked key is not blocked after delete as there is no data anymore.
|
||||
* @returns Promise, which:
|
||||
* - `resolved` with boolean, true if data is removed by key, false if there is no such key.
|
||||
* - `rejected` only for database limiters if insuranceLimiter isn't setup: when some error happened, where reject reason `rejRes` is Error object
|
||||
* - `rejected` only for RateLimiterCluster if insuranceLimiter isn't setup: when timeoutMs exceeded, where reject reason `rejRes` is Error object
|
||||
*/
|
||||
delete(
|
||||
key: string | number,
|
||||
options?: { [key: string]: any }
|
||||
): Promise<boolean>;
|
||||
}
|
||||
|
||||
export class RateLimiterStoreAbstract extends RateLimiterAbstract {
|
||||
constructor(opts: IRateLimiterStoreOptions);
|
||||
|
||||
/**
|
||||
* Cleanup keys blocked in current process memory
|
||||
*/
|
||||
deleteInMemoryBlockedAll(): void;
|
||||
}
|
||||
|
||||
interface IRateLimiterOptions {
|
||||
keyPrefix?: string;
|
||||
points?: number;
|
||||
duration?: number;
|
||||
execEvenly?: boolean;
|
||||
execEvenlyMinDelayMs?: number;
|
||||
blockDuration?: number;
|
||||
}
|
||||
|
||||
interface IRateLimiterClusterOptions extends IRateLimiterOptions {
|
||||
timeoutMs?: number;
|
||||
}
|
||||
|
||||
/** Options for store-backed limiters (Redis, Mongo, MySQL, Postgres, Memcached). */
interface IRateLimiterStoreOptions extends IRateLimiterOptions {
  /** Connected client/connection object for the backing store. */
  storeClient: any;
  /** Store client flavour hint (e.g. which driver `storeClient` is) — see library docs. */
  storeType?: string;
  /** Block key in process memory once this many points are consumed (saves store round-trips). */
  inMemoryBlockOnConsumed?: number;
  /** Seconds to keep the in-process memory block. */
  inMemoryBlockDuration?: number;
  /**
   * @deprecated Use camelCased inMemoryBlockOnConsumed option
   */
  inmemoryBlockOnConsumed?: number;
  /**
   * @deprecated Use camelCased inMemoryBlockOnConsumed option
   */
  inmemoryBlockDuration?: number;
  /** Fallback limiter used when the store errors. */
  insuranceLimiter?: RateLimiterAbstract;
  /** Database name (database-backed limiters). */
  dbName?: string;
  /** Table/collection name (database-backed limiters). */
  tableName?: string;
  /** Set true if the table/collection already exists to skip creation. */
  tableCreated?: boolean;
}
|
||||
|
||||
/** Options for stores without native key expiry (MySQL, Postgres). */
interface IRateLimiterStoreNoAutoExpiryOptions extends IRateLimiterStoreOptions {
  /** Clear expired rows on an interval timer instead of leaving them in place. */
  clearExpiredByTimeout?: boolean;
}
|
||||
|
||||
/** Options specific to RateLimiterMongo. */
interface IRateLimiterMongoOptions extends IRateLimiterStoreOptions {
  /** Extra fields mixed into the unique index key — see library docs. */
  indexKeyPrefix?: {
    [key: string]: any;
  };
}
|
||||
|
||||
/** Options specific to RateLimiterRedis. */
interface IRateLimiterRedisOptions extends IRateLimiterStoreOptions {
  /** Reject immediately when the Redis client reports it is not ready. */
  rejectIfRedisNotReady?: boolean;
}
|
||||
|
||||
/** Node-style ready callback invoked once a limiter's storage is prepared. */
interface ICallbackReady {
  (error?: Error): void;
}
|
||||
|
||||
/** Options for RLWrapperBlackAndWhite, which skips limiting for listed keys. */
interface IRLWrapperBlackAndWhiteOptions {
  /** Wrapped limiter to delegate to. */
  limiter: RateLimiterAbstract;
  /** Keys that are always rejected. */
  blackList?: string[] | number[];
  /** Keys that are never limited. */
  whiteList?: string[] | number[];
  /** Custom predicate deciding black-listing for a key. */
  isBlackListed?(key: any): boolean;
  /** Custom predicate deciding white-listing for a key. */
  isWhiteListed?(key: any): boolean;
  /** Run the wrapped limiter's action even for listed keys — see library docs. */
  runActionAnyway?: boolean;
}
|
||||
|
||||
/** Limiter storing counters in the current process memory. */
export class RateLimiterMemory extends RateLimiterAbstract {
  constructor(opts: IRateLimiterOptions);
}
|
||||
|
||||
/** Worker-side limiter for Node cluster; talks to a RateLimiterClusterMaster over IPC. */
export class RateLimiterCluster extends RateLimiterAbstract {
  constructor(opts: IRateLimiterClusterOptions);
}
|
||||
|
||||
/** Master-side coordinator for RateLimiterCluster; create once in the master process. */
export class RateLimiterClusterMaster {
  constructor();
}
|
||||
|
||||
/** Master-side coordinator for PM2-managed clusters; takes the `pm2` module instance. */
export class RateLimiterClusterMasterPM2 {
  constructor(pm2: any);
}
|
||||
|
||||
/** Limiter backed by Redis. */
export class RateLimiterRedis extends RateLimiterStoreAbstract {
  constructor(opts: IRateLimiterRedisOptions);
}
|
||||
|
||||
/** Per-call options for RateLimiterMongo methods. */
export interface IRateLimiterMongoFunctionOptions {
  /** Extra document attributes attached to the stored record. */
  attrs: { [key: string]: any };
}
|
||||
|
||||
/**
 * Limiter backed by MongoDB. Overloads every base method to accept
 * IRateLimiterMongoFunctionOptions (extra document attributes) per call.
 */
export class RateLimiterMongo extends RateLimiterStoreAbstract {
  constructor(opts: IRateLimiterMongoOptions);

  /** Get the configured index key prefix object. */
  indexKeyPrefix(): Object;
  /** Set the index key prefix object. */
  indexKeyPrefix(obj?: Object): void;

  /** Consume points for key; resolves with remaining-points info, rejects when limited. */
  consume(
    key: string | number,
    pointsToConsume?: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;

  /** Add points to key's consumed counter (makes the key "worse off"). */
  penalty(
    key: string | number,
    points?: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;

  /** Subtract points from key's consumed counter (gives points back). */
  reward(
    key: string | number,
    points?: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;

  /** Block key for secDuration seconds (0 = never expires — see base class docs). */
  block(
    key: string | number,
    secDuration: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;

  /** Read key state; resolves null when the key does not exist. */
  get(
    key: string | number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes | null>;

  /** Set consumed points for key for secDuration seconds. */
  set(
    key: string | number,
    points: number,
    secDuration: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;

  /** Remove all data for key; resolves true if something was removed. */
  delete(
    key: string | number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<boolean>;
}
|
||||
|
||||
/** Limiter backed by MySQL; `cb` fires once table setup completes (or errors). */
export class RateLimiterMySQL extends RateLimiterStoreAbstract {
  constructor(opts: IRateLimiterStoreNoAutoExpiryOptions, cb?: ICallbackReady);
}
|
||||
|
||||
/** Limiter backed by PostgreSQL; `cb` fires once table setup completes (or errors). */
export class RateLimiterPostgres extends RateLimiterStoreAbstract {
  constructor(opts: IRateLimiterStoreNoAutoExpiryOptions, cb?: ICallbackReady);
}
|
||||
|
||||
/** Limiter backed by Memcached; inherits the store-abstract interface unchanged. */
export class RateLimiterMemcache extends RateLimiterStoreAbstract {}
|
||||
|
||||
/** Combines several limiters; consume applies to all of them and resolves with all results. */
export class RateLimiterUnion {
  constructor(...limiters: RateLimiterAbstract[]);

  /** Consume `points` for key on every wrapped limiter. */
  consume(key: string | number, points?: number): Promise<RateLimiterRes[]>;
}
|
||||
|
||||
/** Wrapper that bypasses or always-rejects keys per black/white lists. */
export class RLWrapperBlackAndWhite extends RateLimiterAbstract {
  constructor(opts: IRLWrapperBlackAndWhiteOptions);
}
|
||||
|
||||
/** Options for RateLimiterQueue. */
interface IRateLimiterQueueOpts {
  /** Maximum number of requests allowed to wait in the queue. */
  maxQueueSize?: number;
}
|
||||
|
||||
/**
 * Token-bucket-style queue built on a flexible limiter: requests wait until
 * tokens become available instead of being rejected.
 */
export class RateLimiterQueue {
  constructor(
    limiterFlexible: RateLimiterAbstract | BurstyRateLimiter,
    opts?: IRateLimiterQueueOpts
  );

  /** Tokens currently available for key. */
  getTokensRemaining(key?: string | number): Promise<number>;

  /** Remove `tokens` tokens for key, waiting in the queue if necessary. */
  removeTokens(tokens: number, key?: string | number): Promise<number>;
}
|
||||
|
||||
/**
 * Pairs a primary limiter with a burst limiter: when the primary rejects,
 * the burst limiter may absorb the extra traffic — see library docs.
 */
export class BurstyRateLimiter {
  constructor(
    rateLimiter: RateLimiterAbstract,
    burstLimiter: RateLimiterAbstract
  );

  /** Consume points for key, falling back to the burst limiter on rejection. */
  consume(
    key: string | number,
    pointsToConsume?: number,
    options?: IRateLimiterMongoFunctionOptions
  ): Promise<RateLimiterRes>;
}
|
||||
59
framework/node_modules/node-rate-limiter-flexible/package.json
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
{
|
||||
"name": "rate-limiter-flexible",
|
||||
"version": "2.4.1",
|
||||
"description": "Node.js rate limiter by key and protection from DDoS and Brute-Force attacks in process Memory, Redis, MongoDb, Memcached, MySQL, PostgreSQL, Cluster or PM",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "istanbul -v cover -- _mocha --recursive",
|
||||
"debug-test": "mocha --inspect-brk lib/**/**.test.js",
|
||||
"coveralls": "cat ./coverage/lcov.info | coveralls",
|
||||
"eslint": "eslint --quiet lib/**/**.js test/**/**.js",
|
||||
"eslint-fix": "eslint --fix lib/**/**.js test/**/**.js"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/animir/node-rate-limiter-flexible.git"
|
||||
},
|
||||
"keywords": [
|
||||
"authorization",
|
||||
"security",
|
||||
"rate",
|
||||
"limit",
|
||||
"ratelimiter",
|
||||
"brute",
|
||||
"force",
|
||||
"bruteforce",
|
||||
"throttle",
|
||||
"koa",
|
||||
"express",
|
||||
"hapi",
|
||||
"auth",
|
||||
"ddos",
|
||||
"queue"
|
||||
],
|
||||
"author": "animir <animirr@gmail.com>",
|
||||
"license": "ISC",
|
||||
"bugs": {
|
||||
"url": "https://github.com/animir/node-rate-limiter-flexible/issues"
|
||||
},
|
||||
"homepage": "https://github.com/animir/node-rate-limiter-flexible#readme",
|
||||
"types": "./lib/index.d.ts",
|
||||
"devDependencies": {
|
||||
"chai": "^4.1.2",
|
||||
"coveralls": "^3.0.1",
|
||||
"eslint": "^4.19.1",
|
||||
"eslint-config-airbnb-base": "^12.1.0",
|
||||
"eslint-plugin-import": "^2.7.0",
|
||||
"eslint-plugin-node": "^6.0.1",
|
||||
"eslint-plugin-security": "^1.4.0",
|
||||
"istanbul": "^0.4.5",
|
||||
"memcached-mock": "^0.1.0",
|
||||
"mocha": "^5.1.1",
|
||||
"redis-mock": "^0.48.0",
|
||||
"sinon": "^5.0.10"
|
||||
},
|
||||
"browser": {
|
||||
"cluster": false,
|
||||
"crypto": false
|
||||
}
|
||||
}
|
||||