setup integration test

This commit is contained in:
softprops 2019-10-20 17:50:35 -04:00
parent 1a522d88d8
commit 845942e04a
555 changed files with 103819 additions and 1 deletion

66
node_modules/bottleneck/lib/Batcher.js generated vendored Normal file
View file

@@ -0,0 +1,66 @@
"use strict";
var Batcher, Events, parser;
parser = require("./parser");
Events = require("./Events");
// Batcher groups individually-added items into batches, emitting a "batch"
// event when either `maxSize` items have accumulated or `maxTime` ms have
// elapsed since the first item of the current batch. (Compiled CoffeeScript
// output; the IIFE attaches `defaults` to the prototype before returning.)
Batcher = function () {
  class Batcher {
    // options: { maxTime, maxSize, Promise } — parser.load merges them over
    // the prototype `defaults` and copies the results onto `this`.
    constructor(options = {}) {
      this.options = options;
      parser.load(this.options, this.defaults, this);
      this.Events = new Events(this);
      this._arr = [];
      this._resetPromise();
      this._lastFlush = Date.now();
    }

    // Install a fresh promise for the next batch; `add()` hands this promise
    // to callers so they can await the flush of the batch they joined.
    _resetPromise() {
      return this._promise = new this.Promise((res, rej) => {
        return this._resolve = res;
      });
    }

    // Emit the accumulated items as a single "batch" event, resolve the
    // waiters of this batch, and start a new empty batch.
    _flush() {
      clearTimeout(this._timeout);
      this._lastFlush = Date.now();
      this._resolve();
      this.Events.trigger("batch", this._arr);
      this._arr = [];
      return this._resetPromise();
    }

    // Queue one item. Returns a promise that resolves when the batch
    // containing this item is flushed.
    add(data) {
      var ret;
      this._arr.push(data);
      ret = this._promise;
      if (this._arr.length === this.maxSize) {
        // Size limit reached: flush synchronously.
        this._flush();
      } else if (this.maxTime != null && this._arr.length === 1) {
        // First item of a fresh batch: arm the time-based flush.
        this._timeout = setTimeout(() => {
          return this._flush();
        }, this.maxTime);
      }
      return ret;
    }

  }

  ;
  Batcher.prototype.defaults = {
    maxTime: null,
    maxSize: null,
    Promise: Promise
  };
  return Batcher;
}.call(void 0);
module.exports = Batcher;

594
node_modules/bottleneck/lib/Bottleneck.js generated vendored Normal file
View file

@@ -0,0 +1,594 @@
"use strict";
// Inlined Babel runtime helpers (generated output; avoids a dependency on
// @babel/runtime). Semantics are preserved exactly.

// `var [a, b] = x` — real arrays pass through untouched, other iterables are
// drained up to `i` elements, and anything else throws a TypeError.
function _slicedToArray(arr, i) {
  return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest();
}

// Drain up to `limit` values from an iterable, invoking the iterator's
// `return` method on early exit and re-throwing any iteration error.
function _iterableToArrayLimit(iterable, limit) {
  const collected = [];
  let finished = true;
  let errored = false;
  let iterationError;
  let iterator;
  let step;
  try {
    iterator = iterable[Symbol.iterator]();
    for (; !(finished = (step = iterator.next()).done); finished = true) {
      collected.push(step.value);
      // A falsy limit (0/undefined) means "no limit", matching destructuring
      // of a rest pattern.
      if (limit && collected.length === limit) break;
    }
  } catch (err) {
    errored = true;
    iterationError = err;
  } finally {
    try {
      if (!finished && iterator["return"] != null) {
        iterator["return"]();
      }
    } finally {
      if (errored) {
        throw iterationError;
      }
    }
  }
  return collected;
}

// `var [a, ...rest] = x` — like _slicedToArray but with no element limit.
function _toArray(arr) {
  return _arrayWithHoles(arr) || _iterableToArray(arr) || _nonIterableRest();
}

function _nonIterableRest() {
  throw new TypeError("Invalid attempt to destructure non-iterable instance");
}

// Convert any iterable (or an `arguments` object) into a real array.
function _iterableToArray(iter) {
  if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") {
    return Array.from(iter);
  }
}

function _arrayWithHoles(arr) {
  if (Array.isArray(arr)) {
    return arr;
  }
}

// Advance the generator behind an _asyncToGenerator-wrapped function by one
// step, wiring its yielded promises into the outer promise's resolve/reject.
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
  let info;
  let value;
  try {
    info = gen[key](arg);
    value = info.value;
  } catch (error) {
    reject(error);
    return;
  }
  if (info.done) {
    resolve(value);
  } else {
    Promise.resolve(value).then(_next, _throw);
  }
}

// Turn a generator function into an async-function equivalent: `yield`
// behaves like `await`, and the wrapper returns a Promise.
function _asyncToGenerator(fn) {
  return function (...args) {
    const self = this;
    return new Promise(function (resolve, reject) {
      const gen = fn.apply(self, args);
      function _next(value) {
        asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
      }
      function _throw(err) {
        asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err);
      }
      _next(undefined);
    });
  };
}
var Bottleneck,
DEFAULT_PRIORITY,
Events,
Job,
LocalDatastore,
NUM_PRIORITIES,
Queues,
RedisDatastore,
States,
Sync,
parser,
splice = [].splice;
NUM_PRIORITIES = 10;
DEFAULT_PRIORITY = 5;
parser = require("./parser");
Queues = require("./Queues");
Job = require("./Job");
LocalDatastore = require("./LocalDatastore");
RedisDatastore = require("./RedisDatastore");
Events = require("./Events");
States = require("./States");
Sync = require("./Sync");
// Bottleneck: the main rate-limiter class (compiled CoffeeScript output).
// Jobs are received, queued per-priority, registered against a datastore
// (local or Redis) that enforces the configured limits, then executed when
// capacity frees up. The IIFE attaches the `defaults` objects and static
// members to the class before returning it.
Bottleneck = function () {
  class Bottleneck {
    // Takes a single options object; any extra positional argument is
    // rejected by _validateOptions (a common v1 -> v2 upgrade mistake).
    constructor(options = {}, ...invalid) {
      var storeInstanceOptions, storeOptions;
      this._addToQueue = this._addToQueue.bind(this);
      this._validateOptions(options, invalid);
      parser.load(options, this.instanceDefaults, this);
      this._queues = new Queues(NUM_PRIORITIES);
      this._scheduled = {};
      // "DONE" is only tracked when trackDoneStatus is enabled.
      this._states = new States(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : []));
      this._limiter = null;
      this.Events = new Events(this);
      this._submitLock = new Sync("submit", this.Promise);
      this._registerLock = new Sync("register", this.Promise);
      storeOptions = parser.load(options, this.storeDefaults, {});
      // Select the datastore backend from the options.
      this._store = function () {
        if (this.datastore === "redis" || this.datastore === "ioredis" || this.connection != null) {
          storeInstanceOptions = parser.load(options, this.redisStoreDefaults, {});
          return new RedisDatastore(this, storeOptions, storeInstanceOptions);
        } else if (this.datastore === "local") {
          storeInstanceOptions = parser.load(options, this.localStoreDefaults, {});
          return new LocalDatastore(this, storeOptions, storeInstanceOptions);
        } else {
          throw new Bottleneck.prototype.BottleneckError(`Invalid datastore type: ${this.datastore}`);
        }
      }.call(this);
      // Keep the process alive (ref) only while jobs are queued; allow it to
      // exit (unref) when the queues return to zero.
      this._queues.on("leftzero", () => {
        var ref;
        return (ref = this._store.heartbeat) != null ? typeof ref.ref === "function" ? ref.ref() : void 0 : void 0;
      });
      this._queues.on("zero", () => {
        var ref;
        return (ref = this._store.heartbeat) != null ? typeof ref.unref === "function" ? ref.unref() : void 0 : void 0;
      });
    }

    _validateOptions(options, invalid) {
      if (!(options != null && typeof options === "object" && invalid.length === 0)) {
        throw new Bottleneck.prototype.BottleneckError("Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1.");
      }
    }

    // Resolves once the datastore connection is usable.
    ready() {
      return this._store.ready;
    }

    clients() {
      return this._store.clients;
    }

    // Pub/sub channel names used by the Redis datastores.
    channel() {
      return `b_${this.id}`;
    }

    channel_client() {
      return `b_${this.id}_${this._store.clientId}`;
    }

    publish(message) {
      return this._store.__publish__(message);
    }

    disconnect(flush = true) {
      return this._store.__disconnect__(flush);
    }

    // Chain to a parent limiter: jobs must also acquire capacity on it.
    chain(_limiter) {
      this._limiter = _limiter;
      return this;
    }

    queued(priority) {
      return this._queues.queued(priority);
    }

    clusterQueued() {
      return this._store.__queued__();
    }

    empty() {
      return this.queued() === 0 && this._submitLock.isEmpty();
    }

    running() {
      return this._store.__running__();
    }

    done() {
      return this._store.__done__();
    }

    jobStatus(id) {
      return this._states.jobStatus(id);
    }

    jobs(status) {
      return this._states.statusJobs(status);
    }

    counts() {
      return this._states.statusCounts();
    }

    // Short pseudo-random id used to key scheduled jobs; not
    // cryptographically secure, only needs per-instance uniqueness.
    _randomIndex() {
      return Math.random().toString(36).slice(2);
    }

    check(weight = 1) {
      return this._store.__check__(weight);
    }

    // Remove a scheduled entry; returns whether it was still present
    // (false means another path already cleared it).
    _clearGlobalState(index) {
      if (this._scheduled[index] != null) {
        clearTimeout(this._scheduled[index].expiration);
        delete this._scheduled[index];
        return true;
      } else {
        return false;
      }
    }

    // Release a finished job's weight back to the datastore and emit "idle"
    // when nothing is left running or queued.
    _free(index, job, options, eventInfo) {
      var _this = this;
      return _asyncToGenerator(function* () {
        var e, running;
        try {
          var _ref = yield _this._store.__free__(index, options.weight);
          running = _ref.running;
          _this.Events.trigger("debug", `Freed ${options.id}`, eventInfo);
          if (running === 0 && _this.empty()) {
            return _this.Events.trigger("idle");
          }
        } catch (error1) {
          e = error1;
          return _this.Events.trigger("error", e);
        }
      })();
    }

    // Schedule the actual execution (and optional expiration) of a
    // registered job after `wait` ms.
    _run(index, job, wait) {
      var clearGlobalState, free, run;
      job.doRun();
      clearGlobalState = this._clearGlobalState.bind(this, index);
      run = this._run.bind(this, index, job);
      free = this._free.bind(this, index, job);
      return this._scheduled[index] = {
        timeout: setTimeout(() => {
          return job.doExecute(this._limiter, clearGlobalState, run, free);
        }, wait),
        expiration: job.options.expiration != null ? setTimeout(function () {
          return job.doExpire(clearGlobalState, run, free);
        }, wait + job.options.expiration) : void 0,
        job: job
      };
    }

    // Try to start the highest-priority queued job, if `capacity` allows it.
    // Resolves to the weight drained, or null when nothing could start.
    _drainOne(capacity) {
      return this._registerLock.schedule(() => {
        var args, index, next, options, queue;
        if (this.queued() === 0) {
          return this.Promise.resolve(null);
        }
        queue = this._queues.getFirst();
        var _next2 = next = queue.first();
        options = _next2.options;
        args = _next2.args;
        if (capacity != null && options.weight > capacity) {
          return this.Promise.resolve(null);
        }
        this.Events.trigger("debug", `Draining ${options.id}`, {
          args,
          options
        });
        index = this._randomIndex();
        return this._store.__register__(index, options.weight, options.expiration).then(({
          success,
          wait,
          reservoir
        }) => {
          var empty;
          this.Events.trigger("debug", `Drained ${options.id}`, {
            success,
            args,
            options
          });
          if (success) {
            queue.shift();
            empty = this.empty();
            if (empty) {
              this.Events.trigger("empty");
            }
            if (reservoir === 0) {
              this.Events.trigger("depleted", empty);
            }
            this._run(index, next, wait);
            return this.Promise.resolve(options.weight);
          } else {
            return this.Promise.resolve(null);
          }
        });
      });
    }

    // Keep draining until the store refuses; resolves to the total weight
    // started. Errors are routed to the "error" event, not thrown.
    _drainAll(capacity, total = 0) {
      return this._drainOne(capacity).then(drained => {
        var newCapacity;
        if (drained != null) {
          newCapacity = capacity != null ? capacity - drained : capacity;
          return this._drainAll(newCapacity, total + drained);
        } else {
          return this.Promise.resolve(total);
        }
      }).catch(e => {
        return this.Events.trigger("error", e);
      });
    }

    _dropAllQueued(message) {
      return this._queues.shiftAll(function (job) {
        return job.doDrop({
          message
        });
      });
    }

    // Stop accepting jobs; optionally drop waiting jobs. Resolves when all
    // currently-executing jobs have finished. Calling stop() twice rejects.
    stop(options = {}) {
      var done, waitForExecuting;
      options = parser.load(options, this.stopDefaults);
      // Resolve once the number of not-yet-done jobs drops to `at`
      // (counts 0..3 are RECEIVED/QUEUED/RUNNING/EXECUTING).
      waitForExecuting = at => {
        var finished;
        finished = () => {
          var counts;
          counts = this._states.counts;
          return counts[0] + counts[1] + counts[2] + counts[3] === at;
        };
        return new this.Promise((resolve, reject) => {
          if (finished()) {
            return resolve();
          } else {
            return this.on("done", () => {
              if (finished()) {
                this.removeAllListeners("done");
                return resolve();
              }
            });
          }
        });
      };
      // dropWaitingJobs: neuter _run/_drainOne, drop everything scheduled or
      // queued, then wait for executing jobs. Otherwise: enqueue a weight-0,
      // lowest-priority sentinel job and wait behind it.
      done = options.dropWaitingJobs ? (this._run = function (index, next) {
        return next.doDrop({
          message: options.dropErrorMessage
        });
      }, this._drainOne = () => {
        return this.Promise.resolve(null);
      }, this._registerLock.schedule(() => {
        return this._submitLock.schedule(() => {
          var k, ref, v;
          ref = this._scheduled;
          for (k in ref) {
            v = ref[k];
            if (this.jobStatus(v.job.options.id) === "RUNNING") {
              clearTimeout(v.timeout);
              clearTimeout(v.expiration);
              v.job.doDrop({
                message: options.dropErrorMessage
              });
            }
          }
          this._dropAllQueued(options.dropErrorMessage);
          return waitForExecuting(0);
        });
      })) : this.schedule({
        priority: NUM_PRIORITIES - 1,
        weight: 0
      }, () => {
        return waitForExecuting(1);
      });
      // From now on, any new job is rejected immediately.
      this._receive = function (job) {
        return job._reject(new Bottleneck.prototype.BottleneckError(options.enqueueErrorMessage));
      };
      this.stop = () => {
        return this.Promise.reject(new Bottleneck.prototype.BottleneckError("stop() has already been called"));
      };
      return done;
    }

    // Submit a received job to the datastore; applies the highWater
    // strategy (LEAK / OVERFLOW / OVERFLOW_PRIORITY / BLOCK) when full.
    // Resolves to whether the high-water mark was reached.
    _addToQueue(job) {
      var _this2 = this;
      return _asyncToGenerator(function* () {
        var args, blocked, error, options, reachedHWM, shifted, strategy;
        args = job.args;
        options = job.options;
        try {
          var _ref2 = yield _this2._store.__submit__(_this2.queued(), options.weight);
          reachedHWM = _ref2.reachedHWM;
          blocked = _ref2.blocked;
          strategy = _ref2.strategy;
        } catch (error1) {
          error = error1;
          _this2.Events.trigger("debug", `Could not queue ${options.id}`, {
            args,
            options,
            error
          });
          job.doDrop({
            error
          });
          return false;
        }
        if (blocked) {
          job.doDrop();
          return true;
        } else if (reachedHWM) {
          // Choose a victim according to the configured strategy; OVERFLOW
          // drops the incoming job itself.
          shifted = strategy === Bottleneck.prototype.strategy.LEAK ? _this2._queues.shiftLastFrom(options.priority) : strategy === Bottleneck.prototype.strategy.OVERFLOW_PRIORITY ? _this2._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck.prototype.strategy.OVERFLOW ? job : void 0;
          if (shifted != null) {
            shifted.doDrop();
          }
          if (shifted == null || strategy === Bottleneck.prototype.strategy.OVERFLOW) {
            if (shifted == null) {
              job.doDrop();
            }
            return reachedHWM;
          }
        }
        job.doQueue(reachedHWM, blocked);
        _this2._queues.push(job);
        yield _this2._drainAll();
        return reachedHWM;
      })();
    }

    // Entry point for every new job; rejects duplicates by id.
    // NOTE: stop() replaces this method on the instance.
    _receive(job) {
      if (this._states.jobStatus(job.options.id) != null) {
        job._reject(new Bottleneck.prototype.BottleneckError(`A job with the same id already exists (id=${job.options.id})`));
        return false;
      } else {
        job.doReceive();
        return this._submitLock.schedule(this._addToQueue, job);
      }
    }

    // Callback-style API: submit([options,] fn, ...args, callback).
    // The comma-expression lines below are compiled destructuring: they pull
    // `fn`/`options` off the front of `args` and the callback off the end.
    submit(...args) {
      var cb, fn, job, options, ref, ref1, task;
      if (typeof args[0] === "function") {
        var _ref3, _ref4, _splice$call, _splice$call2;
        ref = args, (_ref3 = ref, _ref4 = _toArray(_ref3), fn = _ref4[0], args = _ref4.slice(1), _ref3), (_splice$call = splice.call(args, -1), _splice$call2 = _slicedToArray(_splice$call, 1), cb = _splice$call2[0], _splice$call);
        options = parser.load({}, this.jobDefaults);
      } else {
        var _ref5, _ref6, _splice$call3, _splice$call4;
        ref1 = args, (_ref5 = ref1, _ref6 = _toArray(_ref5), options = _ref6[0], fn = _ref6[1], args = _ref6.slice(2), _ref5), (_splice$call3 = splice.call(args, -1), _splice$call4 = _slicedToArray(_splice$call3, 1), cb = _splice$call4[0], _splice$call3);
        options = parser.load(options, this.jobDefaults);
      }
      // Adapt the node-style callback fn into a promise-returning task.
      task = (...args) => {
        return new this.Promise(function (resolve, reject) {
          return fn(...args, function (...args) {
            return (args[0] != null ? reject : resolve)(args);
          });
        });
      };
      job = new Job(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise);
      job.promise.then(function (args) {
        return typeof cb === "function" ? cb(...args) : void 0;
      }).catch(function (args) {
        if (Array.isArray(args)) {
          return typeof cb === "function" ? cb(...args) : void 0;
        } else {
          return typeof cb === "function" ? cb(args) : void 0;
        }
      });
      return this._receive(job);
    }

    // Promise-style API: schedule([options,] task, ...args) -> task's promise.
    schedule(...args) {
      var job, options, task;
      if (typeof args[0] === "function") {
        var _args = args;
        var _args2 = _toArray(_args);
        task = _args2[0];
        args = _args2.slice(1);
        options = {};
      } else {
        var _args3 = args;
        var _args4 = _toArray(_args3);
        options = _args4[0];
        task = _args4[1];
        args = _args4.slice(2);
      }
      job = new Job(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise);
      this._receive(job);
      return job.promise;
    }

    // Wrap fn so every call goes through schedule(); preserves `this`.
    wrap(fn) {
      var schedule, wrapped;
      schedule = this.schedule.bind(this);
      wrapped = function wrapped(...args) {
        return schedule(fn.bind(this), ...args);
      };
      wrapped.withOptions = function (options, ...args) {
        return schedule(options, fn, ...args);
      };
      return wrapped;
    }

    // Push updated store settings to the datastore, then apply
    // instance-level settings locally.
    updateSettings(options = {}) {
      var _this3 = this;
      return _asyncToGenerator(function* () {
        yield _this3._store.__updateSettings__(parser.overwrite(options, _this3.storeDefaults));
        parser.overwrite(options, _this3.instanceDefaults, _this3);
        return _this3;
      })();
    }

    currentReservoir() {
      return this._store.__currentReservoir__();
    }

    incrementReservoir(incr = 0) {
      return this._store.__incrementReservoir__(incr);
    }

  }

  ;
  // Static members and prototype-level defaults, attached before the class
  // is returned by the IIFE.
  Bottleneck.default = Bottleneck;
  Bottleneck.Events = Events;
  Bottleneck.version = Bottleneck.prototype.version = require("./version.json").version;
  Bottleneck.strategy = Bottleneck.prototype.strategy = {
    LEAK: 1,
    OVERFLOW: 2,
    OVERFLOW_PRIORITY: 4,
    BLOCK: 3
  };
  Bottleneck.BottleneckError = Bottleneck.prototype.BottleneckError = require("./BottleneckError");
  Bottleneck.Group = Bottleneck.prototype.Group = require("./Group");
  Bottleneck.RedisConnection = Bottleneck.prototype.RedisConnection = require("./RedisConnection");
  Bottleneck.IORedisConnection = Bottleneck.prototype.IORedisConnection = require("./IORedisConnection");
  Bottleneck.Batcher = Bottleneck.prototype.Batcher = require("./Batcher");
  Bottleneck.prototype.jobDefaults = {
    priority: DEFAULT_PRIORITY,
    weight: 1,
    expiration: null,
    id: "<no-id>"
  };
  Bottleneck.prototype.storeDefaults = {
    maxConcurrent: null,
    minTime: 0,
    highWater: null,
    strategy: Bottleneck.prototype.strategy.LEAK,
    penalty: null,
    reservoir: null,
    reservoirRefreshInterval: null,
    reservoirRefreshAmount: null,
    reservoirIncreaseInterval: null,
    reservoirIncreaseAmount: null,
    reservoirIncreaseMaximum: null
  };
  Bottleneck.prototype.localStoreDefaults = {
    Promise: Promise,
    timeout: null,
    heartbeatInterval: 250
  };
  Bottleneck.prototype.redisStoreDefaults = {
    Promise: Promise,
    timeout: null,
    heartbeatInterval: 5000,
    clientTimeout: 10000,
    Redis: null,
    clientOptions: {},
    clusterNodes: null,
    clearDatastore: false,
    connection: null
  };
  Bottleneck.prototype.instanceDefaults = {
    datastore: "local",
    connection: null,
    id: "<no-id>",
    rejectOnDrop: true,
    trackDoneStatus: false,
    Promise: Promise
  };
  Bottleneck.prototype.stopDefaults = {
    enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.",
    dropWaitingJobs: true,
    dropErrorMessage: "This limiter has been stopped."
  };
  return Bottleneck;
}.call(void 0);
module.exports = Bottleneck;

5
node_modules/bottleneck/lib/BottleneckError.js generated vendored Normal file
View file

@@ -0,0 +1,5 @@
"use strict";
var BottleneckError;
// Error subclass used for every failure Bottleneck raises itself (invalid
// options, dropped jobs, stopped limiters, ...), so callers can distinguish
// limiter errors from errors thrown by their own tasks.
BottleneckError = class BottleneckError extends Error {};
module.exports = BottleneckError;

107
node_modules/bottleneck/lib/DLList.js generated vendored Normal file
View file

@@ -0,0 +1,107 @@
"use strict";
var DLList;
// Minimal doubly-linked list backing Bottleneck's job queues.
// Optional `incr`/`decr` hooks are invoked on push/shift so Queues can keep
// an aggregate count across priority levels.
DLList = class DLList {
  constructor(incr, decr) {
    this.incr = incr;
    this.decr = decr;
    this._first = null;
    this._last = null;
    this.length = 0;
  }

  // Append a value at the tail. Returns undefined.
  push(value) {
    this.length++;
    if (typeof this.incr === "function") {
      this.incr();
    }
    const node = {
      value,
      prev: this._last,
      next: null
    };
    if (this._last == null) {
      this._first = this._last = node;
    } else {
      this._last.next = node;
      this._last = node;
    }
    return void 0;
  }

  // Remove and return the head value; undefined when the list is empty.
  shift() {
    if (this._first == null) {
      return;
    }
    this.length--;
    if (typeof this.decr === "function") {
      this.decr();
    }
    const { value } = this._first;
    this._first = this._first.next;
    if (this._first != null) {
      this._first.prev = null;
    } else {
      this._last = null;
    }
    return value;
  }

  // Peek at the head value without removing it.
  first() {
    if (this._first != null) {
      return this._first.value;
    }
  }

  // Snapshot of all values, head to tail.
  getArray() {
    const values = [];
    for (let node = this._first; node != null; node = node.next) {
      values.push(node.value);
    }
    return values;
  }

  // Drain the list through `cb`, one shifted value at a time.
  // Stops early if a stored value is null/undefined, like the original.
  forEachShift(cb) {
    let value = this.shift();
    while (value != null) {
      cb(value);
      value = this.shift();
    }
    return void 0;
  }

  // Diagnostic dump: each value with its neighbours' values.
  debug() {
    const snapshot = [];
    for (let node = this._first; node != null; node = node.next) {
      snapshot.push({
        value: node.value,
        prev: node.prev != null ? node.prev.value : void 0,
        next: node.next != null ? node.next.value : void 0
      });
    }
    return snapshot;
  }

};
module.exports = DLList;

128
node_modules/bottleneck/lib/Events.js generated vendored Normal file
View file

@@ -0,0 +1,128 @@
"use strict";
// Babel async/await runtime helpers (generated output, kept verbatim).
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
var Events;
// Minimal event emitter used throughout Bottleneck. It grafts `on`, `once`
// and `removeAllListeners` directly onto `instance`; listener bookkeeping
// lives on this Events object.
Events = class Events {
  constructor(instance) {
    this.instance = instance;
    this._events = {};
    // Refuse to double-wrap an object that already looks like an emitter.
    if (this.instance.on != null || this.instance.once != null || this.instance.removeAllListeners != null) {
      throw new Error("An Emitter already exists for this object");
    }
    this.instance.on = (name, cb) => {
      return this._addListener(name, "many", cb);
    };
    this.instance.once = (name, cb) => {
      return this._addListener(name, "once", cb);
    };
    this.instance.removeAllListeners = (name = null) => {
      if (name != null) {
        return delete this._events[name];
      } else {
        return this._events = {};
      }
    };
  }

  // Register a listener; status is "many" (persistent) or "once".
  _addListener(name, status, cb) {
    var base;
    if ((base = this._events)[name] == null) {
      base[name] = [];
    }
    this._events[name].push({
      cb,
      status
    });
    return this.instance;
  }

  listenerCount(name) {
    if (this._events[name] != null) {
      return this._events[name].length;
    } else {
      return 0;
    }
  }

  // Fire all listeners for `name` with `args`, awaiting promise-returning
  // listeners. Resolves to the first non-null listener result. Every
  // trigger (except "debug" itself) also emits a "debug" event.
  trigger(name, ...args) {
    var _this = this;
    return _asyncToGenerator(function* () {
      var e, promises;
      try {
        if (name !== "debug") {
          _this.trigger("debug", `Event triggered: ${name}`, args);
        }
        if (_this._events[name] == null) {
          return;
        }
        // Drop listeners exhausted by a previous round ("once" already run).
        _this._events[name] = _this._events[name].filter(function (listener) {
          return listener.status !== "none";
        });
        promises = _this._events[name].map(
        /*#__PURE__*/
        function () {
          var _ref = _asyncToGenerator(function* (listener) {
            var e, returned;
            if (listener.status === "none") {
              return;
            }
            if (listener.status === "once") {
              listener.status = "none";
            }
            try {
              returned = typeof listener.cb === "function" ? listener.cb(...args) : void 0;
              if (typeof (returned != null ? returned.then : void 0) === "function") {
                return yield returned;
              } else {
                return returned;
              }
            } catch (error) {
              e = error;
              // BUG FIX: this guard was the constant-true string comparison
              // `"name" !== "error"`, so a throwing "error" listener would
              // re-trigger "error" and recurse without bound. Compare the
              // actual event name instead.
              if (name !== "error") {
                _this.trigger("error", e);
              }
              return null;
            }
          });
          return function (_x) {
            return _ref.apply(this, arguments);
          };
        }());
        return (yield Promise.all(promises)).find(function (x) {
          return x != null;
        });
      } catch (error) {
        e = error;
        // BUG FIX: same constant-true `"name" !== "error"` comparison as in
        // the per-listener catch above.
        if (name !== "error") {
          _this.trigger("error", e);
        }
        return null;
      }
    })();
  }

};
module.exports = Events;

198
node_modules/bottleneck/lib/Group.js generated vendored Normal file
View file

@@ -0,0 +1,198 @@
"use strict";
// Babel runtime helpers inlined by the build (array destructuring and
// async/await support); identical to the copies in the sibling files.
function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
var Events, Group, IORedisConnection, RedisConnection, Scripts, parser;
parser = require("./parser");
Events = require("./Events");
RedisConnection = require("./RedisConnection");
IORedisConnection = require("./IORedisConnection");
Scripts = require("./Scripts");
// Group: a keyed factory/registry of Bottleneck limiters sharing one set of
// options (and, for Redis datastores, one connection). Idle limiters are
// garbage-collected periodically. (Compiled CoffeeScript output; the IIFE
// attaches `defaults` to the prototype before returning the class.)
Group = function () {
  class Group {
    constructor(limiterOptions = {}) {
      this.deleteKey = this.deleteKey.bind(this);
      this.limiterOptions = limiterOptions;
      parser.load(this.limiterOptions, this.defaults, this);
      this.Events = new Events(this);
      this.instances = {};
      // Required lazily here to avoid a circular require at module load.
      this.Bottleneck = require("./Bottleneck");
      this._startAutoCleanup();
      this.sharedConnection = this.connection != null;
      if (this.connection == null) {
        if (this.limiterOptions.datastore === "redis") {
          this.connection = new RedisConnection(Object.assign({}, this.limiterOptions, {
            Events: this.Events
          }));
        } else if (this.limiterOptions.datastore === "ioredis") {
          this.connection = new IORedisConnection(Object.assign({}, this.limiterOptions, {
            Events: this.Events
          }));
        }
      }
    }

    // Return the limiter for `key`, creating it (and emitting "created")
    // on first use.
    key(key = "") {
      var ref;
      return (ref = this.instances[key]) != null ? ref : (() => {
        var limiter;
        // BUG FIX: was `Object.assign(this.limiterOptions, {...})`, which
        // mutated the shared (caller-owned) options object and stamped the
        // per-key `id` onto it, contaminating later keys. Copy into a fresh
        // object instead, as the sibling constructor branches already do.
        limiter = this.instances[key] = new this.Bottleneck(Object.assign({}, this.limiterOptions, {
          id: `${this.id}-${key}`,
          timeout: this.timeout,
          connection: this.connection
        }));
        this.Events.trigger("created", limiter, key);
        return limiter;
      })();
    }

    // Remove a limiter locally and (for Redis) delete its keys remotely.
    // Resolves to whether anything was actually deleted.
    deleteKey(key = "") {
      var _this = this;
      return _asyncToGenerator(function* () {
        var deleted, instance;
        instance = _this.instances[key];
        if (_this.connection) {
          deleted = yield _this.connection.__runCommand__(['del', ...Scripts.allKeys(`${_this.id}-${key}`)]);
        }
        if (instance != null) {
          delete _this.instances[key];
          yield instance.disconnect();
        }
        return instance != null || deleted > 0;
      })();
    }

    // Snapshot of { key, limiter } pairs currently held locally.
    limiters() {
      var k, ref, results, v;
      ref = this.instances;
      results = [];
      for (k in ref) {
        v = ref[k];
        results.push({
          key: k,
          limiter: v
        });
      }
      return results;
    }

    keys() {
      return Object.keys(this.instances);
    }

    // All keys known cluster-wide: SCANs Redis for this group's *_settings
    // keys and strips the prefix/suffix. Falls back to local keys().
    clusterKeys() {
      var _this2 = this;
      return _asyncToGenerator(function* () {
        var cursor, end, found, i, k, keys, len, next, start;
        if (_this2.connection == null) {
          return _this2.Promise.resolve(_this2.keys());
        }
        keys = [];
        cursor = null;
        start = `b_${_this2.id}-`.length;
        end = "_settings".length;
        while (cursor !== 0) {
          var _ref = yield _this2.connection.__runCommand__(["scan", cursor != null ? cursor : 0, "match", `b_${_this2.id}-*_settings`, "count", 10000]);
          var _ref2 = _slicedToArray(_ref, 2);
          next = _ref2[0];
          found = _ref2[1];
          cursor = ~~next;
          for (i = 0, len = found.length; i < len; i++) {
            k = found[i];
            keys.push(k.slice(start, -end));
          }
        }
        return keys;
      })();
    }

    // Every timeout/2 ms, drop limiters whose datastore reports them idle
    // past the timeout. The interval is unref'd so it never keeps the
    // process alive.
    _startAutoCleanup() {
      var _this3 = this;
      var base;
      clearInterval(this.interval);
      return typeof (base = this.interval = setInterval(
      /*#__PURE__*/
      _asyncToGenerator(function* () {
        var e, k, ref, results, time, v;
        time = Date.now();
        ref = _this3.instances;
        results = [];
        for (k in ref) {
          v = ref[k];
          try {
            if (yield v._store.__groupCheck__(time)) {
              results.push(_this3.deleteKey(k));
            } else {
              results.push(void 0);
            }
          } catch (error) {
            e = error;
            results.push(v.Events.trigger("error", e));
          }
        }
        return results;
      }), this.timeout / 2)).unref === "function" ? base.unref() : void 0;
    }

    // Update group-level options; restarts the cleanup timer when its
    // period changed.
    updateSettings(options = {}) {
      parser.overwrite(options, this.defaults, this);
      parser.overwrite(options, options, this.limiterOptions);
      if (options.timeout != null) {
        return this._startAutoCleanup();
      }
    }

    // Close the connection unless it was supplied (and is owned) by the
    // caller.
    disconnect(flush = true) {
      var ref;
      if (!this.sharedConnection) {
        return (ref = this.connection) != null ? ref.disconnect(flush) : void 0;
      }
    }

  }

  ;
  Group.prototype.defaults = {
    timeout: 1000 * 60 * 5,
    connection: null,
    Promise: Promise,
    id: "group-key"
  };
  return Group;
}.call(void 0);
module.exports = Group;

186
node_modules/bottleneck/lib/IORedisConnection.js generated vendored Normal file
View file

@@ -0,0 +1,186 @@
"use strict";
// Babel runtime helpers inlined by the build (array destructuring and
// async/await support); identical to the copies in the sibling files.
function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
var Events, IORedisConnection, Scripts, parser;
parser = require("./parser");
Events = require("./Events");
Scripts = require("./Scripts");
// IORedisConnection: wraps an ioredis client pair (command client +
// subscriber) used by RedisDatastore. Handles cluster vs single-node setup,
// pub/sub routing to the owning limiters, and Lua script registration.
// (Compiled CoffeeScript output; IIFE attaches defaults to the prototype.)
IORedisConnection = function () {
  class IORedisConnection {
    constructor(options = {}) {
      parser.load(options, this.defaults, this);
      if (this.Redis == null) {
        this.Redis = eval("require")("ioredis"); // Obfuscated or else Webpack/Angular will try to inline the optional ioredis module. To override this behavior: pass the ioredis module to Bottleneck as the 'Redis' option.
      }
      if (this.Events == null) {
        this.Events = new Events(this);
      }
      this.terminated = false;
      // Three client setups: explicit cluster nodes, a user-supplied cluster
      // client (no duplicate()), or a plain client (created if absent) whose
      // subscriber is a duplicate connection.
      if (this.clusterNodes != null) {
        this.client = new this.Redis.Cluster(this.clusterNodes, this.clientOptions);
        this.subscriber = new this.Redis.Cluster(this.clusterNodes, this.clientOptions);
      } else if (this.client != null && this.client.duplicate == null) {
        this.subscriber = new this.Redis.Cluster(this.client.startupNodes, this.client.options);
      } else {
        if (this.client == null) {
          this.client = new this.Redis(this.clientOptions);
        }
        this.subscriber = this.client.duplicate();
      }
      this.limiters = {};
      // Resolves with both clients once each reports "ready" and the Lua
      // scripts have been registered.
      this.ready = this.Promise.all([this._setup(this.client, false), this._setup(this.subscriber, true)]).then(() => {
        this._loadScripts();
        return {
          client: this.client,
          subscriber: this.subscriber
        };
      });
    }

    // Wire error forwarding (and, for the subscriber, pub/sub message
    // routing to the owning limiter's datastore); resolve on "ready".
    _setup(client, sub) {
      client.setMaxListeners(0);
      return new this.Promise((resolve, reject) => {
        client.on("error", e => {
          return this.Events.trigger("error", e);
        });
        if (sub) {
          client.on("message", (channel, message) => {
            var ref;
            return (ref = this.limiters[channel]) != null ? ref._store.onMessage(channel, message) : void 0;
          });
        }
        if (client.status === "ready") {
          return resolve();
        } else {
          return client.once("ready", resolve);
        }
      });
    }

    // Register every Bottleneck Lua script as an ioredis custom command.
    _loadScripts() {
      return Scripts.names.forEach(name => {
        return this.client.defineCommand(name, {
          lua: Scripts.payload(name)
        });
      });
    }

    // Run a raw command once the connection is ready; returns the command's
    // reply (the second element of ioredis's [error, result] pair).
    __runCommand__(cmd) {
      var _this = this;
      return _asyncToGenerator(function* () {
        var _, deleted;
        yield _this.ready;
        var _ref = yield _this.client.pipeline([cmd]).exec();
        var _ref2 = _slicedToArray(_ref, 1);
        var _ref2$ = _slicedToArray(_ref2[0], 2);
        _ = _ref2$[0];
        deleted = _ref2$[1];
        return deleted;
      })();
    }

    // Subscribe the limiter to its broadcast and client-specific channels.
    __addLimiter__(instance) {
      return this.Promise.all([instance.channel(), instance.channel_client()].map(channel => {
        return new this.Promise((resolve, reject) => {
          return this.subscriber.subscribe(channel, () => {
            this.limiters[channel] = instance;
            return resolve();
          });
        });
      }));
    }

    // Unsubscribe (unless already terminated) and forget the limiter.
    __removeLimiter__(instance) {
      var _this2 = this;
      return [instance.channel(), instance.channel_client()].forEach(
      /*#__PURE__*/
      function () {
        var _ref3 = _asyncToGenerator(function* (channel) {
          if (!_this2.terminated) {
            yield _this2.subscriber.unsubscribe(channel);
          }
          return delete _this2.limiters[channel];
        });
        return function (_x) {
          return _ref3.apply(this, arguments);
        };
      }());
    }

    // Argument layout expected by ioredis custom commands:
    // numKeys, ...keys, ...args, callback.
    __scriptArgs__(name, id, args, cb) {
      var keys;
      keys = Scripts.keys(name, id);
      return [keys.length].concat(keys, args, cb);
    }

    __scriptFn__(name) {
      return this.client[name].bind(this.client);
    }

    // Tear down: stop all heartbeats, then quit (flush pending commands) or
    // hard-disconnect both clients.
    disconnect(flush = true) {
      var i, k, len, ref;
      ref = Object.keys(this.limiters);
      for (i = 0, len = ref.length; i < len; i++) {
        k = ref[i];
        clearInterval(this.limiters[k]._store.heartbeat);
      }
      this.limiters = {};
      this.terminated = true;
      if (flush) {
        return this.Promise.all([this.client.quit(), this.subscriber.quit()]);
      } else {
        this.client.disconnect();
        this.subscriber.disconnect();
        return this.Promise.resolve();
      }
    }

  }

  ;
  IORedisConnection.prototype.datastore = "ioredis";
  IORedisConnection.prototype.defaults = {
    Redis: null,
    clientOptions: {},
    clusterNodes: null,
    client: null,
    Promise: Promise,
    Events: null
  };
  return IORedisConnection;
}.call(void 0);
module.exports = IORedisConnection;

215
node_modules/bottleneck/lib/Job.js generated vendored Normal file
View file

@@ -0,0 +1,215 @@
"use strict";
// Babel async/await runtime helpers inlined by the build; identical to the
// copies in the sibling files.
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
var BottleneckError, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser;
NUM_PRIORITIES = 10;
DEFAULT_PRIORITY = 5;
parser = require("./parser");
BottleneckError = require("./BottleneckError");
// Represents one task scheduled on a limiter. A Job walks the status chain
// RECEIVED -> QUEUED -> RUNNING -> EXECUTING -> DONE (tracked in `_states`)
// and settles `this.promise` when the task resolves, is rejected, retried
// out, or dropped.
Job = class Job {
  constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states, Promise) {
    this.task = task;
    this.args = args;
    this.rejectOnDrop = rejectOnDrop;
    this.Events = Events;
    this._states = _states;
    this.Promise = Promise;
    this.options = parser.load(options, jobDefaults);
    this.options.priority = this._sanitizePriority(this.options.priority);
    // Auto-generated ids get a random suffix so concurrent jobs stay distinct.
    if (this.options.id === jobDefaults.id) {
      this.options.id = `${this.options.id}-${this._randomIndex()}`;
    }
    this.promise = new this.Promise((_resolve, _reject) => {
      this._resolve = _resolve;
      this._reject = _reject;
    });
    this.retryCount = 0;
  }

  // Clamp `priority` to an integer in [0, NUM_PRIORITIES); anything that is
  // not an integer falls back to DEFAULT_PRIORITY.
  _sanitizePriority(priority) {
    var sProperty;
    sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority;
    if (sProperty < 0) {
      return 0;
    } else if (sProperty > NUM_PRIORITIES - 1) {
      return NUM_PRIORITIES - 1;
    } else {
      return sProperty;
    }
  }

  // Short random base-36 string used to de-duplicate default job ids.
  _randomIndex() {
    return Math.random().toString(36).slice(2);
  }

  // Remove the job from state tracking and (if rejectOnDrop) reject its
  // promise. Returns true if the job was still tracked, false otherwise.
  doDrop({
    error,
    message = "This job has been dropped by Bottleneck"
  } = {}) {
    if (this._states.remove(this.options.id)) {
      if (this.rejectOnDrop) {
        this._reject(error != null ? error : new BottleneckError(message));
      }
      this.Events.trigger("dropped", {
        args: this.args,
        options: this.options,
        task: this.task,
        promise: this.promise
      });
      return true;
    } else {
      return false;
    }
  }

  // Sanity check: throw if the job is not in the expected lifecycle status.
  // "DONE" also matches an untracked (already removed) job.
  _assertStatus(expected) {
    var status;
    status = this._states.jobStatus(this.options.id);
    if (!(status === expected || expected === "DONE" && status === null)) {
      throw new BottleneckError(`Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`);
    }
  }

  // Enter the RECEIVED state and notify listeners.
  doReceive() {
    this._states.start(this.options.id);
    return this.Events.trigger("received", {
      args: this.args,
      options: this.options
    });
  }

  // RECEIVED -> QUEUED transition.
  doQueue(reachedHWM, blocked) {
    this._assertStatus("RECEIVED");
    this._states.next(this.options.id);
    return this.Events.trigger("queued", {
      args: this.args,
      options: this.options,
      reachedHWM,
      blocked
    });
  }

  // QUEUED -> RUNNING transition (first attempt only; retries are already
  // EXECUTING and stay there).
  doRun() {
    if (this.retryCount === 0) {
      this._assertStatus("QUEUED");
      this._states.next(this.options.id);
    } else {
      this._assertStatus("EXECUTING");
    }
    return this.Events.trigger("scheduled", {
      args: this.args,
      options: this.options
    });
  }

  // Run the task (directly, or through the chained limiter when present).
  // On success: mark DONE, free the datastore slot, resolve the promise.
  // On failure: delegate to _onFailure, which may schedule a retry.
  doExecute(chained, clearGlobalState, run, free) {
    var _this = this;
    return _asyncToGenerator(function* () {
      var error, eventInfo, passed;
      if (_this.retryCount === 0) {
        _this._assertStatus("RUNNING");
        _this._states.next(_this.options.id);
      } else {
        _this._assertStatus("EXECUTING");
      }
      eventInfo = {
        args: _this.args,
        options: _this.options,
        retryCount: _this.retryCount
      };
      _this.Events.trigger("executing", eventInfo);
      try {
        passed = yield chained != null ? chained.schedule(_this.options, _this.task, ..._this.args) : _this.task(..._this.args);
        if (clearGlobalState()) {
          _this.doDone(eventInfo);
          yield free(_this.options, eventInfo);
          _this._assertStatus("DONE");
          return _this._resolve(passed);
        }
      } catch (error1) {
        error = error1;
        return _this._onFailure(error, eventInfo, clearGlobalState, run, free);
      }
    })();
  }

  // Called when the job exceeds its `expiration`: fail it with a timeout
  // error (which the "failed" handler may turn into a retry).
  doExpire(clearGlobalState, run, free) {
    var error, eventInfo;
    // FIX: compare jobStatus()'s RESULT to "RUNNING". The original code
    // passed the boolean `this.options.id === "RUNNING"` into jobStatus(),
    // so an expiring RUNNING job was never advanced and the following
    // _assertStatus("EXECUTING") could throw.
    if (this._states.jobStatus(this.options.id) === "RUNNING") {
      this._states.next(this.options.id);
    }
    this._assertStatus("EXECUTING");
    eventInfo = {
      args: this.args,
      options: this.options,
      retryCount: this.retryCount
    };
    error = new BottleneckError(`This job timed out after ${this.options.expiration} ms.`);
    return this._onFailure(error, eventInfo, clearGlobalState, run, free);
  }

  // Shared failure path: ask the "failed" listeners for a retry delay; if
  // one is returned, re-run after that many ms, otherwise finalize and
  // reject the job's promise.
  _onFailure(error, eventInfo, clearGlobalState, run, free) {
    var _this2 = this;
    return _asyncToGenerator(function* () {
      var retry, retryAfter;
      if (clearGlobalState()) {
        retry = yield _this2.Events.trigger("failed", error, eventInfo);
        if (retry != null) {
          retryAfter = ~~retry;
          _this2.Events.trigger("retry", `Retrying ${_this2.options.id} after ${retryAfter} ms`, eventInfo);
          _this2.retryCount++;
          return run(retryAfter);
        } else {
          _this2.doDone(eventInfo);
          yield free(_this2.options, eventInfo);
          _this2._assertStatus("DONE");
          return _this2._reject(error);
        }
      }
    })();
  }

  // EXECUTING -> DONE transition.
  doDone(eventInfo) {
    this._assertStatus("EXECUTING");
    this._states.next(this.options.id);
    return this.Events.trigger("done", eventInfo);
  }
};
module.exports = Job;

287
node_modules/bottleneck/lib/LocalDatastore.js generated vendored Normal file
View file

@ -0,0 +1,287 @@
"use strict";
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
var BottleneckError, LocalDatastore, parser;
parser = require("./parser");
BottleneckError = require("./BottleneckError");
// In-process datastore backing a limiter when no Redis connection is used.
// All state (running weight, reservoir, next-request time, block window)
// lives on this object. The async methods mirror the Redis datastore API;
// yieldLoop() defers each call by one tick so call ordering matches the
// networked case.
LocalDatastore = class LocalDatastore {
  constructor(instance, storeOptions, storeInstanceOptions) {
    this.instance = instance;
    this.storeOptions = storeOptions;
    this.clientId = this.instance._randomIndex();
    // Copies storeInstanceOptions (Promise, timeout, heartbeatInterval, ...)
    // onto `this`.
    parser.load(storeInstanceOptions, storeInstanceOptions, this);
    this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now();
    this._running = 0;
    this._done = 0;
    this._unblockTime = 0;
    this.ready = this.Promise.resolve();
    this.clients = {};
    this._startHeartbeat();
  }

  // Start (or stop, when no longer configured) the interval that
  // periodically refreshes and/or increases the reservoir.
  _startHeartbeat() {
    var base;
    if (this.heartbeat == null && (this.storeOptions.reservoirRefreshInterval != null && this.storeOptions.reservoirRefreshAmount != null || this.storeOptions.reservoirIncreaseInterval != null && this.storeOptions.reservoirIncreaseAmount != null)) {
      // unref() (when available) keeps this timer from holding the process open.
      return typeof (base = this.heartbeat = setInterval(() => {
        var amount, incr, maximum, now, reservoir;
        now = Date.now();
        // Full refresh: reset the reservoir to the configured amount.
        if (this.storeOptions.reservoirRefreshInterval != null && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) {
          this._lastReservoirRefresh = now;
          this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount;
          this.instance._drainAll(this.computeCapacity());
        }
        // Incremental top-up: add reservoirIncreaseAmount, capped at maximum.
        if (this.storeOptions.reservoirIncreaseInterval != null && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) {
          var _this$storeOptions = this.storeOptions;
          amount = _this$storeOptions.reservoirIncreaseAmount;
          maximum = _this$storeOptions.reservoirIncreaseMaximum;
          reservoir = _this$storeOptions.reservoir;
          this._lastReservoirIncrease = now;
          incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount;
          if (incr > 0) {
            this.storeOptions.reservoir += incr;
            return this.instance._drainAll(this.computeCapacity());
          }
        }
      }, this.heartbeatInterval)).unref === "function" ? base.unref() : void 0;
    } else {
      return clearInterval(this.heartbeat);
    }
  }

  // Local "publish": just re-emit the message on this instance's emitter.
  __publish__(message) {
    var _this = this;
    return _asyncToGenerator(function* () {
      yield _this.yieldLoop();
      return _this.instance.Events.trigger("message", message.toString());
    })();
  }

  // Local "disconnect": stop the heartbeat; `flush` has no meaning here.
  __disconnect__(flush) {
    var _this2 = this;
    return _asyncToGenerator(function* () {
      yield _this2.yieldLoop();
      clearInterval(_this2.heartbeat);
      return _this2.Promise.resolve();
    })();
  }

  // Resolve after `t` ms (t=0 -> next macrotask), emulating network latency.
  yieldLoop(t = 0) {
    return new this.Promise(function (resolve, reject) {
      return setTimeout(resolve, t);
    });
  }

  // Duration of the block window used by strategy 3 (BLOCK):
  // explicit `penalty`, else 15 * minTime, else 5000 ms when minTime is 0.
  computePenalty() {
    var ref;
    return (ref = this.storeOptions.penalty) != null ? ref : 15 * this.storeOptions.minTime || 5000;
  }

  // Merge new limiter settings, restart the heartbeat if needed, and try to
  // drain any jobs the new settings may have unblocked.
  __updateSettings__(options) {
    var _this3 = this;
    return _asyncToGenerator(function* () {
      yield _this3.yieldLoop();
      parser.overwrite(options, options, _this3.storeOptions);
      _this3._startHeartbeat();
      _this3.instance._drainAll(_this3.computeCapacity());
      return true;
    })();
  }

  // Total weight of currently-running jobs.
  __running__() {
    var _this4 = this;
    return _asyncToGenerator(function* () {
      yield _this4.yieldLoop();
      return _this4._running;
    })();
  }

  // Number of queued jobs (delegated to the limiter instance).
  __queued__() {
    var _this5 = this;
    return _asyncToGenerator(function* () {
      yield _this5.yieldLoop();
      return _this5.instance.queued();
    })();
  }

  // Total weight of completed jobs.
  __done__() {
    var _this6 = this;
    return _asyncToGenerator(function* () {
      yield _this6.yieldLoop();
      return _this6._done;
    })();
  }

  // Used by Group GC: true when this limiter has been idle past `timeout`.
  __groupCheck__(time) {
    var _this7 = this;
    return _asyncToGenerator(function* () {
      yield _this7.yieldLoop();
      return _this7._nextRequest + _this7.timeout < time;
    })();
  }

  // Remaining capacity given maxConcurrent and the reservoir;
  // null means "unlimited".
  computeCapacity() {
    var maxConcurrent, reservoir;
    var _this$storeOptions2 = this.storeOptions;
    maxConcurrent = _this$storeOptions2.maxConcurrent;
    reservoir = _this$storeOptions2.reservoir;
    if (maxConcurrent != null && reservoir != null) {
      return Math.min(maxConcurrent - this._running, reservoir);
    } else if (maxConcurrent != null) {
      return maxConcurrent - this._running;
    } else if (reservoir != null) {
      return reservoir;
    }
    else {
      return null;
    }
  }

  // Can a job of this weight start right now, capacity-wise?
  conditionsCheck(weight) {
    var capacity;
    capacity = this.computeCapacity();
    return capacity == null || weight <= capacity;
  }

  // Add `incr` to the reservoir and drain any newly-runnable jobs.
  __incrementReservoir__(incr) {
    var _this8 = this;
    return _asyncToGenerator(function* () {
      var reservoir;
      yield _this8.yieldLoop();
      reservoir = _this8.storeOptions.reservoir += incr;
      _this8.instance._drainAll(_this8.computeCapacity());
      return reservoir;
    })();
  }

  __currentReservoir__() {
    var _this9 = this;
    return _asyncToGenerator(function* () {
      yield _this9.yieldLoop();
      return _this9.storeOptions.reservoir;
    })();
  }

  // True while the strategy-3 block window is active.
  isBlocked(now) {
    return this._unblockTime >= now;
  }

  // Capacity available AND the minTime spacing has elapsed.
  check(weight, now) {
    return this.conditionsCheck(weight) && this._nextRequest - now <= 0;
  }

  __check__(weight) {
    var _this10 = this;
    return _asyncToGenerator(function* () {
      var now;
      yield _this10.yieldLoop();
      now = Date.now();
      return _this10.check(weight, now);
    })();
  }

  // Try to reserve capacity for a job about to run. On success, consume
  // reservoir weight and push _nextRequest forward by minTime; `wait` is
  // how long the caller must pause before actually starting the task.
  __register__(index, weight, expiration) {
    var _this11 = this;
    return _asyncToGenerator(function* () {
      var now, wait;
      yield _this11.yieldLoop();
      now = Date.now();
      if (_this11.conditionsCheck(weight)) {
        _this11._running += weight;
        if (_this11.storeOptions.reservoir != null) {
          _this11.storeOptions.reservoir -= weight;
        }
        wait = Math.max(_this11._nextRequest - now, 0);
        _this11._nextRequest = now + wait + _this11.storeOptions.minTime;
        return {
          success: true,
          wait,
          reservoir: _this11.storeOptions.reservoir
        };
      } else {
        return {
          success: false
        };
      }
    })();
  }

  // Strategy 3 is Bottleneck.strategy.BLOCK.
  strategyIsBlock() {
    return this.storeOptions.strategy === 3;
  }

  // Admission check for a newly submitted job: reject overweight jobs,
  // detect the high-water mark, and (for BLOCK) open the penalty window
  // and drop everything queued.
  __submit__(queueLength, weight) {
    var _this12 = this;
    return _asyncToGenerator(function* () {
      var blocked, now, reachedHWM;
      yield _this12.yieldLoop();
      if (_this12.storeOptions.maxConcurrent != null && weight > _this12.storeOptions.maxConcurrent) {
        throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${_this12.storeOptions.maxConcurrent}`);
      }
      now = Date.now();
      reachedHWM = _this12.storeOptions.highWater != null && queueLength === _this12.storeOptions.highWater && !_this12.check(weight, now);
      blocked = _this12.strategyIsBlock() && (reachedHWM || _this12.isBlocked(now));
      if (blocked) {
        _this12._unblockTime = now + _this12.computePenalty();
        _this12._nextRequest = _this12._unblockTime + _this12.storeOptions.minTime;
        _this12.instance._dropAllQueued();
      }
      return {
        reachedHWM,
        blocked,
        strategy: _this12.storeOptions.strategy
      };
    })();
  }

  // Release a finished job's weight and drain any newly-runnable jobs.
  __free__(index, weight) {
    var _this13 = this;
    return _asyncToGenerator(function* () {
      yield _this13.yieldLoop();
      _this13._running -= weight;
      _this13._done += weight;
      _this13.instance._drainAll(_this13.computeCapacity());
      return {
        running: _this13._running
      };
    })();
  }
};
module.exports = LocalDatastore;

77
node_modules/bottleneck/lib/Queues.js generated vendored Normal file
View file

@ -0,0 +1,77 @@
"use strict";
var DLList, Events, Queues;
DLList = require("./DLList");
Events = require("./Events");
// Priority queue container: one doubly-linked list per priority level.
// Tracks the total queued count and emits "leftzero"/"zero" on the
// empty -> non-empty and non-empty -> empty transitions.
Queues = class Queues {
  constructor(num_priorities) {
    this.Events = new Events(this);
    this._length = 0;
    this._lists = [];
    // Mirrors a CoffeeScript [1..num_priorities] range, which counts
    // downward when num_priorities < 1.
    const ascending = num_priorities >= 1;
    for (let p = 1; ascending ? p <= num_priorities : p >= num_priorities; ascending ? p++ : p--) {
      this._lists.push(new DLList(() => this.incr(), () => this.decr()));
    }
  }

  // Called by a DLList on push.
  incr() {
    if (this._length++ === 0) {
      return this.Events.trigger("leftzero");
    }
  }

  // Called by a DLList on shift.
  decr() {
    if (--this._length === 0) {
      return this.Events.trigger("zero");
    }
  }

  push(job) {
    return this._lists[job.options.priority].push(job);
  }

  // Count for one priority, or the grand total when priority is omitted.
  queued(priority) {
    return priority != null ? this._lists[priority].length : this._length;
  }

  // Drain every list, highest priority first, through `fn`.
  shiftAll(fn) {
    return this._lists.forEach((list) => list.forEachShift(fn));
  }

  // First non-empty list in `arr`, or [] (whose .shift() yields undefined).
  getFirst(arr = this._lists) {
    for (const list of arr) {
      if (list.length > 0) {
        return list;
      }
    }
    return [];
  }

  // Shift a job from the LOWEST-priority non-empty list at or below
  // `priority` (lists are scanned in reverse).
  shiftLastFrom(priority) {
    const candidates = this._lists.slice(priority).reverse();
    return this.getFirst(candidates).shift();
  }
};
module.exports = Queues;

193
node_modules/bottleneck/lib/RedisConnection.js generated vendored Normal file
View file

@ -0,0 +1,193 @@
"use strict";
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
var Events, RedisConnection, Scripts, parser;
parser = require("./parser");
Events = require("./Events");
Scripts = require("./Scripts");
// Connection wrapper around the `redis` (node_redis) client. Owns a command
// client plus a duplicated subscriber client, loads the limiter Lua scripts
// via SCRIPT LOAD, and routes pub/sub messages to the registered limiters.
RedisConnection = function () {
  class RedisConnection {
    constructor(options = {}) {
      parser.load(options, this.defaults, this);
      if (this.Redis == null) {
        this.Redis = eval("require")("redis"); // Obfuscated or else Webpack/Angular will try to inline the optional redis module. To override this behavior: pass the redis module to Bottleneck as the 'Redis' option.
      }
      if (this.Events == null) {
        this.Events = new Events(this);
      }
      this.terminated = false;
      if (this.client == null) {
        this.client = this.Redis.createClient(this.clientOptions);
      }
      // node_redis clients in subscriber mode cannot issue regular commands,
      // hence the dedicated duplicate.
      this.subscriber = this.client.duplicate();
      this.limiters = {};
      // Script name -> SHA1 returned by SCRIPT LOAD.
      this.shas = {};
      // Resolves with { client, subscriber } once both clients are ready and
      // all Lua scripts are loaded.
      this.ready = this.Promise.all([this._setup(this.client, false), this._setup(this.subscriber, true)]).then(() => {
        return this._loadScripts();
      }).then(() => {
        return {
          client: this.client,
          subscriber: this.subscriber
        };
      });
    }

    // Wire error forwarding (and, for the subscriber, message routing to the
    // owning limiter's datastore); resolve once the client reports ready.
    _setup(client, sub) {
      client.setMaxListeners(0);
      return new this.Promise((resolve, reject) => {
        client.on("error", e => {
          return this.Events.trigger("error", e);
        });
        if (sub) {
          client.on("message", (channel, message) => {
            var ref;
            return (ref = this.limiters[channel]) != null ? ref._store.onMessage(channel, message) : void 0;
          });
        }
        if (client.ready) {
          return resolve();
        } else {
          return client.once("ready", resolve);
        }
      });
    }

    // SCRIPT LOAD one Lua payload and remember its SHA for EVALSHA.
    _loadScript(name) {
      return new this.Promise((resolve, reject) => {
        var payload;
        payload = Scripts.payload(name);
        return this.client.multi([["script", "load", payload]]).exec((err, replies) => {
          if (err != null) {
            return reject(err);
          }
          this.shas[name] = replies[0];
          return resolve(replies[0]);
        });
      });
    }

    _loadScripts() {
      return this.Promise.all(Scripts.names.map(k => {
        return this._loadScript(k);
      }));
    }

    // Run one raw Redis command atomically once the connection is ready.
    __runCommand__(cmd) {
      var _this = this;
      return _asyncToGenerator(function* () {
        yield _this.ready;
        return new _this.Promise((resolve, reject) => {
          return _this.client.multi([cmd]).exec_atomic(function (err, replies) {
            if (err != null) {
              return reject(err);
            } else {
              return resolve(replies[0]);
            }
          });
        });
      })();
    }

    // Subscribe to the limiter's two channels; only register the limiter in
    // `this.limiters` once the "subscribe" confirmation for that channel
    // arrives, so no message can slip through before routing is in place.
    __addLimiter__(instance) {
      return this.Promise.all([instance.channel(), instance.channel_client()].map(channel => {
        return new this.Promise((resolve, reject) => {
          var handler;
          handler = chan => {
            if (chan === channel) {
              this.subscriber.removeListener("subscribe", handler);
              this.limiters[channel] = instance;
              return resolve();
            }
          };
          this.subscriber.on("subscribe", handler);
          return this.subscriber.subscribe(channel);
        });
      }));
    }

    // Unsubscribe from both channels (skipped after disconnect()) and drop
    // the limiter from the routing table.
    __removeLimiter__(instance) {
      var _this2 = this;
      return this.Promise.all([instance.channel(), instance.channel_client()].map(
      /*#__PURE__*/
      function () {
        var _ref = _asyncToGenerator(function* (channel) {
          if (!_this2.terminated) {
            yield new _this2.Promise((resolve, reject) => {
              return _this2.subscriber.unsubscribe(channel, function (err, chan) {
                if (err != null) {
                  return reject(err);
                }
                if (chan === channel) {
                  return resolve();
                }
              });
            });
          }
          return delete _this2.limiters[channel];
        });
        return function (_x) {
          return _ref.apply(this, arguments);
        };
      }()));
    }

    // EVALSHA argument list: script SHA, key count, keys, args, callback.
    __scriptArgs__(name, id, args, cb) {
      var keys;
      keys = Scripts.keys(name, id);
      return [this.shas[name], keys.length].concat(keys, args, cb);
    }

    __scriptFn__(name) {
      return this.client.evalsha.bind(this.client);
    }

    // Stop all limiter heartbeats and end both clients. With node_redis,
    // end(flush) is synchronous, so this resolves immediately.
    disconnect(flush = true) {
      var i, k, len, ref;
      ref = Object.keys(this.limiters);
      for (i = 0, len = ref.length; i < len; i++) {
        k = ref[i];
        clearInterval(this.limiters[k]._store.heartbeat);
      }
      this.limiters = {};
      this.terminated = true;
      this.client.end(flush);
      this.subscriber.end(flush);
      return this.Promise.resolve();
    }
  }
  ;
  RedisConnection.prototype.datastore = "redis";
  RedisConnection.prototype.defaults = {
    Redis: null,
    clientOptions: {},
    client: null,
    Promise: Promise,
    Events: null
  };
  return RedisConnection;
}.call(void 0);
module.exports = RedisConnection;

352
node_modules/bottleneck/lib/RedisDatastore.js generated vendored Normal file
View file

@ -0,0 +1,352 @@
"use strict";
function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); }
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); }
function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
var BottleneckError, IORedisConnection, RedisConnection, RedisDatastore, parser;
parser = require("./parser");
BottleneckError = require("./BottleneckError");
RedisConnection = require("./RedisConnection");
IORedisConnection = require("./IORedisConnection");
// Redis-backed datastore for a limiter. All counters live in Redis and are
// manipulated through the Lua scripts in Scripts.js; cross-process
// coordination happens over the limiter's pub/sub channel (see onMessage).
RedisDatastore = class RedisDatastore {
  constructor(instance, storeOptions, storeInstanceOptions) {
    this.instance = instance;
    this.storeOptions = storeOptions;
    this.originalId = this.instance.id;
    this.clientId = this.instance._randomIndex();
    // Copies storeInstanceOptions (Promise, connection, timeout, ...) onto `this`.
    parser.load(storeInstanceOptions, storeInstanceOptions, this);
    this.clients = {};
    // counter -> pending setTimeout for the "capacity-priority" protocol below.
    this.capacityPriorityCounters = {};
    this.sharedConnection = this.connection != null;
    if (this.connection == null) {
      this.connection = this.instance.datastore === "redis" ? new RedisConnection({
        Redis: this.Redis,
        clientOptions: this.clientOptions,
        Promise: this.Promise,
        Events: this.instance.Events
      }) : this.instance.datastore === "ioredis" ? new IORedisConnection({
        Redis: this.Redis,
        clientOptions: this.clientOptions,
        clusterNodes: this.clusterNodes,
        Promise: this.Promise,
        Events: this.instance.Events
      }) : void 0;
    }
    this.instance.connection = this.connection;
    this.instance.datastore = this.connection.datastore;
    // Ready chain: connect -> init settings -> subscribe -> register this
    // client -> start the heartbeat script on an interval.
    this.ready = this.connection.ready.then(clients => {
      this.clients = clients;
      return this.runScript("init", this.prepareInitSettings(this.clearDatastore));
    }).then(() => {
      return this.connection.__addLimiter__(this.instance);
    }).then(() => {
      return this.runScript("register_client", [this.instance.queued()]);
    }).then(() => {
      var base;
      if (typeof (base = this.heartbeat = setInterval(() => {
        return this.runScript("heartbeat", []).catch(e => {
          return this.instance.Events.trigger("error", e);
        });
      }, this.heartbeatInterval)).unref === "function") {
        base.unref();
      }
      return this.clients;
    });
  }

  // Broadcast a user message to every client of this limiter.
  __publish__(message) {
    var _this = this;
    return _asyncToGenerator(function* () {
      var client;
      var _ref = yield _this.ready;
      client = _ref.client;
      return client.publish(_this.instance.channel(), `message:${message.toString()}`);
    })();
  }

  // Pub/sub dispatcher. Message format is "type:data" where type is one of:
  //   capacity           - free capacity announced; drain the local queue.
  //   capacity-priority  - capacity offered to one specific client first;
  //                        that client drains, then re-publishes the
  //                        remainder. Other clients wait 1s, then blacklist
  //                        the unresponsive client and drain themselves.
  //   message            - user payload from __publish__.
  //   blocked            - the BLOCK strategy fired; drop all queued jobs.
  onMessage(channel, message) {
    var _this2 = this;
    return _asyncToGenerator(function* () {
      var capacity, counter, data, drained, e, newCapacity, pos, priorityClient, rawCapacity, type;
      try {
        pos = message.indexOf(":");
        var _ref2 = [message.slice(0, pos), message.slice(pos + 1)];
        type = _ref2[0];
        data = _ref2[1];
        if (type === "capacity") {
          return yield _this2.instance._drainAll(data.length > 0 ? ~~data : void 0);
        } else if (type === "capacity-priority") {
          var _data$split = data.split(":");
          var _data$split2 = _slicedToArray(_data$split, 3);
          rawCapacity = _data$split2[0];
          priorityClient = _data$split2[1];
          counter = _data$split2[2];
          capacity = rawCapacity.length > 0 ? ~~rawCapacity : void 0;
          if (priorityClient === _this2.clientId) {
            // We have priority: drain, then hand the leftover capacity back.
            drained = yield _this2.instance._drainAll(capacity);
            newCapacity = capacity != null ? capacity - (drained || 0) : "";
            return yield _this2.clients.client.publish(_this2.instance.channel(), `capacity-priority:${newCapacity}::${counter}`);
          } else if (priorityClient === "") {
            // The priority client has responded; cancel our fallback timer.
            clearTimeout(_this2.capacityPriorityCounters[counter]);
            delete _this2.capacityPriorityCounters[counter];
            return _this2.instance._drainAll(capacity);
          } else {
            // Another client has priority: give it 1s before assuming it is
            // gone, blacklisting it, and draining ourselves.
            return _this2.capacityPriorityCounters[counter] = setTimeout(
            /*#__PURE__*/
            _asyncToGenerator(function* () {
              var e;
              try {
                delete _this2.capacityPriorityCounters[counter];
                yield _this2.runScript("blacklist_client", [priorityClient]);
                return yield _this2.instance._drainAll(capacity);
              } catch (error) {
                e = error;
                return _this2.instance.Events.trigger("error", e);
              }
            }), 1000);
          }
        } else if (type === "message") {
          return _this2.instance.Events.trigger("message", data);
        } else if (type === "blocked") {
          return yield _this2.instance._dropAllQueued();
        }
      } catch (error) {
        e = error;
        return _this2.instance.Events.trigger("error", e);
      }
    })();
  }

  // Stop the heartbeat; only tear down the connection if we own it.
  __disconnect__(flush) {
    clearInterval(this.heartbeat);
    if (this.sharedConnection) {
      return this.connection.__removeLimiter__(this.instance);
    } else {
      return this.connection.disconnect(flush);
    }
  }

  // Execute one named Lua script with [now, clientId, ...args].
  // Self-heals on two expected errors: SETTINGS_KEY_NOT_FOUND re-runs init
  // (the limiter keys expired), UNKNOWN_CLIENT re-registers this client.
  runScript(name, args) {
    var _this3 = this;
    return _asyncToGenerator(function* () {
      if (!(name === "init" || name === "register_client")) {
        yield _this3.ready;
      }
      return new _this3.Promise((resolve, reject) => {
        var all_args, arr;
        all_args = [Date.now(), _this3.clientId].concat(args);
        _this3.instance.Events.trigger("debug", `Calling Redis script: ${name}.lua`, all_args);
        arr = _this3.connection.__scriptArgs__(name, _this3.originalId, all_args, function (err, replies) {
          if (err != null) {
            return reject(err);
          }
          return resolve(replies);
        });
        return _this3.connection.__scriptFn__(name)(...arr);
      }).catch(e => {
        if (e.message === "SETTINGS_KEY_NOT_FOUND") {
          if (name === "heartbeat") {
            return _this3.Promise.resolve();
          } else {
            return _this3.runScript("init", _this3.prepareInitSettings(false)).then(() => {
              return _this3.runScript(name, args);
            });
          }
        } else if (e.message === "UNKNOWN_CLIENT") {
          return _this3.runScript("register_client", [_this3.instance.queued()]).then(() => {
            return _this3.runScript(name, args);
          });
        } else {
          return _this3.Promise.reject(e);
        }
      });
    })();
  }

  // Stringify script arguments; null/undefined become "".
  prepareArray(arr) {
    var i, len, results, x;
    results = [];
    for (i = 0, len = arr.length; i < len; i++) {
      x = arr[i];
      results.push(x != null ? x.toString() : "");
    }
    return results;
  }

  // Flatten an object into [k1, v1, k2, v2, ...] for Lua consumption.
  prepareObject(obj) {
    var arr, k, v;
    arr = [];
    for (k in obj) {
      v = obj[k];
      arr.push(k, v != null ? v.toString() : "");
    }
    return arr;
  }

  // Arguments for init.lua: [clearFlag, version, ...settings key/values].
  prepareInitSettings(clear) {
    var args;
    args = this.prepareObject(Object.assign({}, this.storeOptions, {
      id: this.originalId,
      version: this.instance.version,
      groupTimeout: this.timeout,
      clientTimeout: this.clientTimeout
    }));
    args.unshift(clear ? 1 : 0, this.instance.version);
    return args;
  }

  // Lua returns 0/1 for booleans.
  convertBool(b) {
    return !!b;
  }

  __updateSettings__(options) {
    var _this4 = this;
    return _asyncToGenerator(function* () {
      yield _this4.runScript("update_settings", _this4.prepareObject(options));
      // Keep the local copy in sync with what was written to Redis.
      return parser.overwrite(options, options, _this4.storeOptions);
    })();
  }

  __running__() {
    return this.runScript("running", []);
  }

  __queued__() {
    return this.runScript("queued", []);
  }

  __done__() {
    return this.runScript("done", []);
  }

  __groupCheck__() {
    var _this5 = this;
    return _asyncToGenerator(function* () {
      return _this5.convertBool((yield _this5.runScript("group_check", [])));
    })();
  }

  __incrementReservoir__(incr) {
    return this.runScript("increment_reservoir", [incr]);
  }

  __currentReservoir__() {
    return this.runScript("current_reservoir", []);
  }

  __check__(weight) {
    var _this6 = this;
    return _asyncToGenerator(function* () {
      return _this6.convertBool((yield _this6.runScript("check", _this6.prepareArray([weight]))));
    })();
  }

  // register.lua returns [success, wait, reservoir].
  __register__(index, weight, expiration) {
    var _this7 = this;
    return _asyncToGenerator(function* () {
      var reservoir, success, wait;
      var _ref4 = yield _this7.runScript("register", _this7.prepareArray([index, weight, expiration]));
      var _ref5 = _slicedToArray(_ref4, 3);
      success = _ref5[0];
      wait = _ref5[1];
      reservoir = _ref5[2];
      return {
        success: _this7.convertBool(success),
        wait,
        reservoir
      };
    })();
  }

  // submit.lua returns [reachedHWM, blocked, strategy]; an overweight job is
  // signaled via an "OVERWEIGHT:<weight>:<maxConcurrent>" error message.
  __submit__(queueLength, weight) {
    var _this8 = this;
    return _asyncToGenerator(function* () {
      var blocked, e, maxConcurrent, overweight, reachedHWM, strategy;
      try {
        var _ref6 = yield _this8.runScript("submit", _this8.prepareArray([queueLength, weight]));
        var _ref7 = _slicedToArray(_ref6, 3);
        reachedHWM = _ref7[0];
        blocked = _ref7[1];
        strategy = _ref7[2];
        return {
          reachedHWM: _this8.convertBool(reachedHWM),
          blocked: _this8.convertBool(blocked),
          strategy
        };
      } catch (error) {
        e = error;
        if (e.message.indexOf("OVERWEIGHT") === 0) {
          var _e$message$split = e.message.split(":");
          var _e$message$split2 = _slicedToArray(_e$message$split, 3);
          overweight = _e$message$split2[0];
          weight = _e$message$split2[1];
          maxConcurrent = _e$message$split2[2];
          throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${maxConcurrent}`);
        } else {
          throw e;
        }
      }
    })();
  }

  __free__(index, weight) {
    var _this9 = this;
    return _asyncToGenerator(function* () {
      var running;
      running = yield _this9.runScript("free", _this9.prepareArray([index]));
      return {
        running
      };
    })();
  }
};
module.exports = RedisDatastore;

162
node_modules/bottleneck/lib/Scripts.js generated vendored Normal file
View file

@ -0,0 +1,162 @@
"use strict";
var headers, lua, templates;
// lua.json maps "<name>.lua" -> Lua source text (bundled at build time).
lua = require("./lua.json");
// Shared Lua snippets that exports.payload prepends to the per-command
// script templates defined below.
headers = {
  refs: lua["refs.lua"],
  validate_keys: lua["validate_keys.lua"],
  validate_client: lua["validate_client.lua"],
  refresh_expiration: lua["refresh_expiration.lua"],
  process_tick: lua["process_tick.lua"],
  conditions_check: lua["conditions_check.lua"],
  get_time: lua["get_time.lua"]
};
exports.allKeys = function (id) {
return [
/*
HASH
*/
`b_${id}_settings`,
/*
HASH
job index -> weight
*/
`b_${id}_job_weights`,
/*
ZSET
job index -> expiration
*/
`b_${id}_job_expirations`,
/*
HASH
job index -> client
*/
`b_${id}_job_clients`,
/*
ZSET
client -> sum running
*/
`b_${id}_client_running`,
/*
HASH
client -> num queued
*/
`b_${id}_client_num_queued`,
/*
ZSET
client -> last job registered
*/
`b_${id}_client_last_registered`,
/*
ZSET
client -> last seen
*/
`b_${id}_client_last_seen`];
};
// Script name -> { keys, headers, refresh_expiration, code }.
// `headers` lists the shared snippets (see `headers` above) to prepend;
// `refresh_expiration: true` additionally appends the TTL-refresh helper.
// Assembly happens in exports.payload.
templates = {
  init: {
    keys: exports.allKeys,
    headers: ["process_tick"],
    refresh_expiration: true,
    code: lua["init.lua"]
  },
  group_check: {
    keys: exports.allKeys,
    headers: [],
    refresh_expiration: false,
    code: lua["group_check.lua"]
  },
  register_client: {
    keys: exports.allKeys,
    headers: ["validate_keys"],
    refresh_expiration: false,
    code: lua["register_client.lua"]
  },
  blacklist_client: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client"],
    refresh_expiration: false,
    code: lua["blacklist_client.lua"]
  },
  heartbeat: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client", "process_tick"],
    refresh_expiration: false,
    code: lua["heartbeat.lua"]
  },
  update_settings: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client", "process_tick"],
    refresh_expiration: true,
    code: lua["update_settings.lua"]
  },
  running: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client", "process_tick"],
    refresh_expiration: false,
    code: lua["running.lua"]
  },
  queued: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client"],
    refresh_expiration: false,
    code: lua["queued.lua"]
  },
  done: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client", "process_tick"],
    refresh_expiration: false,
    code: lua["done.lua"]
  },
  check: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"],
    refresh_expiration: false,
    code: lua["check.lua"]
  },
  submit: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"],
    refresh_expiration: true,
    code: lua["submit.lua"]
  },
  register: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"],
    refresh_expiration: true,
    code: lua["register.lua"]
  },
  free: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client", "process_tick"],
    refresh_expiration: true,
    code: lua["free.lua"]
  },
  current_reservoir: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client", "process_tick"],
    refresh_expiration: false,
    code: lua["current_reservoir.lua"]
  },
  increment_reservoir: {
    keys: exports.allKeys,
    headers: ["validate_keys", "validate_client", "process_tick"],
    refresh_expiration: true,
    code: lua["increment_reservoir.lua"]
  }
};
exports.names = Object.keys(templates);
exports.keys = function (name, id) {
return templates[name].keys(id);
};
exports.payload = function (name) {
var template;
template = templates[name];
return Array.prototype.concat(headers.refs, template.headers.map(function (h) {
return headers[h];
}), template.refresh_expiration ? headers.refresh_expiration : "", template.code).join("\n");
};

88
node_modules/bottleneck/lib/States.js generated vendored Normal file
View file

@ -0,0 +1,88 @@
"use strict";
var BottleneckError, States;
BottleneckError = require("./BottleneckError");
// Tracks each job's lifecycle position. `status` is the ordered list of
// status names; `_jobs` maps job id -> current status index; `counts[i]`
// is how many jobs currently sit at status i.
States = class States {
  constructor(status1) {
    this.status = status1;
    this._jobs = {};
    this.counts = this.status.map(() => 0);
  }

  // Advance job `id` to the next status; once past the last status the job
  // is removed from tracking entirely. Unknown ids are ignored.
  next(id) {
    const current = this._jobs[id];
    if (current == null) {
      return;
    }
    this.counts[current]--;
    const following = current + 1;
    if (following < this.status.length) {
      this.counts[following]++;
      return this._jobs[id]++;
    }
    return delete this._jobs[id];
  }

  // Begin tracking `id` at the first status.
  start(id) {
    this._jobs[id] = 0;
    return this.counts[0]++;
  }

  // Stop tracking `id`; returns whether it was tracked.
  remove(id) {
    const current = this._jobs[id];
    const known = current != null;
    if (known) {
      this.counts[current]--;
      delete this._jobs[id];
    }
    return known;
  }

  // Status name for `id`, or null when untracked.
  jobStatus(id) {
    const name = this.status[this._jobs[id]];
    return name != null ? name : null;
  }

  // Ids at the given status, or all tracked ids when status is omitted.
  statusJobs(status) {
    if (status == null) {
      return Object.keys(this._jobs);
    }
    const pos = this.status.indexOf(status);
    if (pos < 0) {
      throw new BottleneckError(`status must be one of ${this.status.join(', ')}`);
    }
    const matches = [];
    for (const id of Object.keys(this._jobs)) {
      if (this._jobs[id] === pos) {
        matches.push(id);
      }
    }
    return matches;
  }

  // { statusName: count } snapshot.
  statusCounts() {
    const result = {};
    this.counts.forEach((count, i) => {
      result[this.status[i]] = count;
    });
    return result;
  }
};
module.exports = States;

80
node_modules/bottleneck/lib/Sync.js generated vendored Normal file
View file

@ -0,0 +1,80 @@
"use strict";
// Babel helper: performs one step of a generator-driven async function.
// Calls gen[key](arg) ("next" or "throw"); rejects the outer promise if the
// generator throws, resolves it when the generator is done, and otherwise
// chains the yielded value through Promise.resolve to schedule the next step.
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
// Babel helper: wraps a generator function so calling the wrapper returns a
// Promise that is settled by repeatedly stepping the generator via
// asyncGeneratorStep (the transpiled equivalent of an async function).
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
var DLList, Sync;
DLList = require("./DLList");
// FIFO task serializer: `schedule` enqueues an async task, and at most one
// task executes at any moment; each completion kicks off the next.
Sync = class Sync {
  // `name` labels this serializer; `Promise` is the caller-supplied promise
  // implementation used for the promises `schedule` hands back.
  constructor(name, Promise) {
    // Bound so `schedule` can be passed around detached from the instance.
    this.schedule = this.schedule.bind(this);
    this.name = name;
    this.Promise = Promise;
    this._running = 0; // count of tasks currently executing (0 or 1 here)
    this._queue = new DLList(); // pending {task, args, resolve, reject} entries
  }

  // True when no tasks are queued (a task may still be running).
  isEmpty() {
    return this._queue.length === 0;
  }

  // Runs the next queued task if nothing is currently running.
  // The task's outcome is captured in the `cb` closure, which is invoked
  // only AFTER `_running` is decremented and the next run has been
  // triggered — so internal state is consistent before the caller's
  // promise settles.
  _tryToRun() {
    var _this = this;

    return _asyncToGenerator(function* () {
      var args, cb, error, reject, resolve, returned, task;

      if (_this._running < 1 && _this._queue.length > 0) {
        _this._running++;

        var _this$_queue$shift = _this._queue.shift();

        task = _this$_queue$shift.task;
        args = _this$_queue$shift.args;
        resolve = _this$_queue$shift.resolve;
        reject = _this$_queue$shift.reject;
        // Await the task; wrap the settle action (resolve or reject) in a
        // thunk instead of settling immediately.
        cb = yield _asyncToGenerator(function* () {
          try {
            returned = yield task(...args);
            return function () {
              return resolve(returned);
            };
          } catch (error1) {
            error = error1;
            return function () {
              return reject(error);
            };
          }
        })();
        _this._running--;

        // Start the next queued task (fire-and-forget) before settling.
        _this._tryToRun();

        return cb();
      }
    })();
  }

  // Enqueues `task(...args)` and returns a promise for its result.
  schedule(task, ...args) {
    var promise, reject, resolve;
    resolve = reject = null;
    // Capture the resolvers so _tryToRun can settle the promise later.
    promise = new this.Promise(function (_resolve, _reject) {
      resolve = _resolve;
      return reject = _reject;
    });
    this._queue.push({
      task,
      args,
      resolve,
      reject
    });

    this._tryToRun();

    return promise;
  }

};
module.exports = Sync;

5
node_modules/bottleneck/lib/es5.js generated vendored Normal file
View file

@ -0,0 +1,5 @@
"use strict";

// Side-effect import: installs the regenerator runtime global — presumably
// required by the transpiled generator/async code in this ES5 build (TODO
// confirm against the build config).
require("regenerator-runtime/runtime");

// ES5 entry point: re-export the Bottleneck class.
module.exports = require("./Bottleneck");

3
node_modules/bottleneck/lib/index.js generated vendored Normal file
View file

@ -0,0 +1,3 @@
"use strict";

// Package entry point: re-export the Bottleneck class.
module.exports = require("./Bottleneck");

24
node_modules/bottleneck/lib/lua.json generated vendored Normal file

File diff suppressed because one or more lines are too long

26
node_modules/bottleneck/lib/parser.js generated vendored Normal file
View file

@ -0,0 +1,26 @@
"use strict";
exports.load = function (received, defaults, onto = {}) {
var k, ref, v;
for (k in defaults) {
v = defaults[k];
onto[k] = (ref = received[k]) != null ? ref : v;
}
return onto;
};
exports.overwrite = function (received, defaults, onto = {}) {
var k, v;
for (k in received) {
v = received[k];
if (defaults[k] !== void 0) {
onto[k] = v;
}
}
return onto;
};

1
node_modules/bottleneck/lib/version.json generated vendored Normal file
View file

@ -0,0 +1 @@
{"version":"2.19.5"}