diff --git a/.babelrc.es5 b/.babelrc.es5 deleted file mode 100644 index e7120e3..0000000 --- a/.babelrc.es5 +++ /dev/null @@ -1,5 +0,0 @@ -{ - "presets": [ - ["@babel/preset-env", {}] - ] -} \ No newline at end of file diff --git a/.babelrc.lib b/.babelrc.lib deleted file mode 100644 index de9dbba..0000000 --- a/.babelrc.lib +++ /dev/null @@ -1,9 +0,0 @@ -{ - "presets": [ - ["@babel/preset-env", { - "targets": { - "node": "6.0" - } - }] - ] -} \ No newline at end of file diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..870771d --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,52 @@ +name: CI + +on: + push: + branches: + - master + pull_request: + types: [opened, reopened, synchronize, ready_for_review] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test: + name: Test all + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4.4.0 + with: + node-version-file: ".nvmrc" + + - name: Enable corepack + run: corepack enable + + - name: Use yarn cache + uses: actions/cache@v4.2.3 + with: + path: "**/node_modules" + key: ${{ runner.os }}-node-${{ hashFiles('**/yarn.lock') }} + restore-keys: | + ${{ runner.os }}-node- + + - name: Install dependencies + run: yarn + + - name: Run linter + run: yarn lint + + - name: Check types + run: yarn check-types + + - name: Redis Server in GitHub Actions + uses: supercharge/redis-github-action@1.8.0 + + - name: Run tests + run: yarn test-all diff --git a/.gitignore b/.gitignore index 3a3df2f..707cee0 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,4 @@ test.js !.yarn/releases !.yarn/sdks !.yarn/versions +.eslintcache diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 0000000..c41c405 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,3 @@ +#!/bin/sh + +yarn lint-staged diff --git a/.lintstagedrc.yaml 
b/.lintstagedrc.yaml new file mode 100644 index 0000000..4203eb9 --- /dev/null +++ b/.lintstagedrc.yaml @@ -0,0 +1,4 @@ +"*.{js,jsx,mjs,ts,tsx,mts}": + - "yarn eslint --cache --fix --max-warnings=0 --no-warn-ignored --rule 'prettier/prettier: off'" +"*": + - yarn prettier --ignore-unknown --write diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 0000000..b8ffd70 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +22.15.0 diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000..3ed55b1 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,3 @@ +.yarn +light.js +lib/ diff --git a/.prettierrc.yml b/.prettierrc.yml new file mode 100644 index 0000000..4790b8a --- /dev/null +++ b/.prettierrc.yml @@ -0,0 +1,12 @@ +printWidth: 100 +plugins: + - prettier-plugin-sh + - prettier-plugin-organize-imports + - prettier-plugin-pkg + +# prettier-plugin-sh options +binaryNextLine: false +indent: 2 + +# prettier-plugin-organize-imports options +organizeImportsSkipDestructiveCodeActions: true diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 8204ece..0000000 --- a/.travis.yml +++ /dev/null @@ -1,25 +0,0 @@ -language: node_js -node_js: - - 8 -branches: - only: - - master - - next -services: - - redis-server -env: - global: - - "REDIS_HOST=127.0.0.1" - - "REDIS_PORT=6379" -cache: - directories: - - $HOME/.npm -install: -- npm i -sudo: required -after_success: npx codecov --file=./coverage/lcov.info -script: npm run test-all - -before_install: - - npm i -g npm@5.10 - - npm --version \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..82e8e97 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,5 @@ +{ + "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.formatOnSave": true, + "typescript.tsdk": "node_modules/typescript/lib" +} diff --git a/.yarnrc.yml b/.yarnrc.yml new file mode 100644 index 0000000..a660120 --- /dev/null +++ b/.yarnrc.yml @@ -0,0 +1 @@ +nodeLinker: pnpm 
diff --git a/README.md b/README.md index 216e461..ab50002 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ Bottleneck is a lightweight and zero-dependency Task Scheduler and Rate Limiter Bottleneck is an easy solution as it adds very little complexity to your code. It is battle-hardened, reliable and production-ready and used on a large scale in private companies and open source software. -It supports **Clustering**: it can rate limit jobs across multiple Node.js instances. It uses Redis and strictly atomic operations to stay reliable in the presence of unreliable clients and networks. It also supports *Redis Cluster* and *Redis Sentinel*. +It supports **Clustering**: it can rate limit jobs across multiple Node.js instances. It uses Redis and strictly atomic operations to stay reliable in the presence of unreliable clients and networks. It also supports _Redis Cluster_ and _Redis Sentinel_. ## Why this fork exists @@ -14,8 +14,11 @@ The original [bottleneck library](https://github.com/SGrondin/bottleneck) appear - [updateSettings not working for local datastore](https://github.com/sderrow/bottleneck/pull/3) - [Typescript fixes](https://github.com/sderrow/bottleneck/commit/62e1cf43ae639ab25181f5d544692ad6098eb9ce) and [enhancements](https://github.com/sderrow/bottleneck/commit/44caf901fd8e0af06c4aba6572276a44e5ef2a13) -### No breaking changes -Despite the major version bump, that's just to signify that this is a fork. There are no breaking JS changes right now. Technically, the TypeScript version was bumped to 4.X from 2.6 in order to support variadic tuples for `schedule`, `wrap`, and `submit`. +More importantly, this library has been rewritten with modern-day JS (courtesy of [decaffeinate](https://www.npmjs.com/package/decaffeinate)) since CoffeeScript isn't necessary anymore. Much of the packages and the overall toolchain has been upgraded as well. There is still more work to do of course, such as writing the whole thing natively in TypeScript. 
+ +### Breaking changes in v4 + +ES5 is no longer supported. ## Table of Contents @@ -23,7 +26,7 @@ Despite the major version bump, that's just to signify that this is a fork. Ther - [Install](#install) - [Quick Start](#quick-start) - * [Gotchas & Common Mistakes](#gotchas--common-mistakes) + - [Gotchas & Common Mistakes](#gotchas--common-mistakes) - [Constructor](#constructor) - [Reservoir Intervals](#reservoir-intervals) - [`submit()`](#submit) @@ -58,35 +61,29 @@ If you don't want to refactor all your import/require calls for this fork, you c ``` "dependencies": { ... - "bottleneck": "npm:@sderrow/bottleneck@^3" + "bottleneck": "npm:@sderrow/bottleneck@^4" ... } ``` -Note: To support older browsers and Node <6.0, you must import the ES5 bundle instead. - -```js -import Bottleneck from "bottleneck"; - -var Bottleneck = require("bottleneck/es5"); -``` - ## Quick Start ### Step 1 of 3 Most APIs have a rate limit. For example, to execute 3 requests per second: + ```js const limiter = new Bottleneck({ - minTime: 333 + minTime: 333, }); ``` If there's a chance some requests might take longer than 333ms and you want to prevent more than 1 request from running at a time, add `maxConcurrent: 1`: + ```js const limiter = new Bottleneck({ maxConcurrent: 1, - minTime: 333 + minTime: 333, }); ``` @@ -97,25 +94,29 @@ const limiter = new Bottleneck({ #### ➤ Using promises? Instead of this: + ```js -myFunction(arg1, arg2) -.then((result) => { +myFunction(arg1, arg2).then((result) => { /* handle result */ }); ``` + Do this: + ```js -limiter.schedule(() => myFunction(arg1, arg2)) -.then((result) => { - /* handle result */ -}); +limiter + .schedule(() => myFunction(arg1, arg2)) + .then((result) => { + /* handle result */ + }); ``` + Or this: + ```js const wrapped = limiter.wrap(myFunction); -wrapped(arg1, arg2) -.then((result) => { +wrapped(arg1, arg2).then((result) => { /* handle result */ }); ``` @@ -123,14 +124,19 @@ wrapped(arg1, arg2) #### ➤ Using async/await? 
Instead of this: + ```js const result = await myFunction(arg1, arg2); ``` + Do this: + ```js const result = await limiter.schedule(() => myFunction(arg1, arg2)); ``` + Or this: + ```js const wrapped = limiter.wrap(myFunction); @@ -140,10 +146,13 @@ const result = await wrapped(arg1, arg2); #### ➤ Using callbacks? Instead of this: + ```js someAsyncCall(arg1, arg2, callback); ``` + Do this: + ```js limiter.submit(someAsyncCall, arg1, arg2, callback); ``` @@ -162,25 +171,29 @@ Instead of throttling maybe [you want to batch up requests](#batching) into fewe ### Gotchas & Common Mistakes -* Make sure the function you pass to `schedule()` or `wrap()` only returns once **all the work it does** has completed. +- Make sure the function you pass to `schedule()` or `wrap()` only returns once **all the work it does** has completed. Instead of this: + ```js limiter.schedule(() => { - tasksArray.forEach(x => processTask(x)); + tasksArray.forEach((x) => processTask(x)); // BAD, we return before our processTask() functions are finished processing! }); ``` + Do this: + ```js limiter.schedule(() => { - const allTasks = tasksArray.map(x => processTask(x)); + const allTasks = tasksArray.map((x) => processTask(x)); // GOOD, we wait until all tasks are done. return Promise.all(allTasks); }); ``` -* If you're passing an object's method as a job, you'll probably need to `bind()` the object: +- If you're passing an object's method as a job, you'll probably need to `bind()` the object: + ```js // instead of this: limiter.schedule(object.doSomething); @@ -190,45 +203,46 @@ limiter.schedule(object.doSomething.bind(object)); limiter.schedule(() => object.doSomething()); ``` -* Bottleneck requires Node 6+ to function. However, an ES5 build is included: `var Bottleneck = require("bottleneck/es5");`. +- Bottleneck requires Node 6+ to function. However, an ES5 build is included: `var Bottleneck = require("bottleneck/es5");`. -* Make sure you're catching `"error"` events emitted by your limiters! 
+- Make sure you're catching `"error"` events emitted by your limiters! -* Consider setting a `maxConcurrent` value instead of leaving it `null`. This can help your application's performance, especially if you think the limiter's queue might become very long. +- Consider setting a `maxConcurrent` value instead of leaving it `null`. This can help your application's performance, especially if you think the limiter's queue might become very long. -* If you plan on using `priorities`, make sure to set a `maxConcurrent` value. +- If you plan on using `priorities`, make sure to set a `maxConcurrent` value. -* **When using `submit()`**, if a callback isn't necessary, you must pass `null` or an empty function instead. It will not work otherwise. +- **When using `submit()`**, if a callback isn't necessary, you must pass `null` or an empty function instead. It will not work otherwise. -* **When using `submit()`**, make sure all the jobs will eventually complete by calling their callback, or set an [`expiration`](#job-options). Even if you submitted your job with a `null` callback , it still needs to call its callback. This is particularly important if you are using a `maxConcurrent` value that isn't `null` (unlimited), otherwise those not completed jobs will be clogging up the limiter and no new jobs will be allowed to run. It's safe to call the callback more than once, subsequent calls are ignored. +- **When using `submit()`**, make sure all the jobs will eventually complete by calling their callback, or set an [`expiration`](#job-options). Even if you submitted your job with a `null` callback , it still needs to call its callback. This is particularly important if you are using a `maxConcurrent` value that isn't `null` (unlimited), otherwise those not completed jobs will be clogging up the limiter and no new jobs will be allowed to run. It's safe to call the callback more than once, subsequent calls are ignored. 
-* Using tools like `mockdate` in your tests to change time in JavaScript will likely result in undefined behavior from Bottleneck. +- Using tools like `mockdate` in your tests to change time in JavaScript will likely result in undefined behavior from Bottleneck. ## Docs ### Constructor ```js -const limiter = new Bottleneck({/* options */}); +const limiter = new Bottleneck({ + /* options */ +}); ``` Basic options: -| Option | Default | Description | -|--------|---------|-------------| -| `maxConcurrent` | `null` (unlimited) | How many jobs can be executing at the same time. Consider setting a value instead of leaving it `null`, it can help your application's performance, especially if you think the limiter's queue might get very long. | -| `minTime` | `0` ms | How long to wait after launching a job before launching another one. | -| `highWater` | `null` (unlimited) | How long can the queue be? When the queue length exceeds that value, the selected `strategy` is executed to shed the load. | -| `strategy` | `Bottleneck.strategy.LEAK` | Which strategy to use when the queue gets longer than the high water mark. [Read about strategies](#strategies). Strategies are never executed if `highWater` is `null`. | -| `penalty` | `15 * minTime`, or `5000` when `minTime` is `0` | The `penalty` value used by the `BLOCK` strategy. | -| `reservoir` | `null` (unlimited) | How many jobs can be executed before the limiter stops executing jobs. If `reservoir` reaches `0`, no jobs will be executed until it is no longer `0`. New jobs will still be queued up. | -| `reservoirRefreshInterval` | `null` (disabled) | Every `reservoirRefreshInterval` milliseconds, the `reservoir` value will be automatically updated to the value of `reservoirRefreshAmount`. The `reservoirRefreshInterval` value should be a [multiple of 250 (5000 for Clustering)](https://github.com/SGrondin/bottleneck/issues/88). 
| -| `reservoirRefreshAmount` | `null` (disabled) | The value to set `reservoir` to when `reservoirRefreshInterval` is in use. | -| `reservoirIncreaseInterval` | `null` (disabled) | Every `reservoirIncreaseInterval` milliseconds, the `reservoir` value will be automatically incremented by `reservoirIncreaseAmount`. The `reservoirIncreaseInterval` value should be a [multiple of 250 (5000 for Clustering)](https://github.com/SGrondin/bottleneck/issues/88). | -| `reservoirIncreaseAmount` | `null` (disabled) | The increment applied to `reservoir` when `reservoirIncreaseInterval` is in use. | -| `reservoirIncreaseMaximum` | `null` (disabled) | The maximum value that `reservoir` can reach when `reservoirIncreaseInterval` is in use. | -| `Promise` | `Promise` (built-in) | This lets you override the Promise library used by Bottleneck. | - +| Option | Default | Description | +| --------------------------- | ----------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `maxConcurrent` | `null` (unlimited) | How many jobs can be executing at the same time. Consider setting a value instead of leaving it `null`, it can help your application's performance, especially if you think the limiter's queue might get very long. | +| `minTime` | `0` ms | How long to wait after launching a job before launching another one. | +| `highWater` | `null` (unlimited) | How long can the queue be? When the queue length exceeds that value, the selected `strategy` is executed to shed the load. | +| `strategy` | `Bottleneck.strategy.LEAK` | Which strategy to use when the queue gets longer than the high water mark. [Read about strategies](#strategies). Strategies are never executed if `highWater` is `null`. 
| +| `penalty` | `15 * minTime`, or `5000` when `minTime` is `0` | The `penalty` value used by the `BLOCK` strategy. | +| `reservoir` | `null` (unlimited) | How many jobs can be executed before the limiter stops executing jobs. If `reservoir` reaches `0`, no jobs will be executed until it is no longer `0`. New jobs will still be queued up. | +| `reservoirRefreshInterval` | `null` (disabled) | Every `reservoirRefreshInterval` milliseconds, the `reservoir` value will be automatically updated to the value of `reservoirRefreshAmount`. The `reservoirRefreshInterval` value should be a [multiple of 250 (5000 for Clustering)](https://github.com/SGrondin/bottleneck/issues/88). | +| `reservoirRefreshAmount` | `null` (disabled) | The value to set `reservoir` to when `reservoirRefreshInterval` is in use. | +| `reservoirIncreaseInterval` | `null` (disabled) | Every `reservoirIncreaseInterval` milliseconds, the `reservoir` value will be automatically incremented by `reservoirIncreaseAmount`. The `reservoirIncreaseInterval` value should be a [multiple of 250 (5000 for Clustering)](https://github.com/SGrondin/bottleneck/issues/88). | +| `reservoirIncreaseAmount` | `null` (disabled) | The increment applied to `reservoir` when `reservoirIncreaseInterval` is in use. | +| `reservoirIncreaseMaximum` | `null` (disabled) | The maximum value that `reservoir` can reach when `reservoirIncreaseInterval` is in use. | +| `Promise` | `Promise` (built-in) | This lets you override the Promise library used by Bottleneck. | ### Reservoir Intervals @@ -248,9 +262,10 @@ const limiter = new Bottleneck({ // also use maxConcurrent and/or minTime for safety maxConcurrent: 1, - minTime: 333 // pick a value that makes sense for your use case + minTime: 333, // pick a value that makes sense for your use case }); ``` + `reservoir` is a counter decremented every time a job is launched, we set its initial value to 100. 
Then, every `reservoirRefreshInterval` (60000 ms), `reservoir` is automatically updated to be equal to the `reservoirRefreshAmount` (100). #### Increase Interval @@ -266,7 +281,7 @@ const limiter = new Bottleneck({ // also use maxConcurrent and/or minTime for safety maxConcurrent: 5, - minTime: 250 // pick a value that makes sense for your use case + minTime: 250, // pick a value that makes sense for your use case }); ``` @@ -283,9 +298,11 @@ Reservoir Intervals are an advanced feature, please take the time to read and un ### submit() Adds a job to the queue. This is the callback version of `schedule()`. + ```js limiter.submit(someAsyncCall, arg1, arg2, callback); ``` + You can pass `null` instead of an empty function if there is no callback, but `someAsyncCall` still needs to call **its** callback to let the limiter know it has completed its work. `submit()` can also accept [advanced options](#job-options). @@ -293,84 +310,111 @@ You can pass `null` instead of an empty function if there is no callback, but `s ### schedule() Adds a job to the queue. This is the Promise and async/await version of `submit()`. + ```js -const fn = function(arg1, arg2) { +const fn = function (arg1, arg2) { return httpGet(arg1, arg2); // Here httpGet() returns a promise }; -limiter.schedule(fn, arg1, arg2) -.then((result) => { +limiter.schedule(fn, arg1, arg2).then((result) => { /* ... */ }); ``` + In other words, `schedule()` takes a function **fn** and a list of arguments. `schedule()` returns a promise that will be executed according to the rate limits. `schedule()` can also accept [advanced options](#job-options). Here's another example: + ```js // suppose that `client.get(url)` returns a promise const url = "https://wikipedia.org"; -limiter.schedule(() => client.get(url)) -.then(response => console.log(response.body)); +limiter.schedule(() => client.get(url)).then((response) => console.log(response.body)); ``` ### wrap() Takes a function that returns a promise. 
Returns a function identical to the original, but rate limited. + ```js const wrapped = limiter.wrap(fn); wrapped() -.then(function (result) { - /* ... */ -}) -.catch(function (error) { - // Bottleneck might need to fail the job even if the original function can never fail. - // For example, your job is taking longer than the `expiration` time you've set. -}); + .then(function (result) { + /* ... */ + }) + .catch(function (error) { + // Bottleneck might need to fail the job even if the original function can never fail. + // For example, your job is taking longer than the `expiration` time you've set. + }); ``` ### Job Options `submit()`, `schedule()`, and `wrap()` all accept advanced options. + ```js // Submit -limiter.submit({/* options */}, someAsyncCall, arg1, arg2, callback); +limiter.submit( + { + /* options */ + }, + someAsyncCall, + arg1, + arg2, + callback, +); // Schedule -limiter.schedule({/* options */}, fn, arg1, arg2); +limiter.schedule( + { + /* options */ + }, + fn, + arg1, + arg2, +); // Wrap const wrapped = limiter.wrap(fn); -wrapped.withOptions({/* options */}, arg1, arg2); -``` - -| Option | Default | Description | -|--------|---------|-------------| -| `priority` | `5` | A priority between `0` and `9`. A job with a priority of `4` will be queued ahead of a job with a priority of `5`. **Important:** You must set a low `maxConcurrent` value for priorities to work, otherwise there is nothing to queue because jobs will be be scheduled immediately! | -| `weight` | `1` | Must be an integer equal to or higher than `0`. The `weight` is what increases the number of running jobs (up to `maxConcurrent`) and decreases the `reservoir` value. | -| `expiration` | `null` (unlimited) | The number of milliseconds a job is given to complete. Jobs that execute for longer than `expiration` ms will be failed with a `BottleneckError`. | -| `id` | `` | You should give an ID to your jobs, it helps with [debugging](#debugging-your-application). 
| +wrapped.withOptions( + { + /* options */ + }, + arg1, + arg2, +); +``` + +| Option | Default | Description | +| ------------ | ------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `priority` | `5` | A priority between `0` and `9`. A job with a priority of `4` will be queued ahead of a job with a priority of `5`. **Important:** You must set a low `maxConcurrent` value for priorities to work, otherwise there is nothing to queue because jobs will be be scheduled immediately! | +| `weight` | `1` | Must be an integer equal to or higher than `0`. The `weight` is what increases the number of running jobs (up to `maxConcurrent`) and decreases the `reservoir` value. | +| `expiration` | `null` (unlimited) | The number of milliseconds a job is given to complete. Jobs that execute for longer than `expiration` ms will be failed with a `BottleneckError`. | +| `id` | `` | You should give an ID to your jobs, it helps with [debugging](#debugging-your-application). | ### Strategies A strategy is a simple algorithm that is executed every time adding a job would cause the number of queued jobs to exceed `highWater`. Strategies are never executed if `highWater` is `null`. #### Bottleneck.strategy.LEAK + When adding a new job to a limiter, if the queue length reaches `highWater`, drop the oldest job with the lowest priority. This is useful when jobs that have been waiting for too long are not important anymore. If all the queued jobs are more important (based on their `priority` value) than the one being added, it will not be added. #### Bottleneck.strategy.OVERFLOW_PRIORITY -Same as `LEAK`, except it will only drop jobs that are *less important* than the one being added. 
If all the queued jobs are as or more important than the new one, it will not be added. + +Same as `LEAK`, except it will only drop jobs that are _less important_ than the one being added. If all the queued jobs are as or more important than the new one, it will not be added. #### Bottleneck.strategy.OVERFLOW + When adding a new job to a limiter, if the queue length reaches `highWater`, do not add the new job. This strategy totally ignores priority levels. #### Bottleneck.strategy.BLOCK -When adding a new job to a limiter, if the queue length reaches `highWater`, the limiter falls into "blocked mode". All queued jobs are dropped and no new jobs will be accepted until the limiter unblocks. It will unblock after `penalty` milliseconds have passed without receiving a new job. `penalty` is equal to `15 * minTime` (or `5000` if `minTime` is `0`) by default. This strategy is ideal when bruteforce attacks are to be expected. This strategy totally ignores priority levels. +When adding a new job to a limiter, if the queue length reaches `highWater`, the limiter falls into "blocked mode". All queued jobs are dropped and no new jobs will be accepted until the limiter unblocks. It will unblock after `penalty` milliseconds have passed without receiving a new job. `penalty` is equal to `15 * minTime` (or `5000` if `minTime` is `0`) by default. This strategy is ideal when bruteforce attacks are to be expected. This strategy totally ignores priority levels. ### Jobs lifecycle @@ -452,8 +496,7 @@ Returns a boolean which indicates whether there are any `RECEIVED` or `QUEUED` j #### running() ```js -limiter.running() -.then((count) => console.log(count)); +limiter.running().then((count) => console.log(count)); ``` Returns a promise that returns the **total weight** of the `RUNNING` and `EXECUTING` jobs **in the Cluster**. 
@@ -461,8 +504,7 @@ Returns a promise that returns the **total weight** of the `RUNNING` and `EXECUT #### done() ```js -limiter.done() -.then((count) => console.log(count)); +limiter.done().then((count) => console.log(count)); ``` Returns a promise that returns the **total weight** of `DONE` jobs **in the Cluster**. Does not require passing the `trackDoneStatus: true` option. @@ -470,15 +512,15 @@ Returns a promise that returns the **total weight** of `DONE` jobs **in the Clus #### check() ```js -limiter.check() -.then((wouldRunNow) => console.log(wouldRunNow)); +limiter.check().then((wouldRunNow) => console.log(wouldRunNow)); ``` -Checks if a new job would be executed immediately if it was submitted now. Returns a promise that returns a boolean. +Checks if a new job would be executed immediately if it was submitted now. Returns a promise that returns a boolean. ### Events -__'error'__ +**'error'** + ```js limiter.on("error", function (error) { /* handle errors here */ @@ -487,37 +529,42 @@ limiter.on("error", function (error) { The two main causes of error events are: uncaught exceptions in your event handlers, and network errors when Clustering is enabled. -__'failed'__ +**'failed'** + ```js limiter.on("failed", function (error, jobInfo) { // This will be called every time a job fails. }); ``` -__'retry'__ +**'retry'** See [Retries](#retries) to learn how to automatically retry jobs. + ```js limiter.on("retry", function (message, jobInfo) { // This will be called every time a job is retried. }); ``` -__'empty'__ +**'empty'** + ```js limiter.on("empty", function () { // This will be called when `limiter.empty()` becomes true. }); ``` -__'idle'__ +**'idle'** + ```js limiter.on("idle", function () { // This will be called when `limiter.empty()` is `true` and `limiter.running()` is `0`. }); ``` -__'dropped'__ +**'dropped'** + ```js limiter.on("dropped", function (dropped) { // This will be called when a strategy was triggered. 
@@ -525,7 +572,8 @@ limiter.on("dropped", function (dropped) { }); ``` -__'depleted'__ +**'depleted'** + ```js limiter.on("depleted", function (empty) { // This will be called every time the reservoir drops to 0. @@ -533,7 +581,8 @@ limiter.on("depleted", function (empty) { }); ``` -__'debug'__ +**'debug'** + ```js limiter.on("debug", function (message, data) { // Useful to figure out what the limiter is doing in real time @@ -541,11 +590,12 @@ limiter.on("debug", function (message, data) { }); ``` -__'received'__ -__'queued'__ -__'scheduled'__ -__'executing'__ -__'done'__ +**'received'** +**'queued'** +**'scheduled'** +**'executing'** +**'done'** + ```js limiter.on("queued", function (info) { // This event is triggered when a job transitions from one Lifecycle stage to another @@ -562,10 +612,10 @@ Use `removeAllListeners()` with an optional event name as first argument to remo Use `.once()` instead of `.on()` to only receive a single event. - ### Retries The following example: + ```js const limiter = new Bottleneck(); @@ -574,7 +624,8 @@ limiter.on("failed", async (error, jobInfo) => { const id = jobInfo.options.id; console.warn(`Job ${id} failed: ${error}`); - if (jobInfo.retryCount === 0) { // Here we only retry once + if (jobInfo.retryCount === 0) { + // Here we only retry once console.log(`Retrying job ${id} in 25ms!`); return 25; } @@ -587,7 +638,7 @@ const main = async function () { let executions = 0; // Schedule one job - const result = await limiter.schedule({ id: 'ABC123' }, async () => { + const result = await limiter.schedule({ id: "ABC123" }, async () => { executions++; if (executions === 1) { throw new Error("Boom!"); @@ -597,27 +648,30 @@ const main = async function () { }); console.log(`Result: ${result}`); -} +}; main(); ``` + will output + ``` Job ABC123 failed: Error: Boom! Retrying job ABC123 in 25ms! Now retrying ABC123 Result: Success! ``` + To re-run your job, simply return an integer from the `'failed'` event handler. 
The number returned is how many milliseconds to wait before retrying it. Return `0` to retry it immediately. **IMPORTANT:** When you ask the limiter to retry a job it will not send it back into the queue. It will stay in the `EXECUTING` [state](#jobs-lifecycle) until it succeeds or until you stop retrying it. **This means that it counts as a concurrent job for `maxConcurrent` even while it's just waiting to be retried.** The number of milliseconds to wait ignores your `minTime` settings. - ### updateSettings() ```js limiter.updateSettings(options); ``` + The options are the same as the [limiter constructor](#constructor). **Note:** Changes don't affect `SCHEDULED` jobs. @@ -627,14 +681,15 @@ The options are the same as the [limiter constructor](#constructor). ```js limiter.incrementReservoir(incrementBy); ``` + Returns a promise that returns the new reservoir value. ### currentReservoir() ```js -limiter.currentReservoir() -.then((reservoir) => console.log(reservoir)); +limiter.currentReservoir().then((reservoir) => console.log(reservoir)); ``` + Returns a promise that returns the current reservoir value. ### stop() @@ -642,27 +697,27 @@ Returns a promise that returns the current reservoir value. The `stop()` method is used to safely shutdown a limiter. It prevents any new jobs from being added to the limiter and waits for all `EXECUTING` jobs to complete. ```js -limiter.stop(options) -.then(() => { - console.log("Shutdown completed!") +limiter.stop(options).then(() => { + console.log("Shutdown completed!"); }); ``` `stop()` returns a promise that resolves once all the `EXECUTING` jobs have completed and, if desired, once all non-`EXECUTING` jobs have been dropped. -| Option | Default | Description | -|--------|---------|-------------| -| `dropWaitingJobs` | `true` | When `true`, drop all the `RECEIVED`, `QUEUED` and `RUNNING` jobs. When `false`, allow those jobs to complete before resolving the Promise returned by this method. 
| -| `dropErrorMessage` | `This limiter has been stopped.` | The error message used to drop jobs when `dropWaitingJobs` is `true`. | -| `enqueueErrorMessage` | `This limiter has been stopped and cannot accept new jobs.` | The error message used to reject a job added to the limiter after `stop()` has been called. | +| Option | Default | Description | +| --------------------- | ----------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `dropWaitingJobs` | `true` | When `true`, drop all the `RECEIVED`, `QUEUED` and `RUNNING` jobs. When `false`, allow those jobs to complete before resolving the Promise returned by this method. | +| `dropErrorMessage` | `This limiter has been stopped.` | The error message used to drop jobs when `dropWaitingJobs` is `true`. | +| `enqueueErrorMessage` | `This limiter has been stopped and cannot accept new jobs.` | The error message used to reject a job added to the limiter after `stop()` has been called. | ### chain() Tasks that are ready to be executed will be added to that other limiter. Suppose you have 2 types of tasks, A and B. They both have their own limiter with their own settings, but both must also follow a global limiter G: + ```js -const limiterA = new Bottleneck( /* some settings */ ); -const limiterB = new Bottleneck( /* some different settings */ ); -const limiterG = new Bottleneck( /* some global settings */ ); +const limiterA = new Bottleneck(/* some settings */); +const limiterB = new Bottleneck(/* some different settings */); +const limiterG = new Bottleneck(/* some global settings */); limiterA.chain(limiterG); limiterB.chain(limiterG); @@ -680,7 +735,6 @@ The `Group` feature of Bottleneck manages many limiters automatically for you. I Let's take a DNS server as an example of how Bottleneck can be used. 
It's a service that sees a lot of abuse and where incoming DNS requests need to be rate limited. Bottleneck is so tiny, it's acceptable to create one limiter for each origin IP, even if it means creating thousands of limiters. The `Group` feature is perfect for this use case. Create one Group and use the origin IP to rate limit each IP independently. Each call with the same key (IP) will be routed to the same underlying limiter. A Group is created like a limiter: - ```js const group = new Bottleneck.Group(options); ``` @@ -698,7 +752,7 @@ group.key("77.66.54.32").schedule(() => { #### key() -* `str` : The key to use. All jobs added with the same key will use the same underlying limiter. *Default: `""`* +- `str` : The key to use. All jobs added with the same key will use the same underlying limiter. _Default: `""`_ The return value of `.key(str)` is a limiter. If it doesn't already exist, it is generated for you. Calling `key()` is how limiters are created inside a Group. @@ -708,12 +762,12 @@ Limiters that have been idle for longer than 5 minutes are deleted to avoid memo ```js group.on("created", (limiter, key) => { - console.log("A new limiter was created for key: " + key) + console.log("A new limiter was created for key: " + key); // Prepare the limiter, for example we'll want to listen to its "error" events! limiter.on("error", (err) => { // Handle errors here - }) + }); }); ``` @@ -725,12 +779,12 @@ Listening for the `"created"` event is the recommended way to set up a new limit const group = new Bottleneck.Group({ maxConcurrent: 2, minTime: 250 }); group.updateSettings({ minTime: 500 }); ``` -After executing the above commands, **new limiters** will be created with `{ maxConcurrent: 2, minTime: 500 }`. +After executing the above commands, **new limiters** will be created with `{ maxConcurrent: 2, minTime: 500 }`. #### deleteKey() -* `str`: The key for the limiter to delete. +- `str`: The key for the limiter to delete. 
Manually deletes the limiter at the specified key. When using Clustering, the Redis data is immediately deleted and the other Groups in the Cluster will eventually delete their local key automatically, unless it is still being used. @@ -754,10 +808,11 @@ console.log(limiters); ## Batching Some APIs can accept multiple operations in a single call. Bottleneck's Batching feature helps you take advantage of those APIs: + ```js const batcher = new Bottleneck.Batcher({ maxTime: 1000, - maxSize: 10 + maxSize: 10, }); batcher.on("batch", (batch) => { @@ -772,10 +827,10 @@ batcher.add("some-other-data"); `batcher.add()` returns a Promise that resolves once the request has been flushed to a `"batch"` event. -| Option | Default | Description | -|--------|---------|-------------| +| Option | Default | Description | +| --------- | ------------------ | ----------------------------------------------------------------------------------------------------------------- | | `maxTime` | `null` (unlimited) | Maximum acceptable time (in milliseconds) a request can have to wait before being flushed to the `"batch"` event. | -| `maxSize` | `null` (unlimited) | Maximum number of requests in a batch. | +| `maxSize` | `null` (unlimited) | Maximum number of requests in a batch. | Batching doesn't throttle requests, it only groups them up optimally according to your `maxTime` and `maxSize` settings. @@ -788,6 +843,7 @@ Bottleneck will attempt to spread load evenly across limiters. 
### Enabling Clustering First, add `redis` or `ioredis` to your application's dependencies: + ```bash # NodeRedis (https://github.com/NodeRedis/node_redis) npm install --save redis @@ -795,7 +851,9 @@ npm install --save redis # or ioredis (https://github.com/luin/ioredis) npm install --save ioredis ``` + Then create a limiter or a Group: + ```js const limiter = new Bottleneck({ /* Some basic options */ @@ -817,28 +875,30 @@ const limiter = new Bottleneck({ }); ``` -| Option | Default | Description | -|--------|---------|-------------| -| `datastore` | `"local"` | Where the limiter stores its internal state. The default (`"local"`) keeps the state in the limiter itself. Set it to `"redis"` or `"ioredis"` to enable Clustering. | -| `clearDatastore` | `false` | When set to `true`, on initial startup, the limiter will wipe any existing Bottleneck state data on the Redis db. | -| `clientOptions` | `{}` | This object is passed directly to the redis client library you've selected. | -| `clusterNodes` | `null` | **ioredis only.** When `clusterNodes` is not null, the client will be instantiated by calling `new Redis.Cluster(clusterNodes, clientOptions)` instead of `new Redis(clientOptions)`. | -| `timeout` | `null` (no TTL) | The Redis TTL in milliseconds ([TTL](https://redis.io/commands/ttl)) for the keys created by the limiter. When `timeout` is set, the limiter's state will be automatically removed from Redis after `timeout` milliseconds of inactivity. | -| `Redis` | `null` | Overrides the import/require of the redis/ioredis library. You shouldn't need to set this option unless your application is failing to start due to a failure to require/import the client library. 
| +| Option | Default | Description | +| ---------------- | --------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `datastore` | `"local"` | Where the limiter stores its internal state. The default (`"local"`) keeps the state in the limiter itself. Set it to `"redis"` or `"ioredis"` to enable Clustering. | +| `clearDatastore` | `false` | When set to `true`, on initial startup, the limiter will wipe any existing Bottleneck state data on the Redis db. | +| `clientOptions` | `{}` | This object is passed directly to the redis client library you've selected. | +| `clusterNodes` | `null` | **ioredis only.** When `clusterNodes` is not null, the client will be instantiated by calling `new Redis.Cluster(clusterNodes, clientOptions)` instead of `new Redis(clientOptions)`. | +| `timeout` | `null` (no TTL) | The Redis TTL in milliseconds ([TTL](https://redis.io/commands/ttl)) for the keys created by the limiter. When `timeout` is set, the limiter's state will be automatically removed from Redis after `timeout` milliseconds of inactivity. | +| `Redis` | `null` | Overrides the import/require of the redis/ioredis library. You shouldn't need to set this option unless your application is failing to start due to a failure to require/import the client library. | **Note: When using Groups**, the `timeout` option has a default of `300000` milliseconds and the generated limiters automatically receive an `id` with the pattern `${group.id}-${KEY}`. **Note:** If you are seeing a runtime error due to the `require()` function not being able to load `redis`/`ioredis`, then directly pass the module as the `Redis` option. 
Example: + ```js -import Redis from "ioredis" +import Redis from "ioredis"; const limiter = new Bottleneck({ id: "my-super-app", datastore: "ioredis", - clientOptions: { host: '12.34.56.78', port: 6379 }, - Redis + clientOptions: { host: "12.34.56.78", port: 6379 }, + Redis, }); ``` + Unfortunately, this is a side effect of having to disable inlining, which is necessary to make Bottleneck easy to use in the browser. ### Important considerations when Clustering @@ -848,11 +908,12 @@ The first limiter connecting to Redis will store its [constructor options](#cons Queued jobs are **NOT** stored on Redis. They are local to each limiter. Exiting the Node.js process will lose those jobs. This is because Bottleneck has no way to propagate the JS code to run a job across a different Node.js process than the one it originated on. Bottleneck doesn't keep track of the queue contents of the limiters on a cluster for performance and reliability reasons. You can use something like [`BeeQueue`](https://github.com/bee-queue/bee-queue) in addition to Bottleneck to get around this limitation. Due to the above, functionality relying on the queue length happens purely locally: + - Priorities are local. A higher priority job will run before a lower priority job **on the same limiter**. Another limiter on the cluster might run a lower priority job before our higher priority one. - Assuming constant priority levels, Bottleneck guarantees that jobs will be run in the order they were received **on the same limiter**. Another limiter on the cluster might run a job received later before ours runs. - `highWater` and load shedding ([strategies](#strategies)) are per limiter. However, one limiter entering Blocked mode will put the entire cluster in Blocked mode until `penalty` milliseconds have passed. See [Strategies](#strategies). - The `"empty"` event is triggered when the (local) queue is empty. 
-- The `"idle"` event is triggered when the (local) queue is empty *and* no jobs are currently running anywhere in the cluster. +- The `"idle"` event is triggered when the (local) queue is empty _and_ no jobs are currently running anywhere in the cluster. You must work around these limitations in your application code if they are an issue to you. The `publish()` method could be useful here. @@ -860,7 +921,7 @@ The current design guarantees reliability, is highly performant and lets limiter It is **strongly recommended** that you give an `id` to every limiter and Group since it is used to build the name of your limiter's Redis keys! Limiters with the same `id` inside the same Redis db will be sharing the same datastore. -It is **strongly recommended** that you set an `expiration` (See [Job Options](#job-options)) *on every job*, since that lets the cluster recover from crashed or disconnected clients. Otherwise, a client crashing while executing a job would not be able to tell the cluster to decrease its number of "running" jobs. By using expirations, those lost jobs are automatically cleared after the specified time has passed. Using expirations is essential to keeping a cluster reliable in the face of unpredictable application bugs, network hiccups, and so on. +It is **strongly recommended** that you set an `expiration` (See [Job Options](#job-options)) _on every job_, since that lets the cluster recover from crashed or disconnected clients. Otherwise, a client crashing while executing a job would not be able to tell the cluster to decrease its number of "running" jobs. By using expirations, those lost jobs are automatically cleared after the specified time has passed. Using expirations is essential to keeping a cluster reliable in the face of unpredictable application bugs, network hiccups, and so on. Network latency between Node.js and Redis is not taken into account when calculating timings (such as `minTime`). 
To minimize the impact of latency, Bottleneck only performs a single Redis call per [lifecycle transition](#jobs-lifecycle). Keeping the Redis server close to your limiters will help you get a more consistent experience. Keeping the system time consistent across all clients will also help. @@ -877,14 +938,15 @@ This method returns a promise that resolves once the limiter is connected to Red As of v2.9.0, it's no longer necessary to wait for `.ready()` to resolve before issuing commands to a limiter. The commands will be queued until the limiter successfully connects. Make sure to listen to the `"error"` event to handle connection errors. ```js -const limiter = new Bottleneck({/* options */}); +const limiter = new Bottleneck({ + /* options */ +}); limiter.on("error", (err) => { // handle network errors }); -limiter.ready() -.then(() => { +limiter.ready().then(() => { // The limiter is ready }); ``` @@ -892,8 +954,11 @@ limiter.ready() #### publish(message) This method broadcasts the `message` string to every limiter in the Cluster. It returns a promise. + ```js -const limiter = new Bottleneck({/* options */}); +const limiter = new Bottleneck({ + /* options */ +}); limiter.on("message", (msg) => { console.log(msg); // prints "this is a string" @@ -903,9 +968,10 @@ limiter.publish("this is a string"); ``` To send objects, stringify them first: + ```js limiter.on("message", (msg) => { - console.log(JSON.parse(msg).hello) // prints "world" + console.log(JSON.parse(msg).hello); // prints "world" }); limiter.publish(JSON.stringify({ hello: "world" })); @@ -914,6 +980,7 @@ limiter.publish(JSON.stringify({ hello: "world" })); #### clients() If you need direct access to the redis clients, use `.clients()`: + ```js console.log(limiter.clients()); // { client: , subscriber: } @@ -932,44 +999,59 @@ console.log(limiter.clients()); Bottleneck needs to create 2 Redis Clients to function, one for normal operations and one for pubsub subscriptions. 
These 2 clients are kept in a `Bottleneck.RedisConnection` (NodeRedis) or a `Bottleneck.IORedisConnection` (ioredis) object, referred to as the Connection object. By default, every Group and every standalone limiter (a limiter not created by a Group) will create their own Connection object, but it is possible to manually control this behavior. In this example, every Group and limiter is sharing the same Connection object and therefore the same 2 clients: + ```js const connection = new Bottleneck.RedisConnection({ - clientOptions: {/* NodeRedis/ioredis options */} + clientOptions: { + /* NodeRedis/ioredis options */ + }, // ioredis also accepts `clusterNodes` here }); - const limiter = new Bottleneck({ connection: connection }); const group = new Bottleneck.Group({ connection: connection }); ``` + You can access and reuse the Connection object of any Group or limiter: + ```js const group = new Bottleneck.Group({ connection: limiter.connection }); ``` + When a Connection object is created manually, the connectivity `"error"` events are emitted on the Connection itself. + ```js -connection.on("error", (err) => { /* handle connectivity errors here */ }); +connection.on("error", (err) => { + /* handle connectivity errors here */ +}); ``` + If you already have a NodeRedis/ioredis client, you can ask Bottleneck to reuse it, although currently the Connection object will still create a second client for pubsub operations: + ```js import Redis from "redis"; -const client = new Redis.createClient({/* options */}); +const client = new Redis.createClient({ + /* options */ +}); const connection = new Bottleneck.RedisConnection({ // `clientOptions` and `clusterNodes` will be ignored since we're passing a raw client - client: client + client: client, }); const limiter = new Bottleneck({ connection: connection }); const group = new Bottleneck.Group({ connection: connection }); ``` + Depending on your application, using more clients can improve performance. 
Use the `disconnect(flush)` method to close the Redis clients. + ```js limiter.disconnect(); group.disconnect(); ``` + If you created the Connection object manually, you need to call `connection.disconnect()` instead, for safety reasons. ## Debugging your application @@ -983,14 +1065,18 @@ Make sure you've read the ['Gotchas'](#gotchas) section. To see exactly what a limiter is doing in real time, listen to the `"debug"` event. It contains detailed information about how the limiter is executing your code. Adding [job IDs](#job-options) to all your jobs makes the debug output more readable. When Bottleneck has to fail one of your jobs, it does so by using `BottleneckError` objects. This lets you tell those errors apart from your own code's errors: + ```js -limiter.schedule(fn) -.then((result) => { /* ... */ } ) -.catch((error) => { - if (error instanceof Bottleneck.BottleneckError) { +limiter + .schedule(fn) + .then((result) => { /* ... */ - } -}); + }) + .catch((error) => { + if (error instanceof Bottleneck.BottleneckError) { + /* ... */ + } + }); ``` ## Upgrading to v2 @@ -998,6 +1084,7 @@ limiter.schedule(fn) The internal algorithms essentially haven't changed from v1, but many small changes to the interface were made to introduce new features. All the breaking changes: + - Bottleneck v2 requires Node 6+ or a modern browser. Use `require("bottleneck/es5")` if you need ES5 support in v2. Bottleneck v1 will continue to use ES5 only. - The Bottleneck constructor now takes an options object. See [Constructor](#constructor). - The `Cluster` feature is now called `Group`. This is to distinguish it from the new v2 [Clustering](#clustering) feature. @@ -1023,7 +1110,6 @@ Version 2 is more user-friendly and powerful. After upgrading your code, please take a minute to read the [Debugging your application](#debugging-your-application) chapter. - ## Contributing This README is always in need of improvements. 
If wording can be clearer and simpler, please consider forking this repo and submitting a Pull Request, or simply opening an issue. @@ -1038,4 +1124,4 @@ The tests must also pass in Clustering mode and using the ES5 bundle. You'll nee All contributions are appreciated and will be considered. -[license-url]: https://github.com/sderrow/bottleneck/blob/master/LICENSE \ No newline at end of file +[license-url]: https://github.com/sderrow/bottleneck/blob/master/LICENSE diff --git a/bottleneck.d.ts b/bottleneck.d.ts index 1e7aa61..a3a3027 100644 --- a/bottleneck.d.ts +++ b/bottleneck.d.ts @@ -1,134 +1,134 @@ declare module "bottleneck" { - namespace Bottleneck { + namespace Bottleneck { type ConstructorOptions = { - /** - * How many jobs can be running at the same time. - */ - readonly maxConcurrent?: number | null; - /** - * How long to wait after launching a job before launching another one. - */ - readonly minTime?: number | null; - /** - * How long can the queue get? When the queue length exceeds that value, the selected `strategy` is executed to shed the load. - */ - readonly highWater?: number | null; - /** - * Which strategy to use if the queue gets longer than the high water mark. - */ - readonly strategy?: Bottleneck.Strategy | null; - /** - * The `penalty` value used by the `Bottleneck.strategy.BLOCK` strategy. - */ - readonly penalty?: number | null; - /** - * How many jobs can be executed before the limiter stops executing jobs. If `reservoir` reaches `0`, no jobs will be executed until it is no longer `0`. - */ - readonly reservoir?: number | null; - /** - * Every `reservoirRefreshInterval` milliseconds, the `reservoir` value will be automatically reset to `reservoirRefreshAmount`. - */ - readonly reservoirRefreshInterval?: number | null; - /** - * The value to reset `reservoir` to when `reservoirRefreshInterval` is in use. 
- */ - readonly reservoirRefreshAmount?: number | null; - /** - * The increment applied to `reservoir` when `reservoirIncreaseInterval` is in use. - */ - readonly reservoirIncreaseAmount?: number | null; - /** - * Every `reservoirIncreaseInterval` milliseconds, the `reservoir` value will be automatically incremented by `reservoirIncreaseAmount`. - */ - readonly reservoirIncreaseInterval?: number | null; - /** - * The maximum value that `reservoir` can reach when `reservoirIncreaseInterval` is in use. - */ - readonly reservoirIncreaseMaximum?: number | null; - /** - * Optional identifier - */ - readonly id?: string | null; - /** - * Set to true to leave your failed jobs hanging instead of failing them. - */ - readonly rejectOnDrop?: boolean | null; - /** - * Set to true to keep track of done jobs with counts() and jobStatus(). Uses more memory. - */ - readonly trackDoneStatus?: boolean | null; - /** - * Where the limiter stores its internal state. The default (`local`) keeps the state in the limiter itself. Set it to `redis` to enable Clustering. - */ - readonly datastore?: string | null; - /** - * Override the Promise library used by Bottleneck. - */ - readonly Promise?: any; - /** - * This object is passed directly to the redis client library you've selected. - */ - readonly clientOptions?: any; - /** - * **ioredis only.** When `clusterNodes` is not null, the client will be instantiated by calling `new Redis.Cluster(clusterNodes, clientOptions)`. - */ - readonly clusterNodes?: any; - /** - * An existing Bottleneck.RedisConnection or Bottleneck.IORedisConnection object to use. - * If using, `datastore`, `clientOptions` and `clusterNodes` will be ignored. - */ - /** - * Optional Redis/IORedis library from `require('ioredis')` or equivalent. If not, Bottleneck will attempt to require Redis/IORedis at runtime. - */ - readonly Redis?: any; - /** - * Bottleneck connection object created from `new Bottleneck.RedisConnection` or `new Bottleneck.IORedisConnection`. 
- */ - readonly connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection | null; - /** - * When set to `true`, on initial startup, the limiter will wipe any existing Bottleneck state data on the Redis db. - */ - readonly clearDatastore?: boolean | null; - /** - * The Redis TTL in milliseconds for the keys created by the limiter. When `timeout` is set, the limiter's state will be automatically removed from Redis after timeout milliseconds of inactivity. Note: timeout is 300000 (5 minutes) by default when using a Group. - */ - readonly timeout?: number | null; - /** - * Every `heartbeatInterval` milliseconds, the `reservoir` is assessed. - */ - readonly heartbeatInterval?: number | null; + /** + * How many jobs can be running at the same time. + */ + readonly maxConcurrent?: number | null; + /** + * How long to wait after launching a job before launching another one. + */ + readonly minTime?: number | null; + /** + * How long can the queue get? When the queue length exceeds that value, the selected `strategy` is executed to shed the load. + */ + readonly highWater?: number | null; + /** + * Which strategy to use if the queue gets longer than the high water mark. + */ + readonly strategy?: Bottleneck.Strategy | null; + /** + * The `penalty` value used by the `Bottleneck.strategy.BLOCK` strategy. + */ + readonly penalty?: number | null; + /** + * How many jobs can be executed before the limiter stops executing jobs. If `reservoir` reaches `0`, no jobs will be executed until it is no longer `0`. + */ + readonly reservoir?: number | null; + /** + * Every `reservoirRefreshInterval` milliseconds, the `reservoir` value will be automatically reset to `reservoirRefreshAmount`. + */ + readonly reservoirRefreshInterval?: number | null; + /** + * The value to reset `reservoir` to when `reservoirRefreshInterval` is in use. 
+ */ + readonly reservoirRefreshAmount?: number | null; + /** + * The increment applied to `reservoir` when `reservoirIncreaseInterval` is in use. + */ + readonly reservoirIncreaseAmount?: number | null; + /** + * Every `reservoirIncreaseInterval` milliseconds, the `reservoir` value will be automatically incremented by `reservoirIncreaseAmount`. + */ + readonly reservoirIncreaseInterval?: number | null; + /** + * The maximum value that `reservoir` can reach when `reservoirIncreaseInterval` is in use. + */ + readonly reservoirIncreaseMaximum?: number | null; + /** + * Optional identifier + */ + readonly id?: string | null; + /** + * Set to true to leave your failed jobs hanging instead of failing them. + */ + readonly rejectOnDrop?: boolean | null; + /** + * Set to true to keep track of done jobs with counts() and jobStatus(). Uses more memory. + */ + readonly trackDoneStatus?: boolean | null; + /** + * Where the limiter stores its internal state. The default (`local`) keeps the state in the limiter itself. Set it to `redis` to enable Clustering. + */ + readonly datastore?: string | null; + /** + * Override the Promise library used by Bottleneck. + */ + readonly Promise?: any; + /** + * This object is passed directly to the redis client library you've selected. + */ + readonly clientOptions?: any; + /** + * **ioredis only.** When `clusterNodes` is not null, the client will be instantiated by calling `new Redis.Cluster(clusterNodes, clientOptions)`. + */ + readonly clusterNodes?: any; + /** + * An existing Bottleneck.RedisConnection or Bottleneck.IORedisConnection object to use. + * If using, `datastore`, `clientOptions` and `clusterNodes` will be ignored. + */ + /** + * Optional Redis/IORedis library from `require('ioredis')` or equivalent. If not, Bottleneck will attempt to require Redis/IORedis at runtime. + */ + readonly Redis?: any; + /** + * Bottleneck connection object created from `new Bottleneck.RedisConnection` or `new Bottleneck.IORedisConnection`. 
+ */ + readonly connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection | null; + /** + * When set to `true`, on initial startup, the limiter will wipe any existing Bottleneck state data on the Redis db. + */ + readonly clearDatastore?: boolean | null; + /** + * The Redis TTL in milliseconds for the keys created by the limiter. When `timeout` is set, the limiter's state will be automatically removed from Redis after timeout milliseconds of inactivity. Note: timeout is 300000 (5 minutes) by default when using a Group. + */ + readonly timeout?: number | null; + /** + * Every `heartbeatInterval` milliseconds, the `reservoir` is assessed. + */ + readonly heartbeatInterval?: number | null; }; type JobOptions = { - /** - * A priority between `0` and `9`. A job with a priority of `4` will _always_ be executed before a job with a priority of `5`. - */ - readonly priority?: number | null; - /** - * Must be an integer equal to or higher than `0`. The `weight` is what increases the number of running jobs (up to `maxConcurrent`, if using) and decreases the `reservoir` value (if using). - */ - readonly weight?: number | null; - /** - * The number milliseconds a job has to finish. Jobs that take longer than their `expiration` will be failed with a `BottleneckError`. - */ - readonly expiration?: number | null; - /** - * Optional identifier, helps with debug output. - */ - readonly id?: string | null; + /** + * A priority between `0` and `9`. A job with a priority of `4` will _always_ be executed before a job with a priority of `5`. + */ + readonly priority?: number | null; + /** + * Must be an integer equal to or higher than `0`. The `weight` is what increases the number of running jobs (up to `maxConcurrent`, if using) and decreases the `reservoir` value (if using). + */ + readonly weight?: number | null; + /** + * The number milliseconds a job has to finish. Jobs that take longer than their `expiration` will be failed with a `BottleneckError`. 
+ */ + readonly expiration?: number | null; + /** + * Optional identifier, helps with debug output. + */ + readonly id?: string | null; }; type StopOptions = { - /** - * When `true`, drop all the RECEIVED, QUEUED and RUNNING jobs. When `false`, allow those jobs to complete before resolving the Promise returned by this method. - */ - readonly dropWaitingJobs?: boolean | null; - /** - * The error message used to drop jobs when `dropWaitingJobs` is `true`. - */ - readonly dropErrorMessage?: string | null; - /** - * The error message used to reject a job added to the limiter after `stop()` has been called. - */ - readonly enqueueErrorMessage?: string | null; + /** + * When `true`, drop all the RECEIVED, QUEUED and RUNNING jobs. When `false`, allow those jobs to complete before resolving the Promise returned by this method. + */ + readonly dropWaitingJobs?: boolean | null; + /** + * The error message used to drop jobs when `dropWaitingJobs` is `true`. + */ + readonly dropErrorMessage?: string | null; + /** + * The error message used to reject a job added to the limiter after `stop()` has been called. 
+ */ + readonly enqueueErrorMessage?: string | null; }; type Callback = (err: any, result: T) => void; type ClientsList = { client?: any; subscriber?: any }; @@ -136,279 +136,278 @@ declare module "bottleneck" { interface Strategy {} type EventInfo = { - readonly args: any[]; - readonly options: { - readonly id: string; - readonly priority: number; - readonly weight: number; - readonly expiration?: number; - }; + readonly args: any[]; + readonly options: { + readonly id: string; + readonly priority: number; + readonly weight: number; + readonly expiration?: number; + }; }; type EventInfoDropped = EventInfo & { - readonly task: Function; - readonly promise: Promise; + readonly task: Function; + readonly promise: Promise; }; type EventInfoQueued = EventInfo & { - readonly reachedHWM: boolean; - readonly blocked: boolean; + readonly reachedHWM: boolean; + readonly blocked: boolean; }; - type EventInfoRetryable = EventInfo & { readonly retryCount: number; }; + type EventInfoRetryable = EventInfo & { readonly retryCount: number }; enum Status { - RECEIVED = "RECEIVED", - QUEUED = "QUEUED", - RUNNING = "RUNNING", - EXECUTING = "EXECUTING", - DONE = "DONE" + RECEIVED = "RECEIVED", + QUEUED = "QUEUED", + RUNNING = "RUNNING", + EXECUTING = "EXECUTING", + DONE = "DONE", } type Counts = { - RECEIVED: number, - QUEUED: number, - RUNNING: number, - EXECUTING: number, - DONE?: number + RECEIVED: number; + QUEUED: number; + RUNNING: number; + EXECUTING: number; + DONE?: number; }; type RedisConnectionOptions = { - /** - * This object is passed directly to NodeRedis' createClient() method. - */ - readonly clientOptions?: any; - /** - * An existing NodeRedis client to use. If using, `clientOptions` will be ignored. - */ - readonly client?: any; - /** - * Optional Redis library from `require('redis')` or equivalent. If not, Bottleneck will attempt to require Redis at runtime. - */ - readonly Redis?: any; + /** + * This object is passed directly to NodeRedis' createClient() method. 
+ */ + readonly clientOptions?: any; + /** + * An existing NodeRedis client to use. If using, `clientOptions` will be ignored. + */ + readonly client?: any; + /** + * Optional Redis library from `require('redis')` or equivalent. If not, Bottleneck will attempt to require Redis at runtime. + */ + readonly Redis?: any; }; type IORedisConnectionOptions = { - /** - * This object is passed directly to ioredis' constructor method. - */ - readonly clientOptions?: any; - /** - * When `clusterNodes` is not null, the client will be instantiated by calling `new Redis.Cluster(clusterNodes, clientOptions)`. - */ - readonly clusterNodes?: any; - /** - * An existing ioredis client to use. If using, `clientOptions` and `clusterNodes` will be ignored. - */ - readonly client?: any; - /** - * Optional IORedis library from `require('ioredis')` or equivalent. If not, Bottleneck will attempt to require IORedis at runtime. - */ - readonly Redis?: any; + /** + * This object is passed directly to ioredis' constructor method. + */ + readonly clientOptions?: any; + /** + * When `clusterNodes` is not null, the client will be instantiated by calling `new Redis.Cluster(clusterNodes, clientOptions)`. + */ + readonly clusterNodes?: any; + /** + * An existing ioredis client to use. If using, `clientOptions` and `clusterNodes` will be ignored. + */ + readonly client?: any; + /** + * Optional IORedis library from `require('ioredis')` or equivalent. If not, Bottleneck will attempt to require IORedis at runtime. + */ + readonly Redis?: any; }; type BatcherOptions = { - /** - * Maximum acceptable time (in milliseconds) a request can have to wait before being flushed to the `"batch"` event. - */ - readonly maxTime?: number | null; - /** - * Maximum number of requests in a batch. - */ - readonly maxSize?: number | null; + /** + * Maximum acceptable time (in milliseconds) a request can have to wait before being flushed to the `"batch"` event. 
+ */ + readonly maxTime?: number | null; + /** + * Maximum number of requests in a batch. + */ + readonly maxSize?: number | null; }; - class BottleneckError extends Error { - } + class BottleneckError extends Error {} class RedisConnection { - constructor(options?: Bottleneck.RedisConnectionOptions); - - /** - * Register an event listener. - * @param name - The event name. - * @param fn - The callback function. - */ - on(name: "error", fn: (error: any) => void): void; - - /** - * Register an event listener for one event only. - * @param name - The event name. - * @param fn - The callback function. - */ - once(name: "error", fn: (error: any) => void): void; - - /** - * Waits until the connection is ready and returns the raw Node_Redis clients. - */ - ready(): Promise; - - /** - * Close the redis clients. - * @param flush - Write transient data before closing. - */ - disconnect(flush?: boolean): Promise; + constructor(options?: Bottleneck.RedisConnectionOptions); + + /** + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: "error", fn: (error: any) => void): void; + + /** + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: "error", fn: (error: any) => void): void; + + /** + * Waits until the connection is ready and returns the raw Node_Redis clients. + */ + ready(): Promise; + + /** + * Close the redis clients. + * @param flush - Write transient data before closing. + */ + disconnect(flush?: boolean): Promise; } class IORedisConnection { - constructor(options?: Bottleneck.IORedisConnectionOptions); - - /** - * Register an event listener. - * @param name - The event name. - * @param fn - The callback function. - */ - on(name: "error", fn: (error: any) => void): void; - - /** - * Register an event listener for one event only. - * @param name - The event name. - * @param fn - The callback function. 
- */ - once(name: "error", fn: (error: any) => void): void; - - /** - * Waits until the connection is ready and returns the raw ioredis clients. - */ - ready(): Promise; - - /** - * Close the redis clients. - * @param flush - Write transient data before closing. - */ - disconnect(flush?: boolean): Promise; + constructor(options?: Bottleneck.IORedisConnectionOptions); + + /** + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: "error", fn: (error: any) => void): void; + + /** + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: "error", fn: (error: any) => void): void; + + /** + * Waits until the connection is ready and returns the raw ioredis clients. + */ + ready(): Promise; + + /** + * Close the redis clients. + * @param flush - Write transient data before closing. + */ + disconnect(flush?: boolean): Promise; } class Batcher { - constructor(options?: Bottleneck.BatcherOptions); - - /** - * Register an event listener. - * @param name - The event name. - * @param fn - The callback function. - */ - on(name: string, fn: Function): void; - on(name: "error", fn: (error: any) => void): void; - on(name: "batch", fn: (batch: any[]) => void): void; - - /** - * Register an event listener for one event only. - * @param name - The event name. - * @param fn - The callback function. - */ - once(name: string, fn: Function): void; - once(name: "error", fn: (error: any) => void): void; - once(name: "batch", fn: (batch: any[]) => void): void; - - /** - * Add a request to the Batcher. Batches are flushed to the "batch" event. - */ - add(data: any): Promise; + constructor(options?: Bottleneck.BatcherOptions); + + /** + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. 
+ */ + on(name: string, fn: Function): void; + on(name: "error", fn: (error: any) => void): void; + on(name: "batch", fn: (batch: any[]) => void): void; + + /** + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: string, fn: Function): void; + once(name: "error", fn: (error: any) => void): void; + once(name: "batch", fn: (batch: any[]) => void): void; + + /** + * Add a request to the Batcher. Batches are flushed to the "batch" event. + */ + add(data: any): Promise; } class Group { - constructor(options?: Bottleneck.ConstructorOptions); - - id: string; - datastore: string; - connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection; - - /** - * Returns the limiter for the specified key. - * @param str - The limiter key. - */ - key(str: string): Bottleneck; - - /** - * Register an event listener. - * @param name - The event name. - * @param fn - The callback function. - */ - on(name: string, fn: Function): void; - on(name: "error", fn: (error: any) => void): void; - on(name: "created", fn: (limiter: Bottleneck, key: string) => void): void; - - /** - * Register an event listener for one event only. - * @param name - The event name. - * @param fn - The callback function. - */ - once(name: string, fn: Function): void; - once(name: "error", fn: (error: any) => void): void; - once(name: "created", fn: (limiter: Bottleneck, key: string) => void): void; - - /** - * Removes all registered event listeners. - * @param name - The optional event name to remove listeners from. - */ - removeAllListeners(name?: string): void; - - /** - * Updates the group settings. - * @param options - The new settings. - */ - updateSettings(options: Bottleneck.ConstructorOptions): void; - - /** - * Deletes the limiter for the given key. - * Returns true if a key was deleted. 
- * @param str - The key - */ - deleteKey(str: string): Promise; - - /** - * Disconnects the underlying redis clients, unless the Group was created with the `connection` option. - * @param flush - Write transient data before closing. - */ - disconnect(flush?: boolean): Promise; - - /** - * Returns all the key-limiter pairs. - */ - limiters(): Bottleneck.GroupLimiterPair[]; - - /** - * Returns all Group keys in the local instance - */ - keys(): string[]; - - /** - * Returns all Group keys in the Cluster - */ - clusterKeys(): Promise; + constructor(options?: Bottleneck.ConstructorOptions); + + id: string; + datastore: string; + connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection; + + /** + * Returns the limiter for the specified key. + * @param str - The limiter key. + */ + key(str: string): Bottleneck; + + /** + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: string, fn: Function): void; + on(name: "error", fn: (error: any) => void): void; + on(name: "created", fn: (limiter: Bottleneck, key: string) => void): void; + + /** + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: string, fn: Function): void; + once(name: "error", fn: (error: any) => void): void; + once(name: "created", fn: (limiter: Bottleneck, key: string) => void): void; + + /** + * Removes all registered event listeners. + * @param name - The optional event name to remove listeners from. + */ + removeAllListeners(name?: string): void; + + /** + * Updates the group settings. + * @param options - The new settings. + */ + updateSettings(options: Bottleneck.ConstructorOptions): void; + + /** + * Deletes the limiter for the given key. + * Returns true if a key was deleted. 
+ * @param str - The key + */ + deleteKey(str: string): Promise; + + /** + * Disconnects the underlying redis clients, unless the Group was created with the `connection` option. + * @param flush - Write transient data before closing. + */ + disconnect(flush?: boolean): Promise; + + /** + * Returns all the key-limiter pairs. + */ + limiters(): Bottleneck.GroupLimiterPair[]; + + /** + * Returns all Group keys in the local instance + */ + keys(): string[]; + + /** + * Returns all Group keys in the Cluster + */ + clusterKeys(): Promise; } class Events { - constructor(object: Object); - - /** - * Returns the number of limiters for the event name - * @param name - The event name. - */ - listenerCount(name: string): number; - - /** - * Returns a promise with the first non-null/non-undefined result from a listener - * @param name - The event name. - * @param args - The arguments to pass to the event listeners. - */ - trigger(name: string, ...args: any[]): Promise; + constructor(object: Object); + + /** + * Returns the number of limiters for the event name + * @param name - The event name. + */ + listenerCount(name: string): number; + + /** + * Returns a promise with the first non-null/non-undefined result from a listener + * @param name - The event name. + * @param args - The arguments to pass to the event listeners. + */ + trigger(name: string, ...args: any[]): Promise; } -} -class Bottleneck { + } + class Bottleneck { public static readonly strategy: { - /** - * When adding a new job to a limiter, if the queue length reaches `highWater`, drop the oldest job with the lowest priority. This is useful when jobs that have been waiting for too long are not important anymore. If all the queued jobs are more important (based on their `priority` value) than the one being added, it will not be added. - */ - readonly LEAK: Bottleneck.Strategy; - /** - * Same as `LEAK`, except it will only drop jobs that are less important than the one being added. 
If all the queued jobs are as or more important than the new one, it will not be added. - */ - readonly OVERFLOW_PRIORITY: Bottleneck.Strategy; - /** - * When adding a new job to a limiter, if the queue length reaches `highWater`, do not add the new job. This strategy totally ignores priority levels. - */ - readonly OVERFLOW: Bottleneck.Strategy; - /** - * When adding a new job to a limiter, if the queue length reaches `highWater`, the limiter falls into "blocked mode". All queued jobs are dropped and no new jobs will be accepted until the limiter unblocks. It will unblock after `penalty` milliseconds have passed without receiving a new job. `penalty` is equal to `15 * minTime` (or `5000` if `minTime` is `0`) by default and can be changed by calling `changePenalty()`. This strategy is ideal when bruteforce attacks are to be expected. This strategy totally ignores priority levels. - */ - readonly BLOCK: Bottleneck.Strategy; + /** + * When adding a new job to a limiter, if the queue length reaches `highWater`, drop the oldest job with the lowest priority. This is useful when jobs that have been waiting for too long are not important anymore. If all the queued jobs are more important (based on their `priority` value) than the one being added, it will not be added. + */ + readonly LEAK: Bottleneck.Strategy; + /** + * Same as `LEAK`, except it will only drop jobs that are less important than the one being added. If all the queued jobs are as or more important than the new one, it will not be added. + */ + readonly OVERFLOW_PRIORITY: Bottleneck.Strategy; + /** + * When adding a new job to a limiter, if the queue length reaches `highWater`, do not add the new job. This strategy totally ignores priority levels. + */ + readonly OVERFLOW: Bottleneck.Strategy; + /** + * When adding a new job to a limiter, if the queue length reaches `highWater`, the limiter falls into "blocked mode". All queued jobs are dropped and no new jobs will be accepted until the limiter unblocks. 
It will unblock after `penalty` milliseconds have passed without receiving a new job. `penalty` is equal to `15 * minTime` (or `5000` if `minTime` is `0`) by default and can be changed by calling `changePenalty()`. This strategy is ideal when bruteforce attacks are to be expected. This strategy totally ignores priority levels. + */ + readonly BLOCK: Bottleneck.Strategy; }; constructor(options?: Bottleneck.ConstructorOptions); @@ -418,161 +417,190 @@ class Bottleneck { connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection; /** - * Returns a promise which will be resolved once the limiter is ready to accept jobs - * or rejected if it fails to start up. - */ + * Returns a promise which will be resolved once the limiter is ready to accept jobs + * or rejected if it fails to start up. + */ ready(): Promise; /** - * Returns a datastore-specific object of redis clients. - */ + * Returns a datastore-specific object of redis clients. + */ clients(): Bottleneck.ClientsList; /** - * Returns the name of the Redis pubsub channel used for this limiter - */ + * Returns the name of the Redis pubsub channel used for this limiter + */ channel(): string; /** - * Disconnects the underlying redis clients, unless the limiter was created with the `connection` option. - * @param flush - Write transient data before closing. - */ + * Disconnects the underlying redis clients, unless the limiter was created with the `connection` option. + * @param flush - Write transient data before closing. + */ disconnect(flush?: boolean): Promise; /** - * Broadcast a string to every limiter in the Cluster. - */ + * Broadcast a string to every limiter in the Cluster. + */ publish(message: string): Promise; /** - * Returns an object with the current number of jobs per status. - */ + * Returns an object with the current number of jobs per status. + */ counts(): Bottleneck.Counts; /** - * Returns the status of the job with the provided job id. 
- */ + * Returns the status of the job with the provided job id. + */ jobStatus(id: string): Bottleneck.Status; /** - * Returns the status of the job with the provided job id. - */ + * Returns the status of the job with the provided job id. + */ jobs(status?: Bottleneck.Status): string[]; /** - * Returns the number of requests queued. - * @param priority - Returns the number of requests queued with the specified priority. - */ + * Returns the number of requests queued. + * @param priority - Returns the number of requests queued with the specified priority. + */ queued(priority?: number): number; /** - * Returns the number of requests queued across the Cluster. - */ + * Returns the number of requests queued across the Cluster. + */ clusterQueued(): Promise; /** - * Returns whether there are any jobs currently in the queue or in the process of being added to the queue. - */ + * Returns whether there are any jobs currently in the queue or in the process of being added to the queue. + */ empty(): boolean; /** - * Returns the total weight of jobs in a RUNNING or EXECUTING state in the Cluster. - */ + * Returns the total weight of jobs in a RUNNING or EXECUTING state in the Cluster. + */ running(): Promise; /** - * Returns the total weight of jobs in a DONE state in the Cluster. - */ + * Returns the total weight of jobs in a DONE state in the Cluster. + */ done(): Promise; /** - * If a request was added right now, would it be run immediately? - * @param weight - The weight of the request - */ + * If a request was added right now, would it be run immediately? + * @param weight - The weight of the request + */ check(weight?: number): Promise; /** - * Register an event listener. - * @param name - The event name. - * @param fn - The callback function. 
- */ - on(name: "error", fn: (error: any) => void): void; - on(name: "empty", fn: () => void): void; - on(name: "idle", fn: () => void): void; - on(name: "depleted", fn: (empty: boolean) => void): void; - on(name: "message", fn: (message: string) => void): void; - on(name: "debug", fn: (message: string, info: any) => void): void; - on(name: "dropped", fn: (dropped: Bottleneck.EventInfoDropped) => void): void; - on(name: "received", fn: (info: Bottleneck.EventInfo) => void): void; - on(name: "queued", fn: (info: Bottleneck.EventInfoQueued) => void): void; + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: "error", fn: (error: any) => void): void; + on(name: "empty", fn: () => void): void; + on(name: "idle", fn: () => void): void; + on(name: "depleted", fn: (empty: boolean) => void): void; + on(name: "message", fn: (message: string) => void): void; + on(name: "debug", fn: (message: string, info: any) => void): void; + on(name: "dropped", fn: (dropped: Bottleneck.EventInfoDropped) => void): void; + on(name: "received", fn: (info: Bottleneck.EventInfo) => void): void; + on(name: "queued", fn: (info: Bottleneck.EventInfoQueued) => void): void; on(name: "scheduled", fn: (info: Bottleneck.EventInfo) => void): void; on(name: "executing", fn: (info: Bottleneck.EventInfoRetryable) => void): void; - on(name: "failed", fn: (error: any, info: Bottleneck.EventInfoRetryable) => Promise | number | void | null): void; - on(name: "retry", fn: (message: string, info: Bottleneck.EventInfoRetryable) => void): void; - on(name: "done", fn: (info: Bottleneck.EventInfoRetryable) => void): void; + on( + name: "failed", + fn: ( + error: any, + info: Bottleneck.EventInfoRetryable, + ) => Promise | number | void | null, + ): void; + on(name: "retry", fn: (message: string, info: Bottleneck.EventInfoRetryable) => void): void; + on(name: "done", fn: (info: Bottleneck.EventInfoRetryable) => void): void; /** - * Register an event 
listener for one event only. - * @param name - The event name. - * @param fn - The callback function. - */ - once(name: "error", fn: (error: any) => void): void; - once(name: "empty", fn: () => void): void; - once(name: "idle", fn: () => void): void; - once(name: "depleted", fn: (empty: boolean) => void): void; - once(name: "message", fn: (message: string) => void): void; - once(name: "debug", fn: (message: string, info: any) => void): void; - once(name: "dropped", fn: (dropped: Bottleneck.EventInfoDropped) => void): void; - once(name: "received", fn: (info: Bottleneck.EventInfo) => void): void; - once(name: "queued", fn: (info: Bottleneck.EventInfoQueued) => void): void; + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: "error", fn: (error: any) => void): void; + once(name: "empty", fn: () => void): void; + once(name: "idle", fn: () => void): void; + once(name: "depleted", fn: (empty: boolean) => void): void; + once(name: "message", fn: (message: string) => void): void; + once(name: "debug", fn: (message: string, info: any) => void): void; + once(name: "dropped", fn: (dropped: Bottleneck.EventInfoDropped) => void): void; + once(name: "received", fn: (info: Bottleneck.EventInfo) => void): void; + once(name: "queued", fn: (info: Bottleneck.EventInfoQueued) => void): void; once(name: "scheduled", fn: (info: Bottleneck.EventInfo) => void): void; once(name: "executing", fn: (info: Bottleneck.EventInfoRetryable) => void): void; - once(name: "failed", fn: (error: any, info: Bottleneck.EventInfoRetryable) => Promise | number | void | null): void; - once(name: "retry", fn: (message: string, info: Bottleneck.EventInfoRetryable) => void): void; - once(name: "done", fn: (info: Bottleneck.EventInfoRetryable) => void): void; + once( + name: "failed", + fn: ( + error: any, + info: Bottleneck.EventInfoRetryable, + ) => Promise | number | void | null, + ): void; + once(name: "retry", fn: 
(message: string, info: Bottleneck.EventInfoRetryable) => void): void; + once(name: "done", fn: (info: Bottleneck.EventInfoRetryable) => void): void; /** - * Removes all registered event listeners. - * @param name - The optional event name to remove listeners from. - */ + * Removes all registered event listeners. + * @param name - The optional event name to remove listeners from. + */ removeAllListeners(name?: string): void; /** - * Changes the settings for future requests. - * @param options - The new settings. - */ + * Changes the settings for future requests. + * @param options - The new settings. + */ updateSettings(options?: Bottleneck.ConstructorOptions): Promise; /** - * Adds to the reservoir count and returns the new value. - */ + * Adds to the reservoir count and returns the new value. + */ incrementReservoir(incrementBy: number): Promise; /** - * The `stop()` method is used to safely shutdown a limiter. It prevents any new jobs from being added to the limiter and waits for all Executing jobs to complete. - */ + * The `stop()` method is used to safely shutdown a limiter. It prevents any new jobs from being added to the limiter and waits for all Executing jobs to complete. + */ stop(options?: Bottleneck.StopOptions): Promise; /** - * Returns the current reservoir count, if any. - */ + * Returns the current reservoir count, if any. + */ currentReservoir(): Promise; /** - * Chain this limiter to another. - * @param limiter - The limiter that requests to this limiter must also follow. - */ + * Chain this limiter to another. + * @param limiter - The limiter that requests to this limiter must also follow. 
+ */ chain(limiter?: Bottleneck): Bottleneck; - wrap(fn: (...args: Args) => PromiseLike): ((...args: Args) => Promise) & { withOptions: (options: Bottleneck.JobOptions, ...args: Args) => Promise; }; - - submit(fn: (...args: [...Args, Bottleneck.Callback]) => void, ...args: [...Args, Bottleneck.Callback]): void; - submit(options: Bottleneck.JobOptions, fn: (...args: [...Args, Bottleneck.Callback]) => void, ...args: [...Args, Bottleneck.Callback]): void; - - schedule(fn: (...args: Args) => PromiseLike, ...args: Args): Promise; - schedule(options: Bottleneck.JobOptions, fn: (...args: Args) => PromiseLike, ...args: Args): Promise; -} + wrap( + fn: (...args: Args) => PromiseLike, + ): ((...args: Args) => Promise) & { + withOptions: (options: Bottleneck.JobOptions, ...args: Args) => Promise; + }; -export default Bottleneck + submit( + fn: (...args: [...Args, Bottleneck.Callback]) => void, + ...args: [...Args, Bottleneck.Callback] + ): void; + submit( + options: Bottleneck.JobOptions, + fn: (...args: [...Args, Bottleneck.Callback]) => void, + ...args: [...Args, Bottleneck.Callback] + ): void; + + schedule( + fn: (...args: Args) => PromiseLike, + ...args: Args + ): Promise; + schedule( + options: Bottleneck.JobOptions, + fn: (...args: Args) => PromiseLike, + ...args: Args + ): Promise; + } + + export default Bottleneck; } - diff --git a/bower.json b/bower.json deleted file mode 100644 index b72e87e..0000000 --- a/bower.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "name": "bottleneck", - "main": "bottleneck.js", - "version": "2.19.5", - "homepage": "https://github.com/SGrondin/bottleneck", - "authors": [ - "SGrondin " - ], - "description": "Distributed task scheduler and rate limiter", - "moduleType": [ - "globals", - "node" - ], - "keywords": [ - "async", - "rate", - "limiter", - "limiting", - "throttle", - "throttling", - "load", - "ddos" - ], - "license": "MIT", - "ignore": [ - "**/.*", - "node_modules", - "bower_components" - ] -} diff --git a/es5.js b/es5.js deleted file 
mode 100644 index c58eaa0..0000000 --- a/es5.js +++ /dev/null @@ -1,5066 +0,0 @@ -/** - * This file contains the full Bottleneck library (MIT) compiled to ES5. - * https://github.com/SGrondin/bottleneck - * It also contains the regenerator-runtime (MIT), necessary for Babel-generated ES5 code to execute promise and async/await code. - * See the following link for Copyright and License information: - * https://github.com/facebook/regenerator/blob/master/packages/regenerator-runtime/runtime.js - */ -(function (global, factory) { - typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : - typeof define === 'function' && define.amd ? define(factory) : - (global.Bottleneck = factory()); -}(this, (function () { 'use strict'; - - var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {}; - - function createCommonjsModule(fn, module) { - return module = { exports: {} }, fn(module, module.exports), module.exports; - } - - function getCjsExportFromNamespace (n) { - return n && n['default'] || n; - } - - var runtime = createCommonjsModule(function (module) { - /** - * Copyright (c) 2014-present, Facebook, Inc. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. - */ - - !(function(global) { - - var Op = Object.prototype; - var hasOwn = Op.hasOwnProperty; - var undefined; // More compressible than void 0. - var $Symbol = typeof Symbol === "function" ? 
Symbol : {}; - var iteratorSymbol = $Symbol.iterator || "@@iterator"; - var asyncIteratorSymbol = $Symbol.asyncIterator || "@@asyncIterator"; - var toStringTagSymbol = $Symbol.toStringTag || "@@toStringTag"; - var runtime = global.regeneratorRuntime; - if (runtime) { - { - // If regeneratorRuntime is defined globally and we're in a module, - // make the exports object identical to regeneratorRuntime. - module.exports = runtime; - } - // Don't bother evaluating the rest of this file if the runtime was - // already defined globally. - return; - } - - // Define the runtime globally (as expected by generated code) as either - // module.exports (if we're in a module) or a new, empty object. - runtime = global.regeneratorRuntime = module.exports; - - function wrap(innerFn, outerFn, self, tryLocsList) { - // If outerFn provided and outerFn.prototype is a Generator, then outerFn.prototype instanceof Generator. - var protoGenerator = outerFn && outerFn.prototype instanceof Generator ? outerFn : Generator; - var generator = Object.create(protoGenerator.prototype); - var context = new Context(tryLocsList || []); - - // The ._invoke method unifies the implementations of the .next, - // .throw, and .return methods. - generator._invoke = makeInvokeMethod(innerFn, self, context); - - return generator; - } - runtime.wrap = wrap; - - // Try/catch helper to minimize deoptimizations. Returns a completion - // record like context.tryEntries[i].completion. This interface could - // have been (and was previously) designed to take a closure to be - // invoked without arguments, but in all the cases we care about we - // already have an existing method we want to call, so there's no need - // to create a new function object. We can even get away with assuming - // the method takes exactly one argument, since that happens to be true - // in every case, so we don't have to touch the arguments object. 
The - // only additional allocation required is the completion record, which - // has a stable shape and so hopefully should be cheap to allocate. - function tryCatch(fn, obj, arg) { - try { - return { type: "normal", arg: fn.call(obj, arg) }; - } catch (err) { - return { type: "throw", arg: err }; - } - } - - var GenStateSuspendedStart = "suspendedStart"; - var GenStateSuspendedYield = "suspendedYield"; - var GenStateExecuting = "executing"; - var GenStateCompleted = "completed"; - - // Returning this object from the innerFn has the same effect as - // breaking out of the dispatch switch statement. - var ContinueSentinel = {}; - - // Dummy constructor functions that we use as the .constructor and - // .constructor.prototype properties for functions that return Generator - // objects. For full spec compliance, you may wish to configure your - // minifier not to mangle the names of these two functions. - function Generator() {} - function GeneratorFunction() {} - function GeneratorFunctionPrototype() {} - - // This is a polyfill for %IteratorPrototype% for environments that - // don't natively support it. - var IteratorPrototype = {}; - IteratorPrototype[iteratorSymbol] = function () { - return this; - }; - - var getProto = Object.getPrototypeOf; - var NativeIteratorPrototype = getProto && getProto(getProto(values([]))); - if (NativeIteratorPrototype && - NativeIteratorPrototype !== Op && - hasOwn.call(NativeIteratorPrototype, iteratorSymbol)) { - // This environment has a native %IteratorPrototype%; use it instead - // of the polyfill. 
- IteratorPrototype = NativeIteratorPrototype; - } - - var Gp = GeneratorFunctionPrototype.prototype = - Generator.prototype = Object.create(IteratorPrototype); - GeneratorFunction.prototype = Gp.constructor = GeneratorFunctionPrototype; - GeneratorFunctionPrototype.constructor = GeneratorFunction; - GeneratorFunctionPrototype[toStringTagSymbol] = - GeneratorFunction.displayName = "GeneratorFunction"; - - // Helper for defining the .next, .throw, and .return methods of the - // Iterator interface in terms of a single ._invoke method. - function defineIteratorMethods(prototype) { - ["next", "throw", "return"].forEach(function(method) { - prototype[method] = function(arg) { - return this._invoke(method, arg); - }; - }); - } - - runtime.isGeneratorFunction = function(genFun) { - var ctor = typeof genFun === "function" && genFun.constructor; - return ctor - ? ctor === GeneratorFunction || - // For the native GeneratorFunction constructor, the best we can - // do is to check its .name property. - (ctor.displayName || ctor.name) === "GeneratorFunction" - : false; - }; - - runtime.mark = function(genFun) { - if (Object.setPrototypeOf) { - Object.setPrototypeOf(genFun, GeneratorFunctionPrototype); - } else { - genFun.__proto__ = GeneratorFunctionPrototype; - if (!(toStringTagSymbol in genFun)) { - genFun[toStringTagSymbol] = "GeneratorFunction"; - } - } - genFun.prototype = Object.create(Gp); - return genFun; - }; - - // Within the body of any async function, `await x` is transformed to - // `yield regeneratorRuntime.awrap(x)`, so that the runtime can test - // `hasOwn.call(value, "__await")` to determine if the yielded value is - // meant to be awaited. 
- runtime.awrap = function(arg) { - return { __await: arg }; - }; - - function AsyncIterator(generator) { - function invoke(method, arg, resolve, reject) { - var record = tryCatch(generator[method], generator, arg); - if (record.type === "throw") { - reject(record.arg); - } else { - var result = record.arg; - var value = result.value; - if (value && - typeof value === "object" && - hasOwn.call(value, "__await")) { - return Promise.resolve(value.__await).then(function(value) { - invoke("next", value, resolve, reject); - }, function(err) { - invoke("throw", err, resolve, reject); - }); - } - - return Promise.resolve(value).then(function(unwrapped) { - // When a yielded Promise is resolved, its final value becomes - // the .value of the Promise<{value,done}> result for the - // current iteration. - result.value = unwrapped; - resolve(result); - }, function(error) { - // If a rejected Promise was yielded, throw the rejection back - // into the async generator function so it can be handled there. - return invoke("throw", error, resolve, reject); - }); - } - } - - var previousPromise; - - function enqueue(method, arg) { - function callInvokeWithMethodAndArg() { - return new Promise(function(resolve, reject) { - invoke(method, arg, resolve, reject); - }); - } - - return previousPromise = - // If enqueue has been called before, then we want to wait until - // all previous Promises have been resolved before calling invoke, - // so that results are always delivered in the correct order. If - // enqueue has not been called before, then it is important to - // call invoke immediately, without waiting on a callback to fire, - // so that the async generator function has the opportunity to do - // any necessary setup in a predictable way. This predictability - // is why the Promise constructor synchronously invokes its - // executor callback, and why async functions synchronously - // execute code before the first await. 
Since we implement simple - // async functions in terms of async generators, it is especially - // important to get this right, even though it requires care. - previousPromise ? previousPromise.then( - callInvokeWithMethodAndArg, - // Avoid propagating failures to Promises returned by later - // invocations of the iterator. - callInvokeWithMethodAndArg - ) : callInvokeWithMethodAndArg(); - } - - // Define the unified helper method that is used to implement .next, - // .throw, and .return (see defineIteratorMethods). - this._invoke = enqueue; - } - - defineIteratorMethods(AsyncIterator.prototype); - AsyncIterator.prototype[asyncIteratorSymbol] = function () { - return this; - }; - runtime.AsyncIterator = AsyncIterator; - - // Note that simple async functions are implemented on top of - // AsyncIterator objects; they just return a Promise for the value of - // the final result produced by the iterator. - runtime.async = function(innerFn, outerFn, self, tryLocsList) { - var iter = new AsyncIterator( - wrap(innerFn, outerFn, self, tryLocsList) - ); - - return runtime.isGeneratorFunction(outerFn) - ? iter // If outerFn is a generator, return the full iterator. - : iter.next().then(function(result) { - return result.done ? 
result.value : iter.next(); - }); - }; - - function makeInvokeMethod(innerFn, self, context) { - var state = GenStateSuspendedStart; - - return function invoke(method, arg) { - if (state === GenStateExecuting) { - throw new Error("Generator is already running"); - } - - if (state === GenStateCompleted) { - if (method === "throw") { - throw arg; - } - - // Be forgiving, per 25.3.3.3.3 of the spec: - // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-generatorresume - return doneResult(); - } - - context.method = method; - context.arg = arg; - - while (true) { - var delegate = context.delegate; - if (delegate) { - var delegateResult = maybeInvokeDelegate(delegate, context); - if (delegateResult) { - if (delegateResult === ContinueSentinel) continue; - return delegateResult; - } - } - - if (context.method === "next") { - // Setting context._sent for legacy support of Babel's - // function.sent implementation. - context.sent = context._sent = context.arg; - - } else if (context.method === "throw") { - if (state === GenStateSuspendedStart) { - state = GenStateCompleted; - throw context.arg; - } - - context.dispatchException(context.arg); - - } else if (context.method === "return") { - context.abrupt("return", context.arg); - } - - state = GenStateExecuting; - - var record = tryCatch(innerFn, self, context); - if (record.type === "normal") { - // If an exception is thrown from innerFn, we leave state === - // GenStateExecuting and loop back for another invocation. - state = context.done - ? GenStateCompleted - : GenStateSuspendedYield; - - if (record.arg === ContinueSentinel) { - continue; - } - - return { - value: record.arg, - done: context.done - }; - - } else if (record.type === "throw") { - state = GenStateCompleted; - // Dispatch the exception by looping back around to the - // context.dispatchException(context.arg) call above. 
- context.method = "throw"; - context.arg = record.arg; - } - } - }; - } - - // Call delegate.iterator[context.method](context.arg) and handle the - // result, either by returning a { value, done } result from the - // delegate iterator, or by modifying context.method and context.arg, - // setting context.delegate to null, and returning the ContinueSentinel. - function maybeInvokeDelegate(delegate, context) { - var method = delegate.iterator[context.method]; - if (method === undefined) { - // A .throw or .return when the delegate iterator has no .throw - // method always terminates the yield* loop. - context.delegate = null; - - if (context.method === "throw") { - if (delegate.iterator.return) { - // If the delegate iterator has a return method, give it a - // chance to clean up. - context.method = "return"; - context.arg = undefined; - maybeInvokeDelegate(delegate, context); - - if (context.method === "throw") { - // If maybeInvokeDelegate(context) changed context.method from - // "return" to "throw", let that override the TypeError below. - return ContinueSentinel; - } - } - - context.method = "throw"; - context.arg = new TypeError( - "The iterator does not provide a 'throw' method"); - } - - return ContinueSentinel; - } - - var record = tryCatch(method, delegate.iterator, context.arg); - - if (record.type === "throw") { - context.method = "throw"; - context.arg = record.arg; - context.delegate = null; - return ContinueSentinel; - } - - var info = record.arg; - - if (! info) { - context.method = "throw"; - context.arg = new TypeError("iterator result is not an object"); - context.delegate = null; - return ContinueSentinel; - } - - if (info.done) { - // Assign the result of the finished delegate to the temporary - // variable specified by delegate.resultName (see delegateYield). - context[delegate.resultName] = info.value; - - // Resume execution at the desired location (see delegateYield). 
- context.next = delegate.nextLoc; - - // If context.method was "throw" but the delegate handled the - // exception, let the outer generator proceed normally. If - // context.method was "next", forget context.arg since it has been - // "consumed" by the delegate iterator. If context.method was - // "return", allow the original .return call to continue in the - // outer generator. - if (context.method !== "return") { - context.method = "next"; - context.arg = undefined; - } - - } else { - // Re-yield the result returned by the delegate method. - return info; - } - - // The delegate iterator is finished, so forget it and continue with - // the outer generator. - context.delegate = null; - return ContinueSentinel; - } - - // Define Generator.prototype.{next,throw,return} in terms of the - // unified ._invoke helper method. - defineIteratorMethods(Gp); - - Gp[toStringTagSymbol] = "Generator"; - - // A Generator should always return itself as the iterator object when the - // @@iterator function is called on it. Some browsers' implementations of the - // iterator prototype chain incorrectly implement this, causing the Generator - // object to not be returned from this call. This ensures that doesn't happen. - // See https://github.com/facebook/regenerator/issues/274 for more details. 
- Gp[iteratorSymbol] = function() { - return this; - }; - - Gp.toString = function() { - return "[object Generator]"; - }; - - function pushTryEntry(locs) { - var entry = { tryLoc: locs[0] }; - - if (1 in locs) { - entry.catchLoc = locs[1]; - } - - if (2 in locs) { - entry.finallyLoc = locs[2]; - entry.afterLoc = locs[3]; - } - - this.tryEntries.push(entry); - } - - function resetTryEntry(entry) { - var record = entry.completion || {}; - record.type = "normal"; - delete record.arg; - entry.completion = record; - } - - function Context(tryLocsList) { - // The root entry object (effectively a try statement without a catch - // or a finally block) gives us a place to store values thrown from - // locations where there is no enclosing try statement. - this.tryEntries = [{ tryLoc: "root" }]; - tryLocsList.forEach(pushTryEntry, this); - this.reset(true); - } - - runtime.keys = function(object) { - var keys = []; - for (var key in object) { - keys.push(key); - } - keys.reverse(); - - // Rather than returning an object with a next method, we keep - // things simple and return the next function itself. - return function next() { - while (keys.length) { - var key = keys.pop(); - if (key in object) { - next.value = key; - next.done = false; - return next; - } - } - - // To avoid creating an additional object, we just hang the .value - // and .done properties off the next function object itself. This - // also ensures that the minifier will not anonymize the function. 
- next.done = true; - return next; - }; - }; - - function values(iterable) { - if (iterable) { - var iteratorMethod = iterable[iteratorSymbol]; - if (iteratorMethod) { - return iteratorMethod.call(iterable); - } - - if (typeof iterable.next === "function") { - return iterable; - } - - if (!isNaN(iterable.length)) { - var i = -1, next = function next() { - while (++i < iterable.length) { - if (hasOwn.call(iterable, i)) { - next.value = iterable[i]; - next.done = false; - return next; - } - } - - next.value = undefined; - next.done = true; - - return next; - }; - - return next.next = next; - } - } - - // Return an iterator with no values. - return { next: doneResult }; - } - runtime.values = values; - - function doneResult() { - return { value: undefined, done: true }; - } - - Context.prototype = { - constructor: Context, - - reset: function(skipTempReset) { - this.prev = 0; - this.next = 0; - // Resetting context._sent for legacy support of Babel's - // function.sent implementation. - this.sent = this._sent = undefined; - this.done = false; - this.delegate = null; - - this.method = "next"; - this.arg = undefined; - - this.tryEntries.forEach(resetTryEntry); - - if (!skipTempReset) { - for (var name in this) { - // Not sure about the optimal order of these conditions: - if (name.charAt(0) === "t" && - hasOwn.call(this, name) && - !isNaN(+name.slice(1))) { - this[name] = undefined; - } - } - } - }, - - stop: function() { - this.done = true; - - var rootEntry = this.tryEntries[0]; - var rootRecord = rootEntry.completion; - if (rootRecord.type === "throw") { - throw rootRecord.arg; - } - - return this.rval; - }, - - dispatchException: function(exception) { - if (this.done) { - throw exception; - } - - var context = this; - function handle(loc, caught) { - record.type = "throw"; - record.arg = exception; - context.next = loc; - - if (caught) { - // If the dispatched exception was caught by a catch block, - // then let that catch block handle the exception normally. 
- context.method = "next"; - context.arg = undefined; - } - - return !! caught; - } - - for (var i = this.tryEntries.length - 1; i >= 0; --i) { - var entry = this.tryEntries[i]; - var record = entry.completion; - - if (entry.tryLoc === "root") { - // Exception thrown outside of any try block that could handle - // it, so set the completion value of the entire function to - // throw the exception. - return handle("end"); - } - - if (entry.tryLoc <= this.prev) { - var hasCatch = hasOwn.call(entry, "catchLoc"); - var hasFinally = hasOwn.call(entry, "finallyLoc"); - - if (hasCatch && hasFinally) { - if (this.prev < entry.catchLoc) { - return handle(entry.catchLoc, true); - } else if (this.prev < entry.finallyLoc) { - return handle(entry.finallyLoc); - } - - } else if (hasCatch) { - if (this.prev < entry.catchLoc) { - return handle(entry.catchLoc, true); - } - - } else if (hasFinally) { - if (this.prev < entry.finallyLoc) { - return handle(entry.finallyLoc); - } - - } else { - throw new Error("try statement without catch or finally"); - } - } - } - }, - - abrupt: function(type, arg) { - for (var i = this.tryEntries.length - 1; i >= 0; --i) { - var entry = this.tryEntries[i]; - if (entry.tryLoc <= this.prev && - hasOwn.call(entry, "finallyLoc") && - this.prev < entry.finallyLoc) { - var finallyEntry = entry; - break; - } - } - - if (finallyEntry && - (type === "break" || - type === "continue") && - finallyEntry.tryLoc <= arg && - arg <= finallyEntry.finallyLoc) { - // Ignore the finally entry if control is not jumping to a - // location outside the try/catch block. - finallyEntry = null; - } - - var record = finallyEntry ? 
finallyEntry.completion : {}; - record.type = type; - record.arg = arg; - - if (finallyEntry) { - this.method = "next"; - this.next = finallyEntry.finallyLoc; - return ContinueSentinel; - } - - return this.complete(record); - }, - - complete: function(record, afterLoc) { - if (record.type === "throw") { - throw record.arg; - } - - if (record.type === "break" || - record.type === "continue") { - this.next = record.arg; - } else if (record.type === "return") { - this.rval = this.arg = record.arg; - this.method = "return"; - this.next = "end"; - } else if (record.type === "normal" && afterLoc) { - this.next = afterLoc; - } - - return ContinueSentinel; - }, - - finish: function(finallyLoc) { - for (var i = this.tryEntries.length - 1; i >= 0; --i) { - var entry = this.tryEntries[i]; - if (entry.finallyLoc === finallyLoc) { - this.complete(entry.completion, entry.afterLoc); - resetTryEntry(entry); - return ContinueSentinel; - } - } - }, - - "catch": function(tryLoc) { - for (var i = this.tryEntries.length - 1; i >= 0; --i) { - var entry = this.tryEntries[i]; - if (entry.tryLoc === tryLoc) { - var record = entry.completion; - if (record.type === "throw") { - var thrown = record.arg; - resetTryEntry(entry); - } - return thrown; - } - } - - // The context.catch method must only be called with a location - // argument that corresponds to a known catch block. - throw new Error("illegal catch attempt"); - }, - - delegateYield: function(iterable, resultName, nextLoc) { - this.delegate = { - iterator: values(iterable), - resultName: resultName, - nextLoc: nextLoc - }; - - if (this.method === "next") { - // Deliberately forget the last sent value so that we don't - // accidentally pass it on to the delegate. - this.arg = undefined; - } - - return ContinueSentinel; - } - }; - })( - // In sloppy mode, unbound `this` refers to the global object, fallback to - // Function constructor if we're in global strict mode. 
That is sadly a form - // of indirect eval which violates Content Security Policy. - (function() { - return this || (typeof self === "object" && self); - })() || Function("return this")() - ); - }); - - function _typeof(obj) { - if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { - _typeof = function (obj) { - return typeof obj; - }; - } else { - _typeof = function (obj) { - return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; - }; - } - - return _typeof(obj); - } - - function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { - try { - var info = gen[key](arg); - var value = info.value; - } catch (error) { - reject(error); - return; - } - - if (info.done) { - resolve(value); - } else { - Promise.resolve(value).then(_next, _throw); - } - } - - function _asyncToGenerator(fn) { - return function () { - var self = this, - args = arguments; - return new Promise(function (resolve, reject) { - var gen = fn.apply(self, args); - - function _next(value) { - asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); - } - - function _throw(err) { - asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); - } - - _next(undefined); - }); - }; - } - - function _classCallCheck(instance, Constructor) { - if (!(instance instanceof Constructor)) { - throw new TypeError("Cannot call a class as a function"); - } - } - - function _defineProperties(target, props) { - for (var i = 0; i < props.length; i++) { - var descriptor = props[i]; - descriptor.enumerable = descriptor.enumerable || false; - descriptor.configurable = true; - if ("value" in descriptor) descriptor.writable = true; - Object.defineProperty(target, descriptor.key, descriptor); - } - } - - function _createClass(Constructor, protoProps, staticProps) { - if (protoProps) _defineProperties(Constructor.prototype, protoProps); - if (staticProps) _defineProperties(Constructor, staticProps); 
- return Constructor; - } - - function _inherits(subClass, superClass) { - if (typeof superClass !== "function" && superClass !== null) { - throw new TypeError("Super expression must either be null or a function"); - } - - subClass.prototype = Object.create(superClass && superClass.prototype, { - constructor: { - value: subClass, - writable: true, - configurable: true - } - }); - if (superClass) _setPrototypeOf(subClass, superClass); - } - - function _getPrototypeOf(o) { - _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { - return o.__proto__ || Object.getPrototypeOf(o); - }; - return _getPrototypeOf(o); - } - - function _setPrototypeOf(o, p) { - _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { - o.__proto__ = p; - return o; - }; - - return _setPrototypeOf(o, p); - } - - function isNativeReflectConstruct() { - if (typeof Reflect === "undefined" || !Reflect.construct) return false; - if (Reflect.construct.sham) return false; - if (typeof Proxy === "function") return true; - - try { - Date.prototype.toString.call(Reflect.construct(Date, [], function () {})); - return true; - } catch (e) { - return false; - } - } - - function _construct(Parent, args, Class) { - if (isNativeReflectConstruct()) { - _construct = Reflect.construct; - } else { - _construct = function _construct(Parent, args, Class) { - var a = [null]; - a.push.apply(a, args); - var Constructor = Function.bind.apply(Parent, a); - var instance = new Constructor(); - if (Class) _setPrototypeOf(instance, Class.prototype); - return instance; - }; - } - - return _construct.apply(null, arguments); - } - - function _isNativeFunction(fn) { - return Function.toString.call(fn).indexOf("[native code]") !== -1; - } - - function _wrapNativeSuper(Class) { - var _cache = typeof Map === "function" ? 
new Map() : undefined; - - _wrapNativeSuper = function _wrapNativeSuper(Class) { - if (Class === null || !_isNativeFunction(Class)) return Class; - - if (typeof Class !== "function") { - throw new TypeError("Super expression must either be null or a function"); - } - - if (typeof _cache !== "undefined") { - if (_cache.has(Class)) return _cache.get(Class); - - _cache.set(Class, Wrapper); - } - - function Wrapper() { - return _construct(Class, arguments, _getPrototypeOf(this).constructor); - } - - Wrapper.prototype = Object.create(Class.prototype, { - constructor: { - value: Wrapper, - enumerable: false, - writable: true, - configurable: true - } - }); - return _setPrototypeOf(Wrapper, Class); - }; - - return _wrapNativeSuper(Class); - } - - function _assertThisInitialized(self) { - if (self === void 0) { - throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); - } - - return self; - } - - function _possibleConstructorReturn(self, call) { - if (call && (typeof call === "object" || typeof call === "function")) { - return call; - } - - return _assertThisInitialized(self); - } - - function _slicedToArray(arr, i) { - return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); - } - - function _toArray(arr) { - return _arrayWithHoles(arr) || _iterableToArray(arr) || _nonIterableRest(); - } - - function _toConsumableArray(arr) { - return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread(); - } - - function _arrayWithoutHoles(arr) { - if (Array.isArray(arr)) { - for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) arr2[i] = arr[i]; - - return arr2; - } - } - - function _arrayWithHoles(arr) { - if (Array.isArray(arr)) return arr; - } - - function _iterableToArray(iter) { - if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); - } - - function _iterableToArrayLimit(arr, i) { - var _arr = []; - var _n = true; - var 
_d = false; - var _e = undefined; - - try { - for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { - _arr.push(_s.value); - - if (i && _arr.length === i) break; - } - } catch (err) { - _d = true; - _e = err; - } finally { - try { - if (!_n && _i["return"] != null) _i["return"](); - } finally { - if (_d) throw _e; - } - } - - return _arr; - } - - function _nonIterableSpread() { - throw new TypeError("Invalid attempt to spread non-iterable instance"); - } - - function _nonIterableRest() { - throw new TypeError("Invalid attempt to destructure non-iterable instance"); - } - - var load = function load(received, defaults) { - var onto = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; - var k, ref, v; - - for (k in defaults) { - v = defaults[k]; - onto[k] = (ref = received[k]) != null ? ref : v; - } - - return onto; - }; - - var overwrite = function overwrite(received, defaults) { - var onto = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : {}; - var k, v; - - for (k in received) { - v = received[k]; - - if (defaults[k] !== void 0) { - onto[k] = v; - } - } - - return onto; - }; - - var parser = { - load: load, - overwrite: overwrite - }; - - var DLList; - - DLList = - /*#__PURE__*/ - function () { - function DLList(incr, decr) { - _classCallCheck(this, DLList); - - this.incr = incr; - this.decr = decr; - this._first = null; - this._last = null; - this.length = 0; - } - - _createClass(DLList, [{ - key: "push", - value: function push(value) { - var node; - this.length++; - - if (typeof this.incr === "function") { - this.incr(); - } - - node = { - value: value, - prev: this._last, - next: null - }; - - if (this._last != null) { - this._last.next = node; - this._last = node; - } else { - this._first = this._last = node; - } - - return void 0; - } - }, { - key: "shift", - value: function shift() { - var value; - - if (this._first == null) { - return; - } else { - this.length--; - - if (typeof this.decr === "function") { - this.decr(); - } - } - - value = this._first.value; - - if ((this._first = this._first.next) != null) { - this._first.prev = null; - } else { - this._last = null; - } - - return value; - } - }, { - key: "first", - value: function first() { - if (this._first != null) { - return this._first.value; - } - } - }, { - key: "getArray", - value: function getArray() { - var node, ref, results; - node = this._first; - results = []; - - while (node != null) { - results.push((ref = node, node = node.next, ref.value)); - } - - return results; - } - }, { - key: "forEachShift", - value: function forEachShift(cb) { - var node; - node = this.shift(); - - while (node != null) { - cb(node), node = this.shift(); - } - - return void 0; - } - }, { - key: "debug", - value: function debug() { - var node, ref, ref1, ref2, results; - node = this._first; - results = []; - - while (node != null) { - results.push((ref = node, node = node.next, { - value: ref.value, - prev: (ref1 = ref.prev) != null ? 
ref1.value : void 0, - next: (ref2 = ref.next) != null ? ref2.value : void 0 - })); - } - - return results; - } - }]); - - return DLList; - }(); - - var DLList_1 = DLList; - - var Events; - - Events = - /*#__PURE__*/ - function () { - function Events(instance) { - var _this = this; - - _classCallCheck(this, Events); - - this.instance = instance; - this._events = {}; - - if (this.instance.on != null || this.instance.once != null || this.instance.removeAllListeners != null) { - throw new Error("An Emitter already exists for this object"); - } - - this.instance.on = function (name, cb) { - return _this._addListener(name, "many", cb); - }; - - this.instance.once = function (name, cb) { - return _this._addListener(name, "once", cb); - }; - - this.instance.removeAllListeners = function () { - var name = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - - if (name != null) { - return delete _this._events[name]; - } else { - return _this._events = {}; - } - }; - } - - _createClass(Events, [{ - key: "_addListener", - value: function _addListener(name, status, cb) { - var base; - - if ((base = this._events)[name] == null) { - base[name] = []; - } - - this._events[name].push({ - cb: cb, - status: status - }); - - return this.instance; - } - }, { - key: "listenerCount", - value: function listenerCount(name) { - if (this._events[name] != null) { - return this._events[name].length; - } else { - return 0; - } - } - }, { - key: "trigger", - value: function () { - var _trigger = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee2(name) { - var _this2 = this; - - var _len, - args, - _key, - e, - promises, - _args2 = arguments; - - return regeneratorRuntime.wrap(function _callee2$(_context2) { - while (1) { - switch (_context2.prev = _context2.next) { - case 0: - for (_len = _args2.length, args = new Array(_len > 1 ? 
_len - 1 : 0), _key = 1; _key < _len; _key++) { - args[_key - 1] = _args2[_key]; - } - - _context2.prev = 1; - - if (name !== "debug") { - this.trigger("debug", "Event triggered: ".concat(name), args); - } - - if (!(this._events[name] == null)) { - _context2.next = 5; - break; - } - - return _context2.abrupt("return"); - - case 5: - this._events[name] = this._events[name].filter(function (listener) { - return listener.status !== "none"; - }); - promises = this._events[name].map( - /*#__PURE__*/ - function () { - var _ref = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee(listener) { - var e, returned; - return regeneratorRuntime.wrap(function _callee$(_context) { - while (1) { - switch (_context.prev = _context.next) { - case 0: - if (!(listener.status === "none")) { - _context.next = 2; - break; - } - - return _context.abrupt("return"); - - case 2: - if (listener.status === "once") { - listener.status = "none"; - } - - _context.prev = 3; - returned = typeof listener.cb === "function" ? listener.cb.apply(listener, args) : void 0; - - if (!(typeof (returned != null ? 
returned.then : void 0) === "function")) { - _context.next = 11; - break; - } - - _context.next = 8; - return returned; - - case 8: - return _context.abrupt("return", _context.sent); - - case 11: - return _context.abrupt("return", returned); - - case 12: - _context.next = 19; - break; - - case 14: - _context.prev = 14; - _context.t0 = _context["catch"](3); - e = _context.t0; - - { - _this2.trigger("error", e); - } - - return _context.abrupt("return", null); - - case 19: - case "end": - return _context.stop(); - } - } - }, _callee, null, [[3, 14]]); - })); - - return function (_x2) { - return _ref.apply(this, arguments); - }; - }()); - _context2.next = 9; - return Promise.all(promises); - - case 9: - _context2.t0 = function (x) { - return x != null; - }; - - return _context2.abrupt("return", _context2.sent.find(_context2.t0)); - - case 13: - _context2.prev = 13; - _context2.t1 = _context2["catch"](1); - e = _context2.t1; - - { - this.trigger("error", e); - } - - return _context2.abrupt("return", null); - - case 18: - case "end": - return _context2.stop(); - } - } - }, _callee2, this, [[1, 13]]); - })); - - function trigger(_x) { - return _trigger.apply(this, arguments); - } - - return trigger; - }() - }]); - - return Events; - }(); - - var Events_1 = Events; - - var DLList$1, Events$1, Queues; - DLList$1 = DLList_1; - Events$1 = Events_1; - - Queues = - /*#__PURE__*/ - function () { - function Queues(num_priorities) { - _classCallCheck(this, Queues); - - var i; - this.Events = new Events$1(this); - this._length = 0; - - this._lists = function () { - var _this = this; - - var j, ref, results; - results = []; - - for (i = j = 1, ref = num_priorities; 1 <= ref ? j <= ref : j >= ref; i = 1 <= ref ? 
++j : --j) { - results.push(new DLList$1(function () { - return _this.incr(); - }, function () { - return _this.decr(); - })); - } - - return results; - }.call(this); - } - - _createClass(Queues, [{ - key: "incr", - value: function incr() { - if (this._length++ === 0) { - return this.Events.trigger("leftzero"); - } - } - }, { - key: "decr", - value: function decr() { - if (--this._length === 0) { - return this.Events.trigger("zero"); - } - } - }, { - key: "push", - value: function push(job) { - return this._lists[job.options.priority].push(job); - } - }, { - key: "queued", - value: function queued(priority) { - if (priority != null) { - return this._lists[priority].length; - } else { - return this._length; - } - } - }, { - key: "shiftAll", - value: function shiftAll(fn) { - return this._lists.forEach(function (list) { - return list.forEachShift(fn); - }); - } - }, { - key: "getFirst", - value: function getFirst() { - var arr = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this._lists; - var j, len, list; - - for (j = 0, len = arr.length; j < len; j++) { - list = arr[j]; - - if (list.length > 0) { - return list; - } - } - - return []; - } - }, { - key: "shiftLastFrom", - value: function shiftLastFrom(priority) { - return this.getFirst(this._lists.slice(priority).reverse()).shift(); - } - }]); - - return Queues; - }(); - - var Queues_1 = Queues; - - var BottleneckError; - - BottleneckError = - /*#__PURE__*/ - function (_Error) { - _inherits(BottleneckError, _Error); - - function BottleneckError() { - _classCallCheck(this, BottleneckError); - - return _possibleConstructorReturn(this, _getPrototypeOf(BottleneckError).apply(this, arguments)); - } - - return BottleneckError; - }(_wrapNativeSuper(Error)); - - var BottleneckError_1 = BottleneckError; - - var BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1; - NUM_PRIORITIES = 10; - DEFAULT_PRIORITY = 5; - parser$1 = parser; - BottleneckError$1 = BottleneckError_1; - - Job = - 
/*#__PURE__*/ - function () { - function Job(task, args, options, jobDefaults, rejectOnDrop, Events, _states, Promise) { - var _this = this; - - _classCallCheck(this, Job); - - this.task = task; - this.args = args; - this.rejectOnDrop = rejectOnDrop; - this.Events = Events; - this._states = _states; - this.Promise = Promise; - this.options = parser$1.load(options, jobDefaults); - this.options.priority = this._sanitizePriority(this.options.priority); - - if (this.options.id === jobDefaults.id) { - this.options.id = "".concat(this.options.id, "-").concat(this._randomIndex()); - } - - this.promise = new this.Promise(function (_resolve, _reject) { - _this._resolve = _resolve; - _this._reject = _reject; - }); - this.retryCount = 0; - } - - _createClass(Job, [{ - key: "_sanitizePriority", - value: function _sanitizePriority(priority) { - var sProperty; - sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority; - - if (sProperty < 0) { - return 0; - } else if (sProperty > NUM_PRIORITIES - 1) { - return NUM_PRIORITIES - 1; - } else { - return sProperty; - } - } - }, { - key: "_randomIndex", - value: function _randomIndex() { - return Math.random().toString(36).slice(2); - } - }, { - key: "doDrop", - value: function doDrop() { - var _ref = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}, - error = _ref.error, - _ref$message = _ref.message, - message = _ref$message === void 0 ? "This job has been dropped by Bottleneck" : _ref$message; - - if (this._states.remove(this.options.id)) { - if (this.rejectOnDrop) { - this._reject(error != null ? 
error : new BottleneckError$1(message)); - } - - this.Events.trigger("dropped", { - args: this.args, - options: this.options, - task: this.task, - promise: this.promise - }); - return true; - } else { - return false; - } - } - }, { - key: "_assertStatus", - value: function _assertStatus(expected) { - var status; - status = this._states.jobStatus(this.options.id); - - if (!(status === expected || expected === "DONE" && status === null)) { - throw new BottleneckError$1("Invalid job status ".concat(status, ", expected ").concat(expected, ". Please open an issue at https://github.com/SGrondin/bottleneck/issues")); - } - } - }, { - key: "doReceive", - value: function doReceive() { - this._states.start(this.options.id); - - return this.Events.trigger("received", { - args: this.args, - options: this.options - }); - } - }, { - key: "doQueue", - value: function doQueue(reachedHWM, blocked) { - this._assertStatus("RECEIVED"); - - this._states.next(this.options.id); - - return this.Events.trigger("queued", { - args: this.args, - options: this.options, - reachedHWM: reachedHWM, - blocked: blocked - }); - } - }, { - key: "doRun", - value: function doRun() { - if (this.retryCount === 0) { - this._assertStatus("QUEUED"); - - this._states.next(this.options.id); - } else { - this._assertStatus("EXECUTING"); - } - - return this.Events.trigger("scheduled", { - args: this.args, - options: this.options - }); - } - }, { - key: "doExecute", - value: function () { - var _doExecute = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee(chained, clearGlobalState, run, free) { - var error, eventInfo, passed; - return regeneratorRuntime.wrap(function _callee$(_context) { - while (1) { - switch (_context.prev = _context.next) { - case 0: - if (this.retryCount === 0) { - this._assertStatus("RUNNING"); - - this._states.next(this.options.id); - } else { - this._assertStatus("EXECUTING"); - } - - eventInfo = { - args: this.args, - options: this.options, - retryCount: 
this.retryCount - }; - this.Events.trigger("executing", eventInfo); - _context.prev = 3; - _context.next = 6; - return chained != null ? chained.schedule.apply(chained, [this.options, this.task].concat(_toConsumableArray(this.args))) : this.task.apply(this, _toConsumableArray(this.args)); - - case 6: - passed = _context.sent; - - if (!clearGlobalState()) { - _context.next = 13; - break; - } - - this.doDone(eventInfo); - _context.next = 11; - return free(this.options, eventInfo); - - case 11: - this._assertStatus("DONE"); - - return _context.abrupt("return", this._resolve(passed)); - - case 13: - _context.next = 19; - break; - - case 15: - _context.prev = 15; - _context.t0 = _context["catch"](3); - error = _context.t0; - return _context.abrupt("return", this._onFailure(error, eventInfo, clearGlobalState, run, free)); - - case 19: - case "end": - return _context.stop(); - } - } - }, _callee, this, [[3, 15]]); - })); - - function doExecute(_x, _x2, _x3, _x4) { - return _doExecute.apply(this, arguments); - } - - return doExecute; - }() - }, { - key: "doExpire", - value: function doExpire(clearGlobalState, run, free) { - var error, eventInfo; - - if (this._states.jobStatus(this.options.id === "RUNNING")) { - this._states.next(this.options.id); - } - - this._assertStatus("EXECUTING"); - - eventInfo = { - args: this.args, - options: this.options, - retryCount: this.retryCount - }; - error = new BottleneckError$1("This job timed out after ".concat(this.options.expiration, " ms.")); - return this._onFailure(error, eventInfo, clearGlobalState, run, free); - } - }, { - key: "_onFailure", - value: function () { - var _onFailure2 = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee2(error, eventInfo, clearGlobalState, run, free) { - var retry, retryAfter; - return regeneratorRuntime.wrap(function _callee2$(_context2) { - while (1) { - switch (_context2.prev = _context2.next) { - case 0: - if (!clearGlobalState()) { - _context2.next = 16; - break; - } 
- - _context2.next = 3; - return this.Events.trigger("failed", error, eventInfo); - - case 3: - retry = _context2.sent; - - if (!(retry != null)) { - _context2.next = 11; - break; - } - - retryAfter = ~~retry; - this.Events.trigger("retry", "Retrying ".concat(this.options.id, " after ").concat(retryAfter, " ms"), eventInfo); - this.retryCount++; - return _context2.abrupt("return", run(retryAfter)); - - case 11: - this.doDone(eventInfo); - _context2.next = 14; - return free(this.options, eventInfo); - - case 14: - this._assertStatus("DONE"); - - return _context2.abrupt("return", this._reject(error)); - - case 16: - case "end": - return _context2.stop(); - } - } - }, _callee2, this); - })); - - function _onFailure(_x5, _x6, _x7, _x8, _x9) { - return _onFailure2.apply(this, arguments); - } - - return _onFailure; - }() - }, { - key: "doDone", - value: function doDone(eventInfo) { - this._assertStatus("EXECUTING"); - - this._states.next(this.options.id); - - return this.Events.trigger("done", eventInfo); - } - }]); - - return Job; - }(); - - var Job_1 = Job; - - var BottleneckError$2, LocalDatastore, parser$2; - parser$2 = parser; - BottleneckError$2 = BottleneckError_1; - - LocalDatastore = - /*#__PURE__*/ - function () { - function LocalDatastore(instance, storeOptions, storeInstanceOptions) { - _classCallCheck(this, LocalDatastore); - - this.instance = instance; - this.storeOptions = storeOptions; - this.clientId = this.instance._randomIndex(); - parser$2.load(storeInstanceOptions, storeInstanceOptions, this); - this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); - this._running = 0; - this._done = 0; - this._unblockTime = 0; - this.ready = this.Promise.resolve(); - this.clients = {}; - - this._startHeartbeat(); - } - - _createClass(LocalDatastore, [{ - key: "_startHeartbeat", - value: function _startHeartbeat() { - var _this = this; - - var base; - - if (this.heartbeat != null) { - clearInterval(this.heartbeat); - } - - if 
(this.storeOptions.reservoirRefreshInterval != null && this.storeOptions.reservoirRefreshAmount != null || this.storeOptions.reservoirIncreaseInterval != null && this.storeOptions.reservoirIncreaseAmount != null) { - return typeof (base = this.heartbeat = setInterval(function () { - var amount, incr, maximum, now, reservoir; - now = Date.now(); - - if (_this.storeOptions.reservoirRefreshInterval != null && now >= _this._lastReservoirRefresh + _this.storeOptions.reservoirRefreshInterval) { - _this._lastReservoirRefresh = now; - _this.storeOptions.reservoir = _this.storeOptions.reservoirRefreshAmount; - - _this.instance._drainAll(_this.computeCapacity()); - } - - if (_this.storeOptions.reservoirIncreaseInterval != null && now >= _this._lastReservoirIncrease + _this.storeOptions.reservoirIncreaseInterval) { - var _this$storeOptions = _this.storeOptions; - amount = _this$storeOptions.reservoirIncreaseAmount; - maximum = _this$storeOptions.reservoirIncreaseMaximum; - reservoir = _this$storeOptions.reservoir; - _this._lastReservoirIncrease = now; - incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; - - if (incr > 0) { - _this.storeOptions.reservoir += incr; - return _this.instance._drainAll(_this.computeCapacity()); - } - } - }, this.heartbeatInterval)).unref === "function" ? 
base.unref() : void 0; - } - } - }, { - key: "__publish__", - value: function () { - var _publish__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee(message) { - return regeneratorRuntime.wrap(function _callee$(_context) { - while (1) { - switch (_context.prev = _context.next) { - case 0: - _context.next = 2; - return this.yieldLoop(); - - case 2: - return _context.abrupt("return", this.instance.Events.trigger("message", message.toString())); - - case 3: - case "end": - return _context.stop(); - } - } - }, _callee, this); - })); - - function __publish__(_x) { - return _publish__.apply(this, arguments); - } - - return __publish__; - }() - }, { - key: "__disconnect__", - value: function () { - var _disconnect__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee2(flush) { - return regeneratorRuntime.wrap(function _callee2$(_context2) { - while (1) { - switch (_context2.prev = _context2.next) { - case 0: - _context2.next = 2; - return this.yieldLoop(); - - case 2: - clearInterval(this.heartbeat); - return _context2.abrupt("return", this.Promise.resolve()); - - case 4: - case "end": - return _context2.stop(); - } - } - }, _callee2, this); - })); - - function __disconnect__(_x2) { - return _disconnect__.apply(this, arguments); - } - - return __disconnect__; - }() - }, { - key: "yieldLoop", - value: function yieldLoop() { - var t = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; - return new this.Promise(function (resolve, reject) { - return setTimeout(resolve, t); - }); - } - }, { - key: "computePenalty", - value: function computePenalty() { - var ref; - return (ref = this.storeOptions.penalty) != null ? 
ref : 15 * this.storeOptions.minTime || 5000; - } - }, { - key: "__updateSettings__", - value: function () { - var _updateSettings__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee3(options) { - return regeneratorRuntime.wrap(function _callee3$(_context3) { - while (1) { - switch (_context3.prev = _context3.next) { - case 0: - _context3.next = 2; - return this.yieldLoop(); - - case 2: - parser$2.overwrite(options, options, this.storeOptions); - - this._startHeartbeat(); - - this.instance._drainAll(this.computeCapacity()); - - return _context3.abrupt("return", true); - - case 6: - case "end": - return _context3.stop(); - } - } - }, _callee3, this); - })); - - function __updateSettings__(_x3) { - return _updateSettings__.apply(this, arguments); - } - - return __updateSettings__; - }() - }, { - key: "__running__", - value: function () { - var _running__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee4() { - return regeneratorRuntime.wrap(function _callee4$(_context4) { - while (1) { - switch (_context4.prev = _context4.next) { - case 0: - _context4.next = 2; - return this.yieldLoop(); - - case 2: - return _context4.abrupt("return", this._running); - - case 3: - case "end": - return _context4.stop(); - } - } - }, _callee4, this); - })); - - function __running__() { - return _running__.apply(this, arguments); - } - - return __running__; - }() - }, { - key: "__queued__", - value: function () { - var _queued__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee5() { - return regeneratorRuntime.wrap(function _callee5$(_context5) { - while (1) { - switch (_context5.prev = _context5.next) { - case 0: - _context5.next = 2; - return this.yieldLoop(); - - case 2: - return _context5.abrupt("return", this.instance.queued()); - - case 3: - case "end": - return _context5.stop(); - } - } - }, _callee5, this); - })); - - function __queued__() { - return _queued__.apply(this, arguments); - } - 
- return __queued__; - }() - }, { - key: "__done__", - value: function () { - var _done__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee6() { - return regeneratorRuntime.wrap(function _callee6$(_context6) { - while (1) { - switch (_context6.prev = _context6.next) { - case 0: - _context6.next = 2; - return this.yieldLoop(); - - case 2: - return _context6.abrupt("return", this._done); - - case 3: - case "end": - return _context6.stop(); - } - } - }, _callee6, this); - })); - - function __done__() { - return _done__.apply(this, arguments); - } - - return __done__; - }() - }, { - key: "__groupCheck__", - value: function () { - var _groupCheck__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee7(time) { - return regeneratorRuntime.wrap(function _callee7$(_context7) { - while (1) { - switch (_context7.prev = _context7.next) { - case 0: - _context7.next = 2; - return this.yieldLoop(); - - case 2: - return _context7.abrupt("return", this._nextRequest + this.timeout < time); - - case 3: - case "end": - return _context7.stop(); - } - } - }, _callee7, this); - })); - - function __groupCheck__(_x4) { - return _groupCheck__.apply(this, arguments); - } - - return __groupCheck__; - }() - }, { - key: "computeCapacity", - value: function computeCapacity() { - var maxConcurrent, reservoir; - var _this$storeOptions2 = this.storeOptions; - maxConcurrent = _this$storeOptions2.maxConcurrent; - reservoir = _this$storeOptions2.reservoir; - - if (maxConcurrent != null && reservoir != null) { - return Math.min(maxConcurrent - this._running, reservoir); - } else if (maxConcurrent != null) { - return maxConcurrent - this._running; - } else if (reservoir != null) { - return reservoir; - } else { - return null; - } - } - }, { - key: "conditionsCheck", - value: function conditionsCheck(weight) { - var capacity; - capacity = this.computeCapacity(); - return capacity == null || weight <= capacity; - } - }, { - key: 
"__incrementReservoir__", - value: function () { - var _incrementReservoir__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee8(incr) { - var reservoir; - return regeneratorRuntime.wrap(function _callee8$(_context8) { - while (1) { - switch (_context8.prev = _context8.next) { - case 0: - _context8.next = 2; - return this.yieldLoop(); - - case 2: - reservoir = this.storeOptions.reservoir += incr; - - this.instance._drainAll(this.computeCapacity()); - - return _context8.abrupt("return", reservoir); - - case 5: - case "end": - return _context8.stop(); - } - } - }, _callee8, this); - })); - - function __incrementReservoir__(_x5) { - return _incrementReservoir__.apply(this, arguments); - } - - return __incrementReservoir__; - }() - }, { - key: "__currentReservoir__", - value: function () { - var _currentReservoir__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee9() { - return regeneratorRuntime.wrap(function _callee9$(_context9) { - while (1) { - switch (_context9.prev = _context9.next) { - case 0: - _context9.next = 2; - return this.yieldLoop(); - - case 2: - return _context9.abrupt("return", this.storeOptions.reservoir); - - case 3: - case "end": - return _context9.stop(); - } - } - }, _callee9, this); - })); - - function __currentReservoir__() { - return _currentReservoir__.apply(this, arguments); - } - - return __currentReservoir__; - }() - }, { - key: "isBlocked", - value: function isBlocked(now) { - return this._unblockTime >= now; - } - }, { - key: "check", - value: function check(weight, now) { - return this.conditionsCheck(weight) && this._nextRequest - now <= 0; - } - }, { - key: "__check__", - value: function () { - var _check__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee10(weight) { - var now; - return regeneratorRuntime.wrap(function _callee10$(_context10) { - while (1) { - switch (_context10.prev = _context10.next) { - case 0: - _context10.next = 2; - return 
this.yieldLoop(); - - case 2: - now = Date.now(); - return _context10.abrupt("return", this.check(weight, now)); - - case 4: - case "end": - return _context10.stop(); - } - } - }, _callee10, this); - })); - - function __check__(_x6) { - return _check__.apply(this, arguments); - } - - return __check__; - }() - }, { - key: "__register__", - value: function () { - var _register__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee11(index, weight, expiration) { - var now, wait; - return regeneratorRuntime.wrap(function _callee11$(_context11) { - while (1) { - switch (_context11.prev = _context11.next) { - case 0: - _context11.next = 2; - return this.yieldLoop(); - - case 2: - now = Date.now(); - - if (!this.conditionsCheck(weight)) { - _context11.next = 11; - break; - } - - this._running += weight; - - if (this.storeOptions.reservoir != null) { - this.storeOptions.reservoir -= weight; - } - - wait = Math.max(this._nextRequest - now, 0); - this._nextRequest = now + wait + this.storeOptions.minTime; - return _context11.abrupt("return", { - success: true, - wait: wait, - reservoir: this.storeOptions.reservoir - }); - - case 11: - return _context11.abrupt("return", { - success: false - }); - - case 12: - case "end": - return _context11.stop(); - } - } - }, _callee11, this); - })); - - function __register__(_x7, _x8, _x9) { - return _register__.apply(this, arguments); - } - - return __register__; - }() - }, { - key: "strategyIsBlock", - value: function strategyIsBlock() { - return this.storeOptions.strategy === 3; - } - }, { - key: "__submit__", - value: function () { - var _submit__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee12(queueLength, weight) { - var blocked, now, reachedHWM; - return regeneratorRuntime.wrap(function _callee12$(_context12) { - while (1) { - switch (_context12.prev = _context12.next) { - case 0: - _context12.next = 2; - return this.yieldLoop(); - - case 2: - if 
(!(this.storeOptions.maxConcurrent != null && weight > this.storeOptions.maxConcurrent)) { - _context12.next = 4; - break; - } - - throw new BottleneckError$2("Impossible to add a job having a weight of ".concat(weight, " to a limiter having a maxConcurrent setting of ").concat(this.storeOptions.maxConcurrent)); - - case 4: - now = Date.now(); - reachedHWM = this.storeOptions.highWater != null && queueLength === this.storeOptions.highWater && !this.check(weight, now); - blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now)); - - if (blocked) { - this._unblockTime = now + this.computePenalty(); - this._nextRequest = this._unblockTime + this.storeOptions.minTime; - - this.instance._dropAllQueued(); - } - - return _context12.abrupt("return", { - reachedHWM: reachedHWM, - blocked: blocked, - strategy: this.storeOptions.strategy - }); - - case 9: - case "end": - return _context12.stop(); - } - } - }, _callee12, this); - })); - - function __submit__(_x10, _x11) { - return _submit__.apply(this, arguments); - } - - return __submit__; - }() - }, { - key: "__free__", - value: function () { - var _free__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee13(index, weight) { - return regeneratorRuntime.wrap(function _callee13$(_context13) { - while (1) { - switch (_context13.prev = _context13.next) { - case 0: - _context13.next = 2; - return this.yieldLoop(); - - case 2: - this._running -= weight; - this._done += weight; - - this.instance._drainAll(this.computeCapacity()); - - return _context13.abrupt("return", { - running: this._running - }); - - case 6: - case "end": - return _context13.stop(); - } - } - }, _callee13, this); - })); - - function __free__(_x12, _x13) { - return _free__.apply(this, arguments); - } - - return __free__; - }() - }]); - - return LocalDatastore; - }(); - - var LocalDatastore_1 = LocalDatastore; - - var lua = { - "blacklist_client.lua": "local blacklist = ARGV[num_static_argv + 1]\n\nif 
redis.call('zscore', client_last_seen_key, blacklist) then\n redis.call('zadd', client_last_seen_key, 0, blacklist)\nend\n\n\nreturn {}\n", - "check.lua": "local weight = tonumber(ARGV[num_static_argv + 1])\n\nlocal capacity = process_tick(now, false)['capacity']\nlocal nextRequest = tonumber(redis.call('hget', settings_key, 'nextRequest'))\n\nreturn conditions_check(capacity, weight) and nextRequest - now <= 0\n", - "conditions_check.lua": "local conditions_check = function (capacity, weight)\n return capacity == nil or weight <= capacity\nend\n", - "current_reservoir.lua": "return process_tick(now, false)['reservoir']\n", - "done.lua": "process_tick(now, false)\n\nreturn tonumber(redis.call('hget', settings_key, 'done'))\n", - "free.lua": "local index = ARGV[num_static_argv + 1]\n\nredis.call('zadd', job_expirations_key, 0, index)\n\nreturn process_tick(now, false)['running']\n", - "get_time.lua": "redis.replicate_commands()\n\nlocal get_time = function ()\n local time = redis.call('time')\n\n return tonumber(time[1]..string.sub(time[2], 1, 3))\nend\n", - "group_check.lua": "return not (redis.call('exists', settings_key) == 1)\n", - "heartbeat.lua": "process_tick(now, true)\n", - "increment_reservoir.lua": "local incr = tonumber(ARGV[num_static_argv + 1])\n\nredis.call('hincrby', settings_key, 'reservoir', incr)\n\nlocal reservoir = process_tick(now, true)['reservoir']\n\nlocal groupTimeout = tonumber(redis.call('hget', settings_key, 'groupTimeout'))\nrefresh_expiration(0, 0, groupTimeout)\n\nreturn reservoir\n", - "init.lua": "local clear = tonumber(ARGV[num_static_argv + 1])\nlocal limiter_version = ARGV[num_static_argv + 2]\nlocal num_local_argv = num_static_argv + 2\n\nif clear == 1 then\n redis.call('del', unpack(KEYS))\nend\n\nif redis.call('exists', settings_key) == 0 then\n -- Create\n local args = {'hmset', settings_key}\n\n for i = num_local_argv + 1, #ARGV do\n table.insert(args, ARGV[i])\n end\n\n redis.call(unpack(args))\n redis.call('hmset', 
settings_key,\n 'nextRequest', now,\n 'lastReservoirRefresh', now,\n 'lastReservoirIncrease', now,\n 'running', 0,\n 'done', 0,\n 'unblockTime', 0,\n 'capacityPriorityCounter', 0\n )\n\nelse\n -- Apply migrations\n local settings = redis.call('hmget', settings_key,\n 'id',\n 'version'\n )\n local id = settings[1]\n local current_version = settings[2]\n\n if current_version ~= limiter_version then\n local version_digits = {}\n for k, v in string.gmatch(current_version, \"([^.]+)\") do\n table.insert(version_digits, tonumber(k))\n end\n\n -- 2.10.0\n if version_digits[2] < 10 then\n redis.call('hsetnx', settings_key, 'reservoirRefreshInterval', '')\n redis.call('hsetnx', settings_key, 'reservoirRefreshAmount', '')\n redis.call('hsetnx', settings_key, 'lastReservoirRefresh', '')\n redis.call('hsetnx', settings_key, 'done', 0)\n redis.call('hset', settings_key, 'version', '2.10.0')\n end\n\n -- 2.11.1\n if version_digits[2] < 11 or (version_digits[2] == 11 and version_digits[3] < 1) then\n if redis.call('hstrlen', settings_key, 'lastReservoirRefresh') == 0 then\n redis.call('hmset', settings_key,\n 'lastReservoirRefresh', now,\n 'version', '2.11.1'\n )\n end\n end\n\n -- 2.14.0\n if version_digits[2] < 14 then\n local old_running_key = 'b_'..id..'_running'\n local old_executing_key = 'b_'..id..'_executing'\n\n if redis.call('exists', old_running_key) == 1 then\n redis.call('rename', old_running_key, job_weights_key)\n end\n if redis.call('exists', old_executing_key) == 1 then\n redis.call('rename', old_executing_key, job_expirations_key)\n end\n redis.call('hset', settings_key, 'version', '2.14.0')\n end\n\n -- 2.15.2\n if version_digits[2] < 15 or (version_digits[2] == 15 and version_digits[3] < 2) then\n redis.call('hsetnx', settings_key, 'capacityPriorityCounter', 0)\n redis.call('hset', settings_key, 'version', '2.15.2')\n end\n\n -- 2.17.0\n if version_digits[2] < 17 then\n redis.call('hsetnx', settings_key, 'clientTimeout', 10000)\n redis.call('hset', 
settings_key, 'version', '2.17.0')\n end\n\n -- 2.18.0\n if version_digits[2] < 18 then\n redis.call('hsetnx', settings_key, 'reservoirIncreaseInterval', '')\n redis.call('hsetnx', settings_key, 'reservoirIncreaseAmount', '')\n redis.call('hsetnx', settings_key, 'reservoirIncreaseMaximum', '')\n redis.call('hsetnx', settings_key, 'lastReservoirIncrease', now)\n redis.call('hset', settings_key, 'version', '2.18.0')\n end\n\n end\n\n process_tick(now, false)\nend\n\nlocal groupTimeout = tonumber(redis.call('hget', settings_key, 'groupTimeout'))\nrefresh_expiration(0, 0, groupTimeout)\n\nreturn {}\n", - "process_tick.lua": "local process_tick = function (now, always_publish)\n\n local compute_capacity = function (maxConcurrent, running, reservoir)\n if maxConcurrent ~= nil and reservoir ~= nil then\n return math.min((maxConcurrent - running), reservoir)\n elseif maxConcurrent ~= nil then\n return maxConcurrent - running\n elseif reservoir ~= nil then\n return reservoir\n else\n return nil\n end\n end\n\n local settings = redis.call('hmget', settings_key,\n 'id',\n 'maxConcurrent',\n 'running',\n 'reservoir',\n 'reservoirRefreshInterval',\n 'reservoirRefreshAmount',\n 'lastReservoirRefresh',\n 'reservoirIncreaseInterval',\n 'reservoirIncreaseAmount',\n 'reservoirIncreaseMaximum',\n 'lastReservoirIncrease',\n 'capacityPriorityCounter',\n 'clientTimeout'\n )\n local id = settings[1]\n local maxConcurrent = tonumber(settings[2])\n local running = tonumber(settings[3])\n local reservoir = tonumber(settings[4])\n local reservoirRefreshInterval = tonumber(settings[5])\n local reservoirRefreshAmount = tonumber(settings[6])\n local lastReservoirRefresh = tonumber(settings[7])\n local reservoirIncreaseInterval = tonumber(settings[8])\n local reservoirIncreaseAmount = tonumber(settings[9])\n local reservoirIncreaseMaximum = tonumber(settings[10])\n local lastReservoirIncrease = tonumber(settings[11])\n local capacityPriorityCounter = tonumber(settings[12])\n local clientTimeout 
= tonumber(settings[13])\n\n local initial_capacity = compute_capacity(maxConcurrent, running, reservoir)\n\n --\n -- Process 'running' changes\n --\n local expired = redis.call('zrangebyscore', job_expirations_key, '-inf', '('..now)\n\n if #expired > 0 then\n redis.call('zremrangebyscore', job_expirations_key, '-inf', '('..now)\n\n local flush_batch = function (batch, acc)\n local weights = redis.call('hmget', job_weights_key, unpack(batch))\n redis.call('hdel', job_weights_key, unpack(batch))\n local clients = redis.call('hmget', job_clients_key, unpack(batch))\n redis.call('hdel', job_clients_key, unpack(batch))\n\n -- Calculate sum of removed weights\n for i = 1, #weights do\n acc['total'] = acc['total'] + (tonumber(weights[i]) or 0)\n end\n\n -- Calculate sum of removed weights by client\n local client_weights = {}\n for i = 1, #clients do\n local removed = tonumber(weights[i]) or 0\n if removed > 0 then\n acc['client_weights'][clients[i]] = (acc['client_weights'][clients[i]] or 0) + removed\n end\n end\n end\n\n local acc = {\n ['total'] = 0,\n ['client_weights'] = {}\n }\n local batch_size = 1000\n\n -- Compute changes to Zsets and apply changes to Hashes\n for i = 1, #expired, batch_size do\n local batch = {}\n for j = i, math.min(i + batch_size - 1, #expired) do\n table.insert(batch, expired[j])\n end\n\n flush_batch(batch, acc)\n end\n\n -- Apply changes to Zsets\n if acc['total'] > 0 then\n redis.call('hincrby', settings_key, 'done', acc['total'])\n running = tonumber(redis.call('hincrby', settings_key, 'running', -acc['total']))\n end\n\n for client, weight in pairs(acc['client_weights']) do\n redis.call('zincrby', client_running_key, -weight, client)\n end\n end\n\n --\n -- Process 'reservoir' changes\n --\n local reservoirRefreshActive = reservoirRefreshInterval ~= nil and reservoirRefreshAmount ~= nil\n if reservoirRefreshActive and now >= lastReservoirRefresh + reservoirRefreshInterval then\n reservoir = reservoirRefreshAmount\n redis.call('hmset', 
settings_key,\n 'reservoir', reservoir,\n 'lastReservoirRefresh', now\n )\n end\n\n local reservoirIncreaseActive = reservoirIncreaseInterval ~= nil and reservoirIncreaseAmount ~= nil\n if reservoirIncreaseActive and now >= lastReservoirIncrease + reservoirIncreaseInterval then\n local num_intervals = math.floor((now - lastReservoirIncrease) / reservoirIncreaseInterval)\n local incr = reservoirIncreaseAmount * num_intervals\n if reservoirIncreaseMaximum ~= nil then\n incr = math.min(incr, reservoirIncreaseMaximum - (reservoir or 0))\n end\n if incr > 0 then\n reservoir = (reservoir or 0) + incr\n end\n redis.call('hmset', settings_key,\n 'reservoir', reservoir,\n 'lastReservoirIncrease', lastReservoirIncrease + (num_intervals * reservoirIncreaseInterval)\n )\n end\n\n --\n -- Clear unresponsive clients\n --\n local unresponsive = redis.call('zrangebyscore', client_last_seen_key, '-inf', (now - clientTimeout))\n local unresponsive_lookup = {}\n local terminated_clients = {}\n for i = 1, #unresponsive do\n unresponsive_lookup[unresponsive[i]] = true\n if tonumber(redis.call('zscore', client_running_key, unresponsive[i])) == 0 then\n table.insert(terminated_clients, unresponsive[i])\n end\n end\n if #terminated_clients > 0 then\n redis.call('zrem', client_running_key, unpack(terminated_clients))\n redis.call('hdel', client_num_queued_key, unpack(terminated_clients))\n redis.call('zrem', client_last_registered_key, unpack(terminated_clients))\n redis.call('zrem', client_last_seen_key, unpack(terminated_clients))\n end\n\n --\n -- Broadcast capacity changes\n --\n local final_capacity = compute_capacity(maxConcurrent, running, reservoir)\n\n if always_publish or (initial_capacity ~= nil and final_capacity == nil) then\n -- always_publish or was not unlimited, now unlimited\n redis.call('publish', 'b_'..id, 'capacity:'..(final_capacity or ''))\n\n elseif initial_capacity ~= nil and final_capacity ~= nil and final_capacity > initial_capacity then\n -- capacity was 
increased\n -- send the capacity message to the limiter having the lowest number of running jobs\n -- the tiebreaker is the limiter having not registered a job in the longest time\n\n local lowest_concurrency_value = nil\n local lowest_concurrency_clients = {}\n local lowest_concurrency_last_registered = {}\n local client_concurrencies = redis.call('zrange', client_running_key, 0, -1, 'withscores')\n\n for i = 1, #client_concurrencies, 2 do\n local client = client_concurrencies[i]\n local concurrency = tonumber(client_concurrencies[i+1])\n\n if (\n lowest_concurrency_value == nil or lowest_concurrency_value == concurrency\n ) and (\n not unresponsive_lookup[client]\n ) and (\n tonumber(redis.call('hget', client_num_queued_key, client)) > 0\n ) then\n lowest_concurrency_value = concurrency\n table.insert(lowest_concurrency_clients, client)\n local last_registered = tonumber(redis.call('zscore', client_last_registered_key, client))\n table.insert(lowest_concurrency_last_registered, last_registered)\n end\n end\n\n if #lowest_concurrency_clients > 0 then\n local position = 1\n local earliest = lowest_concurrency_last_registered[1]\n\n for i,v in ipairs(lowest_concurrency_last_registered) do\n if v < earliest then\n position = i\n earliest = v\n end\n end\n\n local next_client = lowest_concurrency_clients[position]\n redis.call('publish', 'b_'..id,\n 'capacity-priority:'..(final_capacity or '')..\n ':'..next_client..\n ':'..capacityPriorityCounter\n )\n redis.call('hincrby', settings_key, 'capacityPriorityCounter', '1')\n else\n redis.call('publish', 'b_'..id, 'capacity:'..(final_capacity or ''))\n end\n end\n\n return {\n ['capacity'] = final_capacity,\n ['running'] = running,\n ['reservoir'] = reservoir\n }\nend\n", - "queued.lua": "local clientTimeout = tonumber(redis.call('hget', settings_key, 'clientTimeout'))\nlocal valid_clients = redis.call('zrangebyscore', client_last_seen_key, (now - clientTimeout), 'inf')\nlocal client_queued = redis.call('hmget', 
client_num_queued_key, unpack(valid_clients))\n\nlocal sum = 0\nfor i = 1, #client_queued do\n sum = sum + tonumber(client_queued[i])\nend\n\nreturn sum\n", - "refresh_expiration.lua": "local refresh_expiration = function (now, nextRequest, groupTimeout)\n\n if groupTimeout ~= nil then\n local ttl = (nextRequest + groupTimeout) - now\n\n for i = 1, #KEYS do\n redis.call('pexpire', KEYS[i], ttl)\n end\n end\n\nend\n", - "refs.lua": "local settings_key = KEYS[1]\nlocal job_weights_key = KEYS[2]\nlocal job_expirations_key = KEYS[3]\nlocal job_clients_key = KEYS[4]\nlocal client_running_key = KEYS[5]\nlocal client_num_queued_key = KEYS[6]\nlocal client_last_registered_key = KEYS[7]\nlocal client_last_seen_key = KEYS[8]\n\nlocal now = tonumber(ARGV[1])\nlocal client = ARGV[2]\n\nlocal num_static_argv = 2\n", - "register.lua": "local index = ARGV[num_static_argv + 1]\nlocal weight = tonumber(ARGV[num_static_argv + 2])\nlocal expiration = tonumber(ARGV[num_static_argv + 3])\n\nlocal state = process_tick(now, false)\nlocal capacity = state['capacity']\nlocal reservoir = state['reservoir']\n\nlocal settings = redis.call('hmget', settings_key,\n 'nextRequest',\n 'minTime',\n 'groupTimeout'\n)\nlocal nextRequest = tonumber(settings[1])\nlocal minTime = tonumber(settings[2])\nlocal groupTimeout = tonumber(settings[3])\n\nif conditions_check(capacity, weight) then\n\n redis.call('hincrby', settings_key, 'running', weight)\n redis.call('hset', job_weights_key, index, weight)\n if expiration ~= nil then\n redis.call('zadd', job_expirations_key, now + expiration, index)\n end\n redis.call('hset', job_clients_key, index, client)\n redis.call('zincrby', client_running_key, weight, client)\n redis.call('hincrby', client_num_queued_key, client, -1)\n redis.call('zadd', client_last_registered_key, now, client)\n\n local wait = math.max(nextRequest - now, 0)\n local newNextRequest = now + wait + minTime\n\n if reservoir == nil then\n redis.call('hset', settings_key,\n 'nextRequest', 
newNextRequest\n )\n else\n reservoir = reservoir - weight\n redis.call('hmset', settings_key,\n 'reservoir', reservoir,\n 'nextRequest', newNextRequest\n )\n end\n\n refresh_expiration(now, newNextRequest, groupTimeout)\n\n return {true, wait, reservoir}\n\nelse\n return {false}\nend\n", - "register_client.lua": "local queued = tonumber(ARGV[num_static_argv + 1])\n\n-- Could have been re-registered concurrently\nif not redis.call('zscore', client_last_seen_key, client) then\n redis.call('zadd', client_running_key, 0, client)\n redis.call('hset', client_num_queued_key, client, queued)\n redis.call('zadd', client_last_registered_key, 0, client)\nend\n\nredis.call('zadd', client_last_seen_key, now, client)\n\nlocal groupTimeout = tonumber(redis.call('hget', settings_key, 'groupTimeout'))\nrefresh_expiration(0, 0, groupTimeout)\n\nreturn {}\n", - "running.lua": "return process_tick(now, false)['running']\n", - "submit.lua": "local queueLength = tonumber(ARGV[num_static_argv + 1])\nlocal weight = tonumber(ARGV[num_static_argv + 2])\n\nlocal capacity = process_tick(now, false)['capacity']\n\nlocal settings = redis.call('hmget', settings_key,\n 'id',\n 'maxConcurrent',\n 'highWater',\n 'nextRequest',\n 'strategy',\n 'unblockTime',\n 'penalty',\n 'minTime',\n 'groupTimeout'\n)\nlocal id = settings[1]\nlocal maxConcurrent = tonumber(settings[2])\nlocal highWater = tonumber(settings[3])\nlocal nextRequest = tonumber(settings[4])\nlocal strategy = tonumber(settings[5])\nlocal unblockTime = tonumber(settings[6])\nlocal penalty = tonumber(settings[7])\nlocal minTime = tonumber(settings[8])\nlocal groupTimeout = tonumber(settings[9])\n\nif maxConcurrent ~= nil and weight > maxConcurrent then\n return redis.error_reply('OVERWEIGHT:'..weight..':'..maxConcurrent)\nend\n\nlocal reachedHWM = (highWater ~= nil and queueLength == highWater\n and not (\n conditions_check(capacity, weight)\n and nextRequest - now <= 0\n )\n)\n\nlocal blocked = strategy == 3 and (reachedHWM or 
unblockTime >= now)\n\nif blocked then\n local computedPenalty = penalty\n if computedPenalty == nil then\n if minTime == 0 then\n computedPenalty = 5000\n else\n computedPenalty = 15 * minTime\n end\n end\n\n local newNextRequest = now + computedPenalty + minTime\n\n redis.call('hmset', settings_key,\n 'unblockTime', now + computedPenalty,\n 'nextRequest', newNextRequest\n )\n\n local clients_queued_reset = redis.call('hkeys', client_num_queued_key)\n local queued_reset = {}\n for i = 1, #clients_queued_reset do\n table.insert(queued_reset, clients_queued_reset[i])\n table.insert(queued_reset, 0)\n end\n redis.call('hmset', client_num_queued_key, unpack(queued_reset))\n\n redis.call('publish', 'b_'..id, 'blocked:')\n\n refresh_expiration(now, newNextRequest, groupTimeout)\nend\n\nif not blocked and not reachedHWM then\n redis.call('hincrby', client_num_queued_key, client, 1)\nend\n\nreturn {reachedHWM, blocked, strategy}\n", - "update_settings.lua": "local args = {'hmset', settings_key}\n\nfor i = num_static_argv + 1, #ARGV do\n table.insert(args, ARGV[i])\nend\n\nredis.call(unpack(args))\n\nprocess_tick(now, true)\n\nlocal groupTimeout = tonumber(redis.call('hget', settings_key, 'groupTimeout'))\nrefresh_expiration(0, 0, groupTimeout)\n\nreturn {}\n", - "validate_client.lua": "if not redis.call('zscore', client_last_seen_key, client) then\n return redis.error_reply('UNKNOWN_CLIENT')\nend\n\nredis.call('zadd', client_last_seen_key, now, client)\n", - "validate_keys.lua": "if not (redis.call('exists', settings_key) == 1) then\n return redis.error_reply('SETTINGS_KEY_NOT_FOUND')\nend\n" - }; - - var lua$1 = /*#__PURE__*/Object.freeze({ - default: lua - }); - - var require$$0 = getCjsExportFromNamespace(lua$1); - - var Scripts = createCommonjsModule(function (module, exports) { - var headers, lua, templates; - lua = require$$0; - headers = { - refs: lua["refs.lua"], - validate_keys: lua["validate_keys.lua"], - validate_client: lua["validate_client.lua"], - 
refresh_expiration: lua["refresh_expiration.lua"], - process_tick: lua["process_tick.lua"], - conditions_check: lua["conditions_check.lua"], - get_time: lua["get_time.lua"] - }; - - exports.allKeys = function (id) { - return [ - /* - HASH - */ - "b_".concat(id, "_settings"), - /* - HASH - job index -> weight - */ - "b_".concat(id, "_job_weights"), - /* - ZSET - job index -> expiration - */ - "b_".concat(id, "_job_expirations"), - /* - HASH - job index -> client - */ - "b_".concat(id, "_job_clients"), - /* - ZSET - client -> sum running - */ - "b_".concat(id, "_client_running"), - /* - HASH - client -> num queued - */ - "b_".concat(id, "_client_num_queued"), - /* - ZSET - client -> last job registered - */ - "b_".concat(id, "_client_last_registered"), - /* - ZSET - client -> last seen - */ - "b_".concat(id, "_client_last_seen")]; - }; - - templates = { - init: { - keys: exports.allKeys, - headers: ["process_tick"], - refresh_expiration: true, - code: lua["init.lua"] - }, - group_check: { - keys: exports.allKeys, - headers: [], - refresh_expiration: false, - code: lua["group_check.lua"] - }, - register_client: { - keys: exports.allKeys, - headers: ["validate_keys"], - refresh_expiration: true, - code: lua["register_client.lua"] - }, - blacklist_client: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client"], - refresh_expiration: false, - code: lua["blacklist_client.lua"] - }, - heartbeat: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: false, - code: lua["heartbeat.lua"] - }, - update_settings: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: true, - code: lua["update_settings.lua"] - }, - running: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: false, - code: lua["running.lua"] - }, - queued: { - keys: exports.allKeys, - headers: ["validate_keys", 
"validate_client"], - refresh_expiration: false, - code: lua["queued.lua"] - }, - done: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: false, - code: lua["done.lua"] - }, - check: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], - refresh_expiration: false, - code: lua["check.lua"] - }, - submit: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], - refresh_expiration: true, - code: lua["submit.lua"] - }, - register: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], - refresh_expiration: true, - code: lua["register.lua"] - }, - free: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: true, - code: lua["free.lua"] - }, - current_reservoir: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: false, - code: lua["current_reservoir.lua"] - }, - increment_reservoir: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: true, - code: lua["increment_reservoir.lua"] - } - }; - exports.names = Object.keys(templates); - - exports.keys = function (name, id) { - return templates[name].keys(id); - }; - - exports.payload = function (name) { - var template; - template = templates[name]; - return Array.prototype.concat(headers.refs, template.headers.map(function (h) { - return headers[h]; - }), template.refresh_expiration ? 
headers.refresh_expiration : "", template.code).join("\n"); - }; - }); - var Scripts_1 = Scripts.allKeys; - var Scripts_2 = Scripts.names; - var Scripts_3 = Scripts.keys; - var Scripts_4 = Scripts.payload; - - var Events$2, RedisConnection, Scripts$1, parser$3; - parser$3 = parser; - Events$2 = Events_1; - Scripts$1 = Scripts; - - RedisConnection = function () { - var RedisConnection = - /*#__PURE__*/ - function () { - function RedisConnection() { - var _this = this; - - var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; - - _classCallCheck(this, RedisConnection); - - parser$3.load(options, this.defaults, this); - - if (this.Redis == null) { - this.Redis = eval("require")("redis"); // Obfuscated or else Webpack/Angular will try to inline the optional redis module. To override this behavior: pass the redis module to Bottleneck as the 'Redis' option. - } - - if (this.Events == null) { - this.Events = new Events$2(this); - } - - this.terminated = false; - - if (this.client == null) { - this.client = this.Redis.createClient(this.clientOptions); - } - - this.subscriber = this.client.duplicate(); - this.limiters = {}; - this.shas = {}; - this.ready = this.Promise.all([this._setup(this.client, false), this._setup(this.subscriber, true)]).then(function () { - return _this._loadScripts(); - }).then(function () { - return { - client: _this.client, - subscriber: _this.subscriber - }; - }); - } - - _createClass(RedisConnection, [{ - key: "_setup", - value: function _setup(client, sub) { - var _this2 = this; - - client.setMaxListeners(0); - return new this.Promise(function (resolve, reject) { - client.on("error", function (e) { - return _this2.Events.trigger("error", e); - }); - - if (sub) { - client.on("message", function (channel, message) { - var ref; - return (ref = _this2.limiters[channel]) != null ? 
ref._store.onMessage(channel, message) : void 0; - }); - } - - if (client.ready) { - return resolve(); - } else { - return client.once("ready", resolve); - } - }); - } - }, { - key: "_loadScript", - value: function _loadScript(name) { - var _this3 = this; - - return new this.Promise(function (resolve, reject) { - var payload; - payload = Scripts$1.payload(name); - return _this3.client.multi([["script", "load", payload]]).exec(function (err, replies) { - if (err != null) { - return reject(err); - } - - _this3.shas[name] = replies[0]; - return resolve(replies[0]); - }); - }); - } - }, { - key: "_loadScripts", - value: function _loadScripts() { - var _this4 = this; - - return this.Promise.all(Scripts$1.names.map(function (k) { - return _this4._loadScript(k); - })); - } - }, { - key: "__runCommand__", - value: function () { - var _runCommand__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee(cmd) { - var _this5 = this; - - return regeneratorRuntime.wrap(function _callee$(_context) { - while (1) { - switch (_context.prev = _context.next) { - case 0: - _context.next = 2; - return this.ready; - - case 2: - return _context.abrupt("return", new this.Promise(function (resolve, reject) { - return _this5.client.multi([cmd]).exec_atomic(function (err, replies) { - if (err != null) { - return reject(err); - } else { - return resolve(replies[0]); - } - }); - })); - - case 3: - case "end": - return _context.stop(); - } - } - }, _callee, this); - })); - - function __runCommand__(_x) { - return _runCommand__.apply(this, arguments); - } - - return __runCommand__; - }() - }, { - key: "__addLimiter__", - value: function __addLimiter__(instance) { - var _this6 = this; - - return this.Promise.all([instance.channel(), instance.channel_client()].map(function (channel) { - return new _this6.Promise(function (resolve, reject) { - var _handler; - - _handler = function handler(chan) { - if (chan === channel) { - _this6.subscriber.removeListener("subscribe", 
_handler); - - _this6.limiters[channel] = instance; - return resolve(); - } - }; - - _this6.subscriber.on("subscribe", _handler); - - return _this6.subscriber.subscribe(channel); - }); - })); - } - }, { - key: "__removeLimiter__", - value: function __removeLimiter__(instance) { - var _this7 = this; - - return this.Promise.all([instance.channel(), instance.channel_client()].map( - /*#__PURE__*/ - function () { - var _ref = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee2(channel) { - return regeneratorRuntime.wrap(function _callee2$(_context2) { - while (1) { - switch (_context2.prev = _context2.next) { - case 0: - if (_this7.terminated) { - _context2.next = 3; - break; - } - - _context2.next = 3; - return new _this7.Promise(function (resolve, reject) { - return _this7.subscriber.unsubscribe(channel, function (err, chan) { - if (err != null) { - return reject(err); - } - - if (chan === channel) { - return resolve(); - } - }); - }); - - case 3: - return _context2.abrupt("return", delete _this7.limiters[channel]); - - case 4: - case "end": - return _context2.stop(); - } - } - }, _callee2); - })); - - return function (_x2) { - return _ref.apply(this, arguments); - }; - }())); - } - }, { - key: "__scriptArgs__", - value: function __scriptArgs__(name, id, args, cb) { - var keys; - keys = Scripts$1.keys(name, id); - return [this.shas[name], keys.length].concat(keys, args, cb); - } - }, { - key: "__scriptFn__", - value: function __scriptFn__(name) { - return this.client.evalsha.bind(this.client); - } - }, { - key: "disconnect", - value: function disconnect() { - var flush = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : true; - var i, k, len, ref; - ref = Object.keys(this.limiters); - - for (i = 0, len = ref.length; i < len; i++) { - k = ref[i]; - clearInterval(this.limiters[k]._store.heartbeat); - } - - this.limiters = {}; - this.terminated = true; - this.client.end(flush); - this.subscriber.end(flush); - return this.Promise.resolve(); - } - }]); - - return RedisConnection; - }(); - RedisConnection.prototype.datastore = "redis"; - RedisConnection.prototype.defaults = { - Redis: null, - clientOptions: {}, - client: null, - Promise: Promise, - Events: null - }; - return RedisConnection; - }.call(commonjsGlobal); - - var RedisConnection_1 = RedisConnection; - - var Events$3, IORedisConnection, Scripts$2, parser$4; - parser$4 = parser; - Events$3 = Events_1; - Scripts$2 = Scripts; - - IORedisConnection = function () { - var IORedisConnection = - /*#__PURE__*/ - function () { - function IORedisConnection() { - var _this = this; - - var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; - - _classCallCheck(this, IORedisConnection); - - parser$4.load(options, this.defaults, this); - - if (this.Redis == null) { - this.Redis = eval("require")("ioredis"); // Obfuscated or else Webpack/Angular will try to inline the optional ioredis module. To override this behavior: pass the ioredis module to Bottleneck as the 'Redis' option. 
- } - - if (this.Events == null) { - this.Events = new Events$3(this); - } - - this.terminated = false; - - if (this.clusterNodes != null) { - this.client = new this.Redis.Cluster(this.clusterNodes, this.clientOptions); - this.subscriber = new this.Redis.Cluster(this.clusterNodes, this.clientOptions); - } else if (this.client != null && this.client.duplicate == null) { - this.subscriber = new this.Redis.Cluster(this.client.startupNodes, this.client.options); - } else { - if (this.client == null) { - this.client = new this.Redis(this.clientOptions); - } - - this.subscriber = this.client.duplicate(); - } - - this.limiters = {}; - this.ready = this.Promise.all([this._setup(this.client, false), this._setup(this.subscriber, true)]).then(function () { - _this._loadScripts(); - - return { - client: _this.client, - subscriber: _this.subscriber - }; - }); - } - - _createClass(IORedisConnection, [{ - key: "_setup", - value: function _setup(client, sub) { - var _this2 = this; - - client.setMaxListeners(0); - return new this.Promise(function (resolve, reject) { - client.on("error", function (e) { - return _this2.Events.trigger("error", e); - }); - - if (sub) { - client.on("message", function (channel, message) { - var ref; - return (ref = _this2.limiters[channel]) != null ? 
ref._store.onMessage(channel, message) : void 0; - }); - } - - if (client.status === "ready") { - return resolve(); - } else { - return client.once("ready", resolve); - } - }); - } - }, { - key: "_loadScripts", - value: function _loadScripts() { - var _this3 = this; - - return Scripts$2.names.forEach(function (name) { - return _this3.client.defineCommand(name, { - lua: Scripts$2.payload(name) - }); - }); - } - }, { - key: "__runCommand__", - value: function () { - var _runCommand__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee(cmd) { - var _, deleted, _ref, _ref2, _ref2$; - - return regeneratorRuntime.wrap(function _callee$(_context) { - while (1) { - switch (_context.prev = _context.next) { - case 0: - _context.next = 2; - return this.ready; - - case 2: - _context.next = 4; - return this.client.pipeline([cmd]).exec(); - - case 4: - _ref = _context.sent; - _ref2 = _slicedToArray(_ref, 1); - _ref2$ = _slicedToArray(_ref2[0], 2); - _ = _ref2$[0]; - deleted = _ref2$[1]; - return _context.abrupt("return", deleted); - - case 10: - case "end": - return _context.stop(); - } - } - }, _callee, this); - })); - - function __runCommand__(_x) { - return _runCommand__.apply(this, arguments); - } - - return __runCommand__; - }() - }, { - key: "__addLimiter__", - value: function __addLimiter__(instance) { - var _this4 = this; - - return this.Promise.all([instance.channel(), instance.channel_client()].map(function (channel) { - return new _this4.Promise(function (resolve, reject) { - return _this4.subscriber.subscribe(channel, function () { - _this4.limiters[channel] = instance; - return resolve(); - }); - }); - })); - } - }, { - key: "__removeLimiter__", - value: function __removeLimiter__(instance) { - var _this5 = this; - - return [instance.channel(), instance.channel_client()].forEach( - /*#__PURE__*/ - function () { - var _ref3 = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee2(channel) { - return 
regeneratorRuntime.wrap(function _callee2$(_context2) { - while (1) { - switch (_context2.prev = _context2.next) { - case 0: - if (_this5.terminated) { - _context2.next = 3; - break; - } - - _context2.next = 3; - return _this5.subscriber.unsubscribe(channel); - - case 3: - return _context2.abrupt("return", delete _this5.limiters[channel]); - - case 4: - case "end": - return _context2.stop(); - } - } - }, _callee2); - })); - - return function (_x2) { - return _ref3.apply(this, arguments); - }; - }()); - } - }, { - key: "__scriptArgs__", - value: function __scriptArgs__(name, id, args, cb) { - var keys; - keys = Scripts$2.keys(name, id); - return [keys.length].concat(keys, args, cb); - } - }, { - key: "__scriptFn__", - value: function __scriptFn__(name) { - return this.client[name].bind(this.client); - } - }, { - key: "disconnect", - value: function disconnect() { - var flush = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true; - var i, k, len, ref; - ref = Object.keys(this.limiters); - - for (i = 0, len = ref.length; i < len; i++) { - k = ref[i]; - clearInterval(this.limiters[k]._store.heartbeat); - } - - this.limiters = {}; - this.terminated = true; - - if (flush) { - return this.Promise.all([this.client.quit(), this.subscriber.quit()]); - } else { - this.client.disconnect(); - this.subscriber.disconnect(); - return this.Promise.resolve(); - } - } - }]); - - return IORedisConnection; - }(); - IORedisConnection.prototype.datastore = "ioredis"; - IORedisConnection.prototype.defaults = { - Redis: null, - clientOptions: {}, - clusterNodes: null, - client: null, - Promise: Promise, - Events: null - }; - return IORedisConnection; - }.call(commonjsGlobal); - - var IORedisConnection_1 = IORedisConnection; - - var BottleneckError$3, IORedisConnection$1, RedisConnection$1, RedisDatastore, parser$5; - parser$5 = parser; - BottleneckError$3 = BottleneckError_1; - RedisConnection$1 = RedisConnection_1; - IORedisConnection$1 = IORedisConnection_1; - - 
RedisDatastore = - /*#__PURE__*/ - function () { - function RedisDatastore(instance, storeOptions, storeInstanceOptions) { - var _this = this; - - _classCallCheck(this, RedisDatastore); - - this.instance = instance; - this.storeOptions = storeOptions; - this.originalId = this.instance.id; - this.clientId = this.instance._randomIndex(); - parser$5.load(storeInstanceOptions, storeInstanceOptions, this); - this.clients = {}; - this.capacityPriorityCounters = {}; - this.sharedConnection = this.connection != null; - - if (this.connection == null) { - this.connection = this.instance.datastore === "redis" ? new RedisConnection$1({ - Redis: this.Redis, - clientOptions: this.clientOptions, - Promise: this.Promise, - Events: this.instance.Events - }) : this.instance.datastore === "ioredis" ? new IORedisConnection$1({ - Redis: this.Redis, - clientOptions: this.clientOptions, - clusterNodes: this.clusterNodes, - Promise: this.Promise, - Events: this.instance.Events - }) : void 0; - } - - this.instance.connection = this.connection; - this.instance.datastore = this.connection.datastore; - this.ready = this.connection.ready.then(function (clients) { - _this.clients = clients; - return _this.runScript("init", _this.prepareInitSettings(_this.clearDatastore)); - }).then(function () { - return _this.connection.__addLimiter__(_this.instance); - }).then(function () { - return _this.runScript("register_client", [_this.instance.queued()]); - }).then(function () { - var base; - - if (typeof (base = _this.heartbeat = setInterval(function () { - return _this.runScript("heartbeat", [])["catch"](function (e) { - return _this.instance.Events.trigger("error", e); - }); - }, _this.heartbeatInterval)).unref === "function") { - base.unref(); - } - - return _this.clients; - }); - } - - _createClass(RedisDatastore, [{ - key: "__publish__", - value: function () { - var _publish__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee(message) { - var client, _ref; - - return 
regeneratorRuntime.wrap(function _callee$(_context) { - while (1) { - switch (_context.prev = _context.next) { - case 0: - _context.next = 2; - return this.ready; - - case 2: - _ref = _context.sent; - client = _ref.client; - return _context.abrupt("return", client.publish(this.instance.channel(), "message:".concat(message.toString()))); - - case 5: - case "end": - return _context.stop(); - } - } - }, _callee, this); - })); - - function __publish__(_x) { - return _publish__.apply(this, arguments); - } - - return __publish__; - }() - }, { - key: "onMessage", - value: function () { - var _onMessage = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee3(channel, message) { - var _this2 = this; - - var capacity, counter, data, drained, e, newCapacity, pos, priorityClient, rawCapacity, type, _ref2, _data$split, _data$split2; - - return regeneratorRuntime.wrap(function _callee3$(_context3) { - while (1) { - switch (_context3.prev = _context3.next) { - case 0: - _context3.prev = 0; - pos = message.indexOf(":"); - _ref2 = [message.slice(0, pos), message.slice(pos + 1)]; - type = _ref2[0]; - data = _ref2[1]; - - if (!(type === "capacity")) { - _context3.next = 11; - break; - } - - _context3.next = 8; - return this.instance._drainAll(data.length > 0 ? ~~data : void 0); - - case 8: - return _context3.abrupt("return", _context3.sent); - - case 11: - if (!(type === "capacity-priority")) { - _context3.next = 37; - break; - } - - _data$split = data.split(":"); - _data$split2 = _slicedToArray(_data$split, 3); - rawCapacity = _data$split2[0]; - priorityClient = _data$split2[1]; - counter = _data$split2[2]; - capacity = rawCapacity.length > 0 ? ~~rawCapacity : void 0; - - if (!(priorityClient === this.clientId)) { - _context3.next = 28; - break; - } - - _context3.next = 21; - return this.instance._drainAll(capacity); - - case 21: - drained = _context3.sent; - newCapacity = capacity != null ? 
capacity - (drained || 0) : ""; - _context3.next = 25; - return this.clients.client.publish(this.instance.channel(), "capacity-priority:".concat(newCapacity, "::").concat(counter)); - - case 25: - return _context3.abrupt("return", _context3.sent); - - case 28: - if (!(priorityClient === "")) { - _context3.next = 34; - break; - } - - clearTimeout(this.capacityPriorityCounters[counter]); - delete this.capacityPriorityCounters[counter]; - return _context3.abrupt("return", this.instance._drainAll(capacity)); - - case 34: - return _context3.abrupt("return", this.capacityPriorityCounters[counter] = setTimeout( - /*#__PURE__*/ - _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee2() { - var e; - return regeneratorRuntime.wrap(function _callee2$(_context2) { - while (1) { - switch (_context2.prev = _context2.next) { - case 0: - _context2.prev = 0; - delete _this2.capacityPriorityCounters[counter]; - _context2.next = 4; - return _this2.runScript("blacklist_client", [priorityClient]); - - case 4: - _context2.next = 6; - return _this2.instance._drainAll(capacity); - - case 6: - return _context2.abrupt("return", _context2.sent); - - case 9: - _context2.prev = 9; - _context2.t0 = _context2["catch"](0); - e = _context2.t0; - return _context2.abrupt("return", _this2.instance.Events.trigger("error", e)); - - case 13: - case "end": - return _context2.stop(); - } - } - }, _callee2, null, [[0, 9]]); - })), 1000)); - - case 35: - _context3.next = 45; - break; - - case 37: - if (!(type === "message")) { - _context3.next = 41; - break; - } - - return _context3.abrupt("return", this.instance.Events.trigger("message", data)); - - case 41: - if (!(type === "blocked")) { - _context3.next = 45; - break; - } - - _context3.next = 44; - return this.instance._dropAllQueued(); - - case 44: - return _context3.abrupt("return", _context3.sent); - - case 45: - _context3.next = 51; - break; - - case 47: - _context3.prev = 47; - _context3.t0 = _context3["catch"](0); - e = 
_context3.t0; - return _context3.abrupt("return", this.instance.Events.trigger("error", e)); - - case 51: - case "end": - return _context3.stop(); - } - } - }, _callee3, this, [[0, 47]]); - })); - - function onMessage(_x2, _x3) { - return _onMessage.apply(this, arguments); - } - - return onMessage; - }() - }, { - key: "__disconnect__", - value: function __disconnect__(flush) { - clearInterval(this.heartbeat); - - if (this.sharedConnection) { - return this.connection.__removeLimiter__(this.instance); - } else { - return this.connection.disconnect(flush); - } - } - }, { - key: "runScript", - value: function () { - var _runScript = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee4(name, args) { - var _this3 = this; - - return regeneratorRuntime.wrap(function _callee4$(_context4) { - while (1) { - switch (_context4.prev = _context4.next) { - case 0: - if (name === "init" || name === "register_client") { - _context4.next = 3; - break; - } - - _context4.next = 3; - return this.ready; - - case 3: - return _context4.abrupt("return", new this.Promise(function (resolve, reject) { - var all_args, arr; - all_args = [Date.now(), _this3.clientId].concat(args); - - _this3.instance.Events.trigger("debug", "Calling Redis script: ".concat(name, ".lua"), all_args); - - arr = _this3.connection.__scriptArgs__(name, _this3.originalId, all_args, function (err, replies) { - if (err != null) { - return reject(err); - } - - return resolve(replies); - }); - return _this3.connection.__scriptFn__(name).apply(void 0, _toConsumableArray(arr)); - })["catch"](function (e) { - if (typeof e.message === "string" && e.message.match(/^(.*\s)?SETTINGS_KEY_NOT_FOUND$/) !== null) { - if (name === "heartbeat") { - return _this3.Promise.resolve(); - } else { - return _this3.runScript("init", _this3.prepareInitSettings(false)).then(function () { - return _this3.runScript(name, args); - }); - } - } else if (typeof e.message === "string" && 
e.message.match(/^(.*\s)?UNKNOWN_CLIENT$/) !== null) { - return _this3.runScript("register_client", [_this3.instance.queued()]).then(function () { - return _this3.runScript(name, args); - }); - } else { - return _this3.Promise.reject(e); - } - })); - - case 4: - case "end": - return _context4.stop(); - } - } - }, _callee4, this); - })); - - function runScript(_x4, _x5) { - return _runScript.apply(this, arguments); - } - - return runScript; - }() - }, { - key: "prepareArray", - value: function prepareArray(arr) { - var i, len, results, x; - results = []; - - for (i = 0, len = arr.length; i < len; i++) { - x = arr[i]; - results.push(x != null ? x.toString() : ""); - } - - return results; - } - }, { - key: "prepareObject", - value: function prepareObject(obj) { - var arr, k, v; - arr = []; - - for (k in obj) { - v = obj[k]; - arr.push(k, v != null ? v.toString() : ""); - } - - return arr; - } - }, { - key: "prepareInitSettings", - value: function prepareInitSettings(clear) { - var args; - args = this.prepareObject(Object.assign({}, this.storeOptions, { - id: this.originalId, - version: this.instance.version, - groupTimeout: this.timeout, - clientTimeout: this.clientTimeout - })); - args.unshift(clear ? 
1 : 0, this.instance.version); - return args; - } - }, { - key: "convertBool", - value: function convertBool(b) { - return !!b; - } - }, { - key: "__updateSettings__", - value: function () { - var _updateSettings__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee5(options) { - return regeneratorRuntime.wrap(function _callee5$(_context5) { - while (1) { - switch (_context5.prev = _context5.next) { - case 0: - _context5.next = 2; - return this.runScript("update_settings", this.prepareObject(options)); - - case 2: - return _context5.abrupt("return", parser$5.overwrite(options, options, this.storeOptions)); - - case 3: - case "end": - return _context5.stop(); - } - } - }, _callee5, this); - })); - - function __updateSettings__(_x6) { - return _updateSettings__.apply(this, arguments); - } - - return __updateSettings__; - }() - }, { - key: "__running__", - value: function __running__() { - return this.runScript("running", []); - } - }, { - key: "__queued__", - value: function __queued__() { - return this.runScript("queued", []); - } - }, { - key: "__done__", - value: function __done__() { - return this.runScript("done", []); - } - }, { - key: "__groupCheck__", - value: function () { - var _groupCheck__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee6() { - return regeneratorRuntime.wrap(function _callee6$(_context6) { - while (1) { - switch (_context6.prev = _context6.next) { - case 0: - _context6.t0 = this; - _context6.next = 3; - return this.runScript("group_check", []); - - case 3: - _context6.t1 = _context6.sent; - return _context6.abrupt("return", _context6.t0.convertBool.call(_context6.t0, _context6.t1)); - - case 5: - case "end": - return _context6.stop(); - } - } - }, _callee6, this); - })); - - function __groupCheck__() { - return _groupCheck__.apply(this, arguments); - } - - return __groupCheck__; - }() - }, { - key: "__incrementReservoir__", - value: function __incrementReservoir__(incr) { - return 
this.runScript("increment_reservoir", [incr]); - } - }, { - key: "__currentReservoir__", - value: function __currentReservoir__() { - return this.runScript("current_reservoir", []); - } - }, { - key: "__check__", - value: function () { - var _check__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee7(weight) { - return regeneratorRuntime.wrap(function _callee7$(_context7) { - while (1) { - switch (_context7.prev = _context7.next) { - case 0: - _context7.t0 = this; - _context7.next = 3; - return this.runScript("check", this.prepareArray([weight])); - - case 3: - _context7.t1 = _context7.sent; - return _context7.abrupt("return", _context7.t0.convertBool.call(_context7.t0, _context7.t1)); - - case 5: - case "end": - return _context7.stop(); - } - } - }, _callee7, this); - })); - - function __check__(_x7) { - return _check__.apply(this, arguments); - } - - return __check__; - }() - }, { - key: "__register__", - value: function () { - var _register__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee8(index, weight, expiration) { - var reservoir, success, wait, _ref4, _ref5; - - return regeneratorRuntime.wrap(function _callee8$(_context8) { - while (1) { - switch (_context8.prev = _context8.next) { - case 0: - _context8.next = 2; - return this.runScript("register", this.prepareArray([index, weight, expiration])); - - case 2: - _ref4 = _context8.sent; - _ref5 = _slicedToArray(_ref4, 3); - success = _ref5[0]; - wait = _ref5[1]; - reservoir = _ref5[2]; - return _context8.abrupt("return", { - success: this.convertBool(success), - wait: wait, - reservoir: reservoir - }); - - case 8: - case "end": - return _context8.stop(); - } - } - }, _callee8, this); - })); - - function __register__(_x8, _x9, _x10) { - return _register__.apply(this, arguments); - } - - return __register__; - }() - }, { - key: "__submit__", - value: function () { - var _submit__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function 
_callee9(queueLength, weight) { - var blocked, e, maxConcurrent, overweight, reachedHWM, strategy, _ref6, _ref7, _e$message$split, _e$message$split2; - - return regeneratorRuntime.wrap(function _callee9$(_context9) { - while (1) { - switch (_context9.prev = _context9.next) { - case 0: - _context9.prev = 0; - _context9.next = 3; - return this.runScript("submit", this.prepareArray([queueLength, weight])); - - case 3: - _ref6 = _context9.sent; - _ref7 = _slicedToArray(_ref6, 3); - reachedHWM = _ref7[0]; - blocked = _ref7[1]; - strategy = _ref7[2]; - return _context9.abrupt("return", { - reachedHWM: this.convertBool(reachedHWM), - blocked: this.convertBool(blocked), - strategy: strategy - }); - - case 11: - _context9.prev = 11; - _context9.t0 = _context9["catch"](0); - e = _context9.t0; - - if (!(e.message.indexOf("OVERWEIGHT") === 0)) { - _context9.next = 23; - break; - } - - _e$message$split = e.message.split(":"); - _e$message$split2 = _slicedToArray(_e$message$split, 3); - overweight = _e$message$split2[0]; - weight = _e$message$split2[1]; - maxConcurrent = _e$message$split2[2]; - throw new BottleneckError$3("Impossible to add a job having a weight of ".concat(weight, " to a limiter having a maxConcurrent setting of ").concat(maxConcurrent)); - - case 23: - throw e; - - case 24: - case "end": - return _context9.stop(); - } - } - }, _callee9, this, [[0, 11]]); - })); - - function __submit__(_x11, _x12) { - return _submit__.apply(this, arguments); - } - - return __submit__; - }() - }, { - key: "__free__", - value: function () { - var _free__ = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee10(index, weight) { - var running; - return regeneratorRuntime.wrap(function _callee10$(_context10) { - while (1) { - switch (_context10.prev = _context10.next) { - case 0: - _context10.next = 2; - return this.runScript("free", this.prepareArray([index])); - - case 2: - running = _context10.sent; - return _context10.abrupt("return", { - running: 
running - }); - - case 4: - case "end": - return _context10.stop(); - } - } - }, _callee10, this); - })); - - function __free__(_x13, _x14) { - return _free__.apply(this, arguments); - } - - return __free__; - }() - }]); - - return RedisDatastore; - }(); - - var RedisDatastore_1 = RedisDatastore; - - var BottleneckError$4, States; - BottleneckError$4 = BottleneckError_1; - - States = - /*#__PURE__*/ - function () { - function States(status1) { - _classCallCheck(this, States); - - this.status = status1; - this._jobs = {}; - this.counts = this.status.map(function () { - return 0; - }); - } - - _createClass(States, [{ - key: "next", - value: function next(id) { - var current, next; - current = this._jobs[id]; - next = current + 1; - - if (current != null && next < this.status.length) { - this.counts[current]--; - this.counts[next]++; - return this._jobs[id]++; - } else if (current != null) { - this.counts[current]--; - return delete this._jobs[id]; - } - } - }, { - key: "start", - value: function start(id) { - var initial; - initial = 0; - this._jobs[id] = initial; - return this.counts[initial]++; - } - }, { - key: "remove", - value: function remove(id) { - var current; - current = this._jobs[id]; - - if (current != null) { - this.counts[current]--; - delete this._jobs[id]; - } - - return current != null; - } - }, { - key: "jobStatus", - value: function jobStatus(id) { - var ref; - return (ref = this.status[this._jobs[id]]) != null ? 
ref : null; - } - }, { - key: "statusJobs", - value: function statusJobs(status) { - var k, pos, ref, results, v; - - if (status != null) { - pos = this.status.indexOf(status); - - if (pos < 0) { - throw new BottleneckError$4("status must be one of ".concat(this.status.join(', '))); - } - - ref = this._jobs; - results = []; - - for (k in ref) { - v = ref[k]; - - if (v === pos) { - results.push(k); - } - } - - return results; - } else { - return Object.keys(this._jobs); - } - } - }, { - key: "statusCounts", - value: function statusCounts() { - var _this = this; - - return this.counts.reduce(function (acc, v, i) { - acc[_this.status[i]] = v; - return acc; - }, {}); - } - }]); - - return States; - }(); - - var States_1 = States; - - var DLList$2, Sync; - DLList$2 = DLList_1; - - Sync = - /*#__PURE__*/ - function () { - function Sync(name, Promise) { - _classCallCheck(this, Sync); - - this.schedule = this.schedule.bind(this); - this.name = name; - this.Promise = Promise; - this._running = 0; - this._queue = new DLList$2(); - } - - _createClass(Sync, [{ - key: "isEmpty", - value: function isEmpty() { - return this._queue.length === 0; - } - }, { - key: "_tryToRun", - value: function () { - var _tryToRun2 = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee2() { - var args, cb, error, reject, resolve, returned, task, _this$_queue$shift; - - return regeneratorRuntime.wrap(function _callee2$(_context2) { - while (1) { - switch (_context2.prev = _context2.next) { - case 0: - if (!(this._running < 1 && this._queue.length > 0)) { - _context2.next = 13; - break; - } - - this._running++; - _this$_queue$shift = this._queue.shift(); - task = _this$_queue$shift.task; - args = _this$_queue$shift.args; - resolve = _this$_queue$shift.resolve; - reject = _this$_queue$shift.reject; - _context2.next = 9; - return _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee() { - return regeneratorRuntime.wrap(function _callee$(_context) { - 
while (1) { - switch (_context.prev = _context.next) { - case 0: - _context.prev = 0; - _context.next = 3; - return task.apply(void 0, _toConsumableArray(args)); - - case 3: - returned = _context.sent; - return _context.abrupt("return", function () { - return resolve(returned); - }); - - case 7: - _context.prev = 7; - _context.t0 = _context["catch"](0); - error = _context.t0; - return _context.abrupt("return", function () { - return reject(error); - }); - - case 11: - case "end": - return _context.stop(); - } - } - }, _callee, null, [[0, 7]]); - }))(); - - case 9: - cb = _context2.sent; - this._running--; - - this._tryToRun(); - - return _context2.abrupt("return", cb()); - - case 13: - case "end": - return _context2.stop(); - } - } - }, _callee2, this); - })); - - function _tryToRun() { - return _tryToRun2.apply(this, arguments); - } - - return _tryToRun; - }() - }, { - key: "schedule", - value: function schedule(task) { - var promise, reject, resolve; - resolve = reject = null; - promise = new this.Promise(function (_resolve, _reject) { - resolve = _resolve; - return reject = _reject; - }); - - for (var _len = arguments.length, args = new Array(_len > 1 ? 
_len - 1 : 0), _key = 1; _key < _len; _key++) { - args[_key - 1] = arguments[_key]; - } - - this._queue.push({ - task: task, - args: args, - resolve: resolve, - reject: reject - }); - - this._tryToRun(); - - return promise; - } - }]); - - return Sync; - }(); - - var Sync_1 = Sync; - - var version = "3.0.7"; - var version$1 = { - version: version - }; - - var version$2 = /*#__PURE__*/Object.freeze({ - version: version, - default: version$1 - }); - - var Events$4, Group, IORedisConnection$2, RedisConnection$2, Scripts$3, parser$6; - parser$6 = parser; - Events$4 = Events_1; - RedisConnection$2 = RedisConnection_1; - IORedisConnection$2 = IORedisConnection_1; - Scripts$3 = Scripts; - - Group = function () { - var Group = - /*#__PURE__*/ - function () { - function Group() { - var limiterOptions = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; - - _classCallCheck(this, Group); - - this.deleteKey = this.deleteKey.bind(this); - this.limiterOptions = limiterOptions; - parser$6.load(this.limiterOptions, this.defaults, this); - this.Events = new Events$4(this); - this.instances = {}; - this.Bottleneck = Bottleneck_1; - - this._startAutoCleanup(); - - this.sharedConnection = this.connection != null; - - if (this.connection == null) { - if (this.limiterOptions.datastore === "redis") { - this.connection = new RedisConnection$2(Object.assign({}, this.limiterOptions, { - Events: this.Events - })); - } else if (this.limiterOptions.datastore === "ioredis") { - this.connection = new IORedisConnection$2(Object.assign({}, this.limiterOptions, { - Events: this.Events - })); - } - } - } - - _createClass(Group, [{ - key: "key", - value: function key() { - var _this = this; - - var _key = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : ""; - - var ref; - return (ref = this.instances[_key]) != null ? 
ref : function () { - var limiter; - limiter = _this.instances[_key] = new _this.Bottleneck(Object.assign(_this.limiterOptions, { - id: "".concat(_this.id, "-").concat(_key), - timeout: _this.timeout, - connection: _this.connection - })); - - _this.Events.trigger("created", limiter, _key); - - return limiter; - }(); - } - }, { - key: "deleteKey", - value: function () { - var _deleteKey = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee() { - var key, - deleted, - instance, - _args = arguments; - return regeneratorRuntime.wrap(function _callee$(_context) { - while (1) { - switch (_context.prev = _context.next) { - case 0: - key = _args.length > 0 && _args[0] !== undefined ? _args[0] : ""; - instance = this.instances[key]; - - if (!this.connection) { - _context.next = 6; - break; - } - - _context.next = 5; - return this.connection.__runCommand__(['del'].concat(_toConsumableArray(Scripts$3.allKeys("".concat(this.id, "-").concat(key))))); - - case 5: - deleted = _context.sent; - - case 6: - if (!(instance != null)) { - _context.next = 10; - break; - } - - delete this.instances[key]; - _context.next = 10; - return instance.disconnect(); - - case 10: - return _context.abrupt("return", instance != null || deleted > 0); - - case 11: - case "end": - return _context.stop(); - } - } - }, _callee, this); - })); - - function deleteKey() { - return _deleteKey.apply(this, arguments); - } - - return deleteKey; - }() - }, { - key: "limiters", - value: function limiters() { - var k, ref, results, v; - ref = this.instances; - results = []; - - for (k in ref) { - v = ref[k]; - results.push({ - key: k, - limiter: v - }); - } - - return results; - } - }, { - key: "keys", - value: function keys() { - return Object.keys(this.instances); - } - }, { - key: "clusterKeys", - value: function () { - var _clusterKeys = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee2() { - var cursor, end, found, i, k, keys, len, next, start, _ref, 
_ref2; - - return regeneratorRuntime.wrap(function _callee2$(_context2) { - while (1) { - switch (_context2.prev = _context2.next) { - case 0: - if (!(this.connection == null)) { - _context2.next = 2; - break; - } - - return _context2.abrupt("return", this.Promise.resolve(this.keys())); - - case 2: - keys = []; - cursor = null; - start = "b_".concat(this.id, "-").length; - end = "_settings".length; - - case 6: - if (!(cursor !== 0)) { - _context2.next = 17; - break; - } - - _context2.next = 9; - return this.connection.__runCommand__(["scan", cursor != null ? cursor : 0, "match", "b_".concat(this.id, "-*_settings"), "count", 10000]); - - case 9: - _ref = _context2.sent; - _ref2 = _slicedToArray(_ref, 2); - next = _ref2[0]; - found = _ref2[1]; - cursor = ~~next; - - for (i = 0, len = found.length; i < len; i++) { - k = found[i]; - keys.push(k.slice(start, -end)); - } - - _context2.next = 6; - break; - - case 17: - return _context2.abrupt("return", keys); - - case 18: - case "end": - return _context2.stop(); - } - } - }, _callee2, this); - })); - - function clusterKeys() { - return _clusterKeys.apply(this, arguments); - } - - return clusterKeys; - }() - }, { - key: "_startAutoCleanup", - value: function _startAutoCleanup() { - var _this2 = this; - - var base; - clearInterval(this.interval); - return typeof (base = this.interval = setInterval( - /*#__PURE__*/ - _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee3() { - var e, k, ref, results, time, v; - return regeneratorRuntime.wrap(function _callee3$(_context3) { - while (1) { - switch (_context3.prev = _context3.next) { - case 0: - time = Date.now(); - ref = _this2.instances; - results = []; - _context3.t0 = regeneratorRuntime.keys(ref); - - case 4: - if ((_context3.t1 = _context3.t0()).done) { - _context3.next = 23; - break; - } - - k = _context3.t1.value; - v = ref[k]; - _context3.prev = 7; - _context3.next = 10; - return v._store.__groupCheck__(time); - - case 10: - if (!_context3.sent) 
{ - _context3.next = 14; - break; - } - - results.push(_this2.deleteKey(k)); - _context3.next = 15; - break; - - case 14: - results.push(void 0); - - case 15: - _context3.next = 21; - break; - - case 17: - _context3.prev = 17; - _context3.t2 = _context3["catch"](7); - e = _context3.t2; - results.push(v.Events.trigger("error", e)); - - case 21: - _context3.next = 4; - break; - - case 23: - return _context3.abrupt("return", results); - - case 24: - case "end": - return _context3.stop(); - } - } - }, _callee3, null, [[7, 17]]); - })), this.timeout / 2)).unref === "function" ? base.unref() : void 0; - } - }, { - key: "updateSettings", - value: function updateSettings() { - var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; - parser$6.overwrite(options, this.defaults, this); - parser$6.overwrite(options, options, this.limiterOptions); - - if (options.timeout != null) { - return this._startAutoCleanup(); - } - } - }, { - key: "disconnect", - value: function disconnect() { - var flush = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true; - var ref; - - if (!this.sharedConnection) { - return (ref = this.connection) != null ? ref.disconnect(flush) : void 0; - } - } - }]); - - return Group; - }(); - Group.prototype.defaults = { - timeout: 1000 * 60 * 5, - connection: null, - Promise: Promise, - id: "group-key" - }; - return Group; - }.call(commonjsGlobal); - - var Group_1 = Group; - - var Batcher, Events$5, parser$7; - parser$7 = parser; - Events$5 = Events_1; - - Batcher = function () { - var Batcher = - /*#__PURE__*/ - function () { - function Batcher() { - var options = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {}; - - _classCallCheck(this, Batcher); - - this.options = options; - parser$7.load(this.options, this.defaults, this); - this.Events = new Events$5(this); - this._arr = []; - - this._resetPromise(); - - this._lastFlush = Date.now(); - } - - _createClass(Batcher, [{ - key: "_resetPromise", - value: function _resetPromise() { - var _this = this; - - return this._promise = new this.Promise(function (res, rej) { - return _this._resolve = res; - }); - } - }, { - key: "_flush", - value: function _flush() { - clearTimeout(this._timeout); - this._lastFlush = Date.now(); - - this._resolve(); - - this.Events.trigger("batch", this._arr); - this._arr = []; - return this._resetPromise(); - } - }, { - key: "add", - value: function add(data) { - var _this2 = this; - - var ret; - - this._arr.push(data); - - ret = this._promise; - - if (this._arr.length === this.maxSize) { - this._flush(); - } else if (this.maxTime != null && this._arr.length === 1) { - this._timeout = setTimeout(function () { - return _this2._flush(); - }, this.maxTime); - } - - return ret; - } - }]); - - return Batcher; - }(); - Batcher.prototype.defaults = { - maxTime: null, - maxSize: null, - Promise: Promise - }; - return Batcher; - }.call(commonjsGlobal); - - var Batcher_1 = Batcher; - - var require$$8 = getCjsExportFromNamespace(version$2); - - var Bottleneck, - DEFAULT_PRIORITY$1, - Events$6, - Job$1, - LocalDatastore$1, - NUM_PRIORITIES$1, - Queues$1, - RedisDatastore$1, - States$1, - Sync$1, - parser$8, - splice = [].splice; - NUM_PRIORITIES$1 = 10; - DEFAULT_PRIORITY$1 = 5; - parser$8 = parser; - Queues$1 = Queues_1; - Job$1 = Job_1; - LocalDatastore$1 = LocalDatastore_1; - RedisDatastore$1 = RedisDatastore_1; - Events$6 = Events_1; - States$1 = States_1; - Sync$1 = Sync_1; - - Bottleneck = function () { - var Bottleneck = - /*#__PURE__*/ - function () { - function Bottleneck() { - var _this = this; - - var options = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {}; - - _classCallCheck(this, Bottleneck); - - var storeInstanceOptions, storeOptions; - this._addToQueue = this._addToQueue.bind(this); - - for (var _len = arguments.length, invalid = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { - invalid[_key - 1] = arguments[_key]; - } - - this._validateOptions(options, invalid); - - parser$8.load(options, this.instanceDefaults, this); - this._queues = new Queues$1(NUM_PRIORITIES$1); - this._scheduled = {}; - this._states = new States$1(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : [])); - this._limiter = null; - this.Events = new Events$6(this); - this._submitLock = new Sync$1("submit", this.Promise); - this._registerLock = new Sync$1("register", this.Promise); - storeOptions = parser$8.load(options, this.storeDefaults, {}); - - this._store = function () { - if (this.datastore === "redis" || this.datastore === "ioredis" || this.connection != null) { - storeInstanceOptions = parser$8.load(options, this.redisStoreDefaults, {}); - return new RedisDatastore$1(this, storeOptions, storeInstanceOptions); - } else if (this.datastore === "local") { - storeInstanceOptions = parser$8.load(options, this.localStoreDefaults, {}); - return new LocalDatastore$1(this, storeOptions, storeInstanceOptions); - } else { - throw new Bottleneck.prototype.BottleneckError("Invalid datastore type: ".concat(this.datastore)); - } - }.call(this); - - this._queues.on("leftzero", function () { - var ref; - return (ref = _this._store.heartbeat) != null ? typeof ref.ref === "function" ? ref.ref() : void 0 : void 0; - }); - - this._queues.on("zero", function () { - var ref; - return (ref = _this._store.heartbeat) != null ? typeof ref.unref === "function" ? 
ref.unref() : void 0 : void 0; - }); - } - - _createClass(Bottleneck, [{ - key: "_validateOptions", - value: function _validateOptions(options, invalid) { - if (!(options != null && _typeof(options) === "object" && invalid.length === 0)) { - throw new Bottleneck.prototype.BottleneckError("Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1."); - } - } - }, { - key: "ready", - value: function ready() { - return this._store.ready; - } - }, { - key: "clients", - value: function clients() { - return this._store.clients; - } - }, { - key: "channel", - value: function channel() { - return "b_".concat(this.id); - } - }, { - key: "channel_client", - value: function channel_client() { - return "b_".concat(this.id, "_").concat(this._store.clientId); - } - }, { - key: "publish", - value: function publish(message) { - return this._store.__publish__(message); - } - }, { - key: "disconnect", - value: function disconnect() { - var flush = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : true; - return this._store.__disconnect__(flush); - } - }, { - key: "chain", - value: function chain(_limiter) { - this._limiter = _limiter; - return this; - } - }, { - key: "queued", - value: function queued(priority) { - return this._queues.queued(priority); - } - }, { - key: "clusterQueued", - value: function clusterQueued() { - return this._store.__queued__(); - } - }, { - key: "empty", - value: function empty() { - return this.queued() === 0 && this._submitLock.isEmpty(); - } - }, { - key: "running", - value: function running() { - return this._store.__running__(); - } - }, { - key: "done", - value: function done() { - return this._store.__done__(); - } - }, { - key: "jobStatus", - value: function jobStatus(id) { - return this._states.jobStatus(id); - } - }, { - key: "jobs", - value: function jobs(status) { - return this._states.statusJobs(status); - } - }, { - key: "counts", - value: function counts() { - return this._states.statusCounts(); - } - }, { - key: "_randomIndex", - value: function _randomIndex() { - return Math.random().toString(36).slice(2); - } - }, { - key: "check", - value: function check() { - var weight = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : 1; - return this._store.__check__(weight); - } - }, { - key: "_clearGlobalState", - value: function _clearGlobalState(index) { - if (this._scheduled[index] != null) { - clearTimeout(this._scheduled[index].expiration); - delete this._scheduled[index]; - return true; - } else { - return false; - } - } - }, { - key: "_free", - value: function () { - var _free2 = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee(index, job, options, eventInfo) { - var e, running, _ref; - - return regeneratorRuntime.wrap(function _callee$(_context) { - while (1) { - switch (_context.prev = _context.next) { - case 0: - _context.prev = 0; - _context.next = 3; - return this._store.__free__(index, options.weight); - - case 3: - _ref = _context.sent; - running = _ref.running; - this.Events.trigger("debug", "Freed ".concat(options.id), eventInfo); - - if (!(running === 0 && this.empty())) { - _context.next = 8; - break; - } - - return _context.abrupt("return", this.Events.trigger("idle")); - - case 8: - _context.next = 14; - break; - - case 10: - _context.prev = 10; - _context.t0 = _context["catch"](0); - e = _context.t0; - return _context.abrupt("return", this.Events.trigger("error", e)); - - case 14: - case "end": - return _context.stop(); - } - } - }, _callee, this, [[0, 10]]); - })); - - function _free(_x, _x2, _x3, _x4) { - return _free2.apply(this, arguments); - } - - return _free; - }() - }, { - key: "_run", - value: function _run(index, job, wait) { - var _this2 = this; - - var clearGlobalState, free, run; - job.doRun(); - clearGlobalState = this._clearGlobalState.bind(this, index); - run = this._run.bind(this, index, job); - free = this._free.bind(this, index, job); - return this._scheduled[index] = { - timeout: setTimeout(function () { - return job.doExecute(_this2._limiter, clearGlobalState, run, free); - }, wait), - expiration: job.options.expiration != null ? 
setTimeout(function () { - return job.doExpire(clearGlobalState, run, free); - }, wait + job.options.expiration) : void 0, - job: job - }; - } - }, { - key: "_drainOne", - value: function _drainOne(capacity) { - var _this3 = this; - - return this._registerLock.schedule(function () { - var args, index, next, options, queue; - - if (_this3.queued() === 0) { - return _this3.Promise.resolve(null); - } - - queue = _this3._queues.getFirst(); - - var _next = next = queue.first(); - - options = _next.options; - args = _next.args; - - if (capacity != null && options.weight > capacity) { - return _this3.Promise.resolve(null); - } - - _this3.Events.trigger("debug", "Draining ".concat(options.id), { - args: args, - options: options - }); - - index = _this3._randomIndex(); - return _this3._store.__register__(index, options.weight, options.expiration).then(function (_ref2) { - var success = _ref2.success, - wait = _ref2.wait, - reservoir = _ref2.reservoir; - var empty; - - _this3.Events.trigger("debug", "Drained ".concat(options.id), { - success: success, - args: args, - options: options - }); - - if (success) { - queue.shift(); - empty = _this3.empty(); - - if (empty) { - _this3.Events.trigger("empty"); - } - - if (reservoir === 0) { - _this3.Events.trigger("depleted", empty); - } - - _this3._run(index, next, wait); - - return _this3.Promise.resolve(options.weight); - } else { - return _this3.Promise.resolve(null); - } - }); - }); - } - }, { - key: "_drainAll", - value: function _drainAll(capacity) { - var _this4 = this; - - var total = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; - return this._drainOne(capacity).then(function (drained) { - var newCapacity; - - if (drained != null) { - newCapacity = capacity != null ? 
capacity - drained : capacity; - return _this4._drainAll(newCapacity, total + drained); - } else { - return _this4.Promise.resolve(total); - } - })["catch"](function (e) { - return _this4.Events.trigger("error", e); - }); - } - }, { - key: "_dropAllQueued", - value: function _dropAllQueued(message) { - return this._queues.shiftAll(function (job) { - return job.doDrop({ - message: message - }); - }); - } - }, { - key: "stop", - value: function stop() { - var _this5 = this; - - var options = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; - var done, waitForExecuting; - options = parser$8.load(options, this.stopDefaults); - - waitForExecuting = function waitForExecuting(at) { - var finished; - - finished = function finished() { - var counts; - counts = _this5._states.counts; - return counts[0] + counts[1] + counts[2] + counts[3] === at; - }; - - return new _this5.Promise(function (resolve, reject) { - if (finished()) { - return resolve(); - } else { - return _this5.on("done", function () { - if (finished()) { - _this5.removeAllListeners("done"); - - return resolve(); - } - }); - } - }); - }; - - done = options.dropWaitingJobs ? 
(this._run = function (index, next) { - return next.doDrop({ - message: options.dropErrorMessage - }); - }, this._drainOne = function () { - return _this5.Promise.resolve(null); - }, this._registerLock.schedule(function () { - return _this5._submitLock.schedule(function () { - var k, ref, v; - ref = _this5._scheduled; - - for (k in ref) { - v = ref[k]; - - if (_this5.jobStatus(v.job.options.id) === "RUNNING") { - clearTimeout(v.timeout); - clearTimeout(v.expiration); - v.job.doDrop({ - message: options.dropErrorMessage - }); - } - } - - _this5._dropAllQueued(options.dropErrorMessage); - - return waitForExecuting(0); - }); - })) : this.schedule({ - priority: NUM_PRIORITIES$1 - 1, - weight: 0 - }, function () { - return waitForExecuting(1); - }); - - this._receive = function (job) { - return job._reject(new Bottleneck.prototype.BottleneckError(options.enqueueErrorMessage)); - }; - - this.stop = function () { - return _this5.Promise.reject(new Bottleneck.prototype.BottleneckError("stop() has already been called")); - }; - - return done; - } - }, { - key: "_addToQueue", - value: function () { - var _addToQueue2 = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee2(job) { - var args, blocked, error, options, reachedHWM, shifted, strategy, _ref3; - - return regeneratorRuntime.wrap(function _callee2$(_context2) { - while (1) { - switch (_context2.prev = _context2.next) { - case 0: - args = job.args; - options = job.options; - _context2.prev = 2; - _context2.next = 5; - return this._store.__submit__(this.queued(), options.weight); - - case 5: - _ref3 = _context2.sent; - reachedHWM = _ref3.reachedHWM; - blocked = _ref3.blocked; - strategy = _ref3.strategy; - _context2.next = 17; - break; - - case 11: - _context2.prev = 11; - _context2.t0 = _context2["catch"](2); - error = _context2.t0; - this.Events.trigger("debug", "Could not queue ".concat(options.id), { - args: args, - options: options, - error: error - }); - job.doDrop({ - error: error - }); - 
return _context2.abrupt("return", false); - - case 17: - if (!blocked) { - _context2.next = 22; - break; - } - - job.doDrop(); - return _context2.abrupt("return", true); - - case 22: - if (!reachedHWM) { - _context2.next = 28; - break; - } - - shifted = strategy === Bottleneck.prototype.strategy.LEAK ? this._queues.shiftLastFrom(options.priority) : strategy === Bottleneck.prototype.strategy.OVERFLOW_PRIORITY ? this._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck.prototype.strategy.OVERFLOW ? job : void 0; - - if (shifted != null) { - shifted.doDrop(); - } - - if (!(shifted == null || strategy === Bottleneck.prototype.strategy.OVERFLOW)) { - _context2.next = 28; - break; - } - - if (shifted == null) { - job.doDrop(); - } - - return _context2.abrupt("return", reachedHWM); - - case 28: - job.doQueue(reachedHWM, blocked); - - this._queues.push(job); - - _context2.next = 32; - return this._drainAll(); - - case 32: - return _context2.abrupt("return", reachedHWM); - - case 33: - case "end": - return _context2.stop(); - } - } - }, _callee2, this, [[2, 11]]); - })); - - function _addToQueue(_x5) { - return _addToQueue2.apply(this, arguments); - } - - return _addToQueue; - }() - }, { - key: "_receive", - value: function _receive(job) { - if (this._states.jobStatus(job.options.id) != null) { - job._reject(new Bottleneck.prototype.BottleneckError("A job with the same id already exists (id=".concat(job.options.id, ")"))); - - return false; - } else { - job.doReceive(); - return this._submitLock.schedule(this._addToQueue, job); - } - } - }, { - key: "submit", - value: function submit() { - var _this6 = this; - - for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { - args[_key2] = arguments[_key2]; - } - - var cb, fn, job, options, ref, ref1, task; - - if (typeof args[0] === "function") { - var _ref4, _ref5, _splice$call, _splice$call2; - - ref = args, (_ref4 = ref, _ref5 = _toArray(_ref4), fn = _ref5[0], args = 
_ref5.slice(1), _ref4), (_splice$call = splice.call(args, -1), _splice$call2 = _slicedToArray(_splice$call, 1), cb = _splice$call2[0], _splice$call); - options = parser$8.load({}, this.jobDefaults); - } else { - var _ref6, _ref7, _splice$call3, _splice$call4; - - ref1 = args, (_ref6 = ref1, _ref7 = _toArray(_ref6), options = _ref7[0], fn = _ref7[1], args = _ref7.slice(2), _ref6), (_splice$call3 = splice.call(args, -1), _splice$call4 = _slicedToArray(_splice$call3, 1), cb = _splice$call4[0], _splice$call3); - options = parser$8.load(options, this.jobDefaults); - } - - task = function task() { - for (var _len3 = arguments.length, args = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) { - args[_key3] = arguments[_key3]; - } - - return new _this6.Promise(function (resolve, reject) { - return fn.apply(void 0, args.concat([function () { - for (var _len4 = arguments.length, args = new Array(_len4), _key4 = 0; _key4 < _len4; _key4++) { - args[_key4] = arguments[_key4]; - } - - return (args[0] != null ? reject : resolve)(args); - }])); - }); - }; - - job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); - job.promise.then(function (args) { - return typeof cb === "function" ? cb.apply(void 0, _toConsumableArray(args)) : void 0; - })["catch"](function (args) { - if (Array.isArray(args)) { - return typeof cb === "function" ? cb.apply(void 0, _toConsumableArray(args)) : void 0; - } else { - return typeof cb === "function" ? 
cb(args) : void 0; - } - }); - return this._receive(job); - } - }, { - key: "schedule", - value: function schedule() { - for (var _len5 = arguments.length, args = new Array(_len5), _key5 = 0; _key5 < _len5; _key5++) { - args[_key5] = arguments[_key5]; - } - - var job, options, task; - - if (typeof args[0] === "function") { - var _args3 = args; - - var _args4 = _toArray(_args3); - - task = _args4[0]; - args = _args4.slice(1); - options = {}; - } else { - var _args5 = args; - - var _args6 = _toArray(_args5); - - options = _args6[0]; - task = _args6[1]; - args = _args6.slice(2); - } - - job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); - - this._receive(job); - - return job.promise; - } - }, { - key: "wrap", - value: function wrap(fn) { - var schedule, wrapped; - schedule = this.schedule.bind(this); - - wrapped = function wrapped() { - for (var _len6 = arguments.length, args = new Array(_len6), _key6 = 0; _key6 < _len6; _key6++) { - args[_key6] = arguments[_key6]; - } - - return schedule.apply(void 0, [fn.bind(this)].concat(args)); - }; - - wrapped.withOptions = function (options) { - for (var _len7 = arguments.length, args = new Array(_len7 > 1 ? _len7 - 1 : 0), _key7 = 1; _key7 < _len7; _key7++) { - args[_key7 - 1] = arguments[_key7]; - } - - return schedule.apply(void 0, [options, fn].concat(args)); - }; - - return wrapped; - } - }, { - key: "updateSettings", - value: function () { - var _updateSettings = _asyncToGenerator( - /*#__PURE__*/ - regeneratorRuntime.mark(function _callee3() { - var options, - _args7 = arguments; - return regeneratorRuntime.wrap(function _callee3$(_context3) { - while (1) { - switch (_context3.prev = _context3.next) { - case 0: - options = _args7.length > 0 && _args7[0] !== undefined ? 
_args7[0] : {}; - _context3.next = 3; - return this._store.__updateSettings__(parser$8.overwrite(options, this.storeDefaults)); - - case 3: - parser$8.overwrite(options, this.instanceDefaults, this); - return _context3.abrupt("return", this); - - case 5: - case "end": - return _context3.stop(); - } - } - }, _callee3, this); - })); - - function updateSettings() { - return _updateSettings.apply(this, arguments); - } - - return updateSettings; - }() - }, { - key: "currentReservoir", - value: function currentReservoir() { - return this._store.__currentReservoir__(); - } - }, { - key: "incrementReservoir", - value: function incrementReservoir() { - var incr = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0; - return this._store.__incrementReservoir__(incr); - } - }]); - - return Bottleneck; - }(); - Bottleneck["default"] = Bottleneck; - Bottleneck.Events = Events$6; - Bottleneck.version = Bottleneck.prototype.version = require$$8.version; - Bottleneck.strategy = Bottleneck.prototype.strategy = { - LEAK: 1, - OVERFLOW: 2, - OVERFLOW_PRIORITY: 4, - BLOCK: 3 - }; - Bottleneck.BottleneckError = Bottleneck.prototype.BottleneckError = BottleneckError_1; - Bottleneck.Group = Bottleneck.prototype.Group = Group_1; - Bottleneck.RedisConnection = Bottleneck.prototype.RedisConnection = RedisConnection_1; - Bottleneck.IORedisConnection = Bottleneck.prototype.IORedisConnection = IORedisConnection_1; - Bottleneck.Batcher = Bottleneck.prototype.Batcher = Batcher_1; - Bottleneck.prototype.jobDefaults = { - priority: DEFAULT_PRIORITY$1, - weight: 1, - expiration: null, - id: "" - }; - Bottleneck.prototype.storeDefaults = { - maxConcurrent: null, - minTime: 0, - highWater: null, - strategy: Bottleneck.prototype.strategy.LEAK, - penalty: null, - reservoir: null, - reservoirRefreshInterval: null, - reservoirRefreshAmount: null, - reservoirIncreaseInterval: null, - reservoirIncreaseAmount: null, - reservoirIncreaseMaximum: null - }; - 
Bottleneck.prototype.localStoreDefaults = { - Promise: Promise, - timeout: null, - heartbeatInterval: 250 - }; - Bottleneck.prototype.redisStoreDefaults = { - Promise: Promise, - timeout: null, - heartbeatInterval: 5000, - clientTimeout: 10000, - Redis: null, - clientOptions: {}, - clusterNodes: null, - clearDatastore: false, - connection: null - }; - Bottleneck.prototype.instanceDefaults = { - datastore: "local", - connection: null, - id: "", - rejectOnDrop: true, - trackDoneStatus: false, - Promise: Promise - }; - Bottleneck.prototype.stopDefaults = { - enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.", - dropWaitingJobs: true, - dropErrorMessage: "This limiter has been stopped." - }; - return Bottleneck; - }.call(commonjsGlobal); - - var Bottleneck_1 = Bottleneck; - - var es5 = Bottleneck_1; - - return es5; - -}))); diff --git a/eslint.config.mjs b/eslint.config.mjs new file mode 100644 index 0000000..afe713a --- /dev/null +++ b/eslint.config.mjs @@ -0,0 +1,28 @@ +import pluginJs from "@eslint/js"; +import prettierConfig from "eslint-config-prettier"; +import globals from "globals"; + +/** @type {import('eslint').Linter.Config[]} */ +export default [ + { files: ["**/*.js"], languageOptions: { sourceType: "commonjs" } }, + { languageOptions: { globals: globals.node } }, + pluginJs.configs.recommended, + prettierConfig, + { + rules: { + "no-unused-vars": [ + "warn", + { + vars: "all", + varsIgnorePattern: "^_", + args: "after-used", + argsIgnorePattern: "^_", + ignoreRestSiblings: true, + }, + ], + }, + }, + { + ignores: ["**/.yarn", "lib/**", "eslint.config.mjs", "light.js", "test.js"], + }, +]; diff --git a/lib/Batcher.js b/lib/Batcher.js index f52892a..3880283 100644 --- a/lib/Batcher.js +++ b/lib/Batcher.js @@ -1,66 +1,40 @@ -"use strict"; - -var Batcher, Events, parser; -parser = require("./parser"); -Events = require("./Events"); - -Batcher = function () { - class Batcher { - constructor(options = {}) { - this.options = 
options; - parser.load(this.options, this.defaults, this); - this.Events = new Events(this); - this._arr = []; - - this._resetPromise(); - - this._lastFlush = Date.now(); +const parser = require("./parser"); +const Events = require("./Events"); +class Batcher { + constructor(options) { + this.defaults = { maxTime: null, maxSize: null }; + this.options = options !== null && options !== void 0 ? options : {}; + parser.load(this.options, this.defaults, this); + this.Events = new Events(this); + this._arr = []; + this._resetPromise(); + this._lastFlush = Date.now(); } - _resetPromise() { - return this._promise = new this.Promise((res, rej) => { - return this._resolve = res; - }); + this._promise = new Promise((res) => { + this._resolve = res; + }); } - _flush() { - clearTimeout(this._timeout); - this._lastFlush = Date.now(); - - this._resolve(); - - this.Events.trigger("batch", this._arr); - this._arr = []; - return this._resetPromise(); + clearTimeout(this._timeout); + this._lastFlush = Date.now(); + this._resolve(); + this.Events.trigger("batch", this._arr); + this._arr = []; + this._resetPromise(); } - add(data) { - var ret; - - this._arr.push(data); - - ret = this._promise; - - if (this._arr.length === this.maxSize) { - this._flush(); - } else if (this.maxTime != null && this._arr.length === 1) { - this._timeout = setTimeout(() => { - return this._flush(); - }, this.maxTime); - } - - return ret; + this._arr.push(data); + const existingPromise = this._promise; + if (this._arr.length === this.maxSize) { + this._flush(); + } + else if (this.maxTime != null && this._arr.length === 1) { + this._timeout = setTimeout(() => { + this._flush(); + }, this.maxTime); + } + return existingPromise; } - - } - - ; - Batcher.prototype.defaults = { - maxTime: null, - maxSize: null, - Promise: Promise - }; - return Batcher; -}.call(void 0); - -module.exports = Batcher; \ No newline at end of file +} +module.exports = Batcher; diff --git a/lib/Bottleneck.js b/lib/Bottleneck.js index 
ff640a1..5d9c4c0 100644 --- a/lib/Bottleneck.js +++ b/lib/Bottleneck.js @@ -1,594 +1,430 @@ -"use strict"; - -function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); } - -function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } - -function _toArray(arr) { return _arrayWithHoles(arr) || _iterableToArray(arr) || _nonIterableRest(); } - -function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } - -function _iterableToArray(iter) { if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter); } - -function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -var Bottleneck, - DEFAULT_PRIORITY, - Events, - Job, - LocalDatastore, - NUM_PRIORITIES, - Queues, - RedisDatastore, - States, - Sync, - parser, - splice = [].splice; -NUM_PRIORITIES = 10; -DEFAULT_PRIORITY = 5; -parser = 
require("./parser"); -Queues = require("./Queues"); -Job = require("./Job"); -LocalDatastore = require("./LocalDatastore"); -RedisDatastore = require("./RedisDatastore"); -Events = require("./Events"); -States = require("./States"); -Sync = require("./Sync"); - -Bottleneck = function () { - class Bottleneck { - constructor(options = {}, ...invalid) { - var storeInstanceOptions, storeOptions; - this._addToQueue = this._addToQueue.bind(this); - - this._validateOptions(options, invalid); - - parser.load(options, this.instanceDefaults, this); - this._queues = new Queues(NUM_PRIORITIES); - this._scheduled = {}; - this._states = new States(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : [])); - this._limiter = null; - this.Events = new Events(this); - this._submitLock = new Sync("submit", this.Promise); - this._registerLock = new Sync("register", this.Promise); - storeOptions = parser.load(options, this.storeDefaults, {}); - - this._store = function () { +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +const NUM_PRIORITIES = 10; +const DEFAULT_PRIORITY = 5; +const parser = require("./parser"); +const Queues = require("./Queues"); +const Job = require("./Job"); +const LocalDatastore = require("./LocalDatastore"); +const RedisDatastore = require("./RedisDatastore"); +const Events = require("./Events"); +const States = require("./States"); +const Sync = require("./Sync"); +const BottleneckError = require("./BottleneckError"); +const Group = require("./Group"); +const RedisConnection = require("./RedisConnection"); +const IORedisConnection = require("./IORedisConnection"); +const Batcher = require("./Batcher"); +const version = require("../package.json").version; +class Bottleneck { + constructor(options, ...invalid) { + this.version = version; + this.jobDefaults = { + priority: DEFAULT_PRIORITY, + weight: 1, + expiration: null, + id: "", + }; + this.storeDefaults = { + maxConcurrent: null, + minTime: 0, + highWater: null, + strategy: Bottleneck.strategy.LEAK, + penalty: null, + reservoir: null, + reservoirRefreshInterval: null, + reservoirRefreshAmount: null, + reservoirIncreaseInterval: null, + reservoirIncreaseAmount: null, + reservoirIncreaseMaximum: null, + }; + this.localStoreDefaults = { + Promise, + timeout: null, + heartbeatInterval: 250, + }; + this.redisStoreDefaults = { + Promise, + timeout: null, + heartbeatInterval: 5000, + clientTimeout: 10000, + Redis: null, + clientOptions: {}, + clusterNodes: null, + clearDatastore: false, + connection: null, + }; + this.instanceDefaults = { + datastore: "local", + connection: null, + id: "", + rejectOnDrop: true, + trackDoneStatus: false, + Promise, + }; + this.stopDefaults = { + enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.", + dropWaitingJobs: true, + dropErrorMessage: "This limiter has been stopped.", + }; + this._addToQueue 
= this._addToQueue.bind(this); + options !== null && options !== void 0 ? options : (options = {}); + this._validateOptions(options, invalid); + parser.load(options, this.instanceDefaults, this); + this._queues = new Queues(NUM_PRIORITIES); + this._scheduled = {}; + this._states = new States(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : [])); + this._limiter = null; + this.Events = new Events(this); + this._submitLock = new Sync("submit"); + this._registerLock = new Sync("register"); + const storeOptions = parser.load(options, this.storeDefaults, {}); if (this.datastore === "redis" || this.datastore === "ioredis" || this.connection != null) { - storeInstanceOptions = parser.load(options, this.redisStoreDefaults, {}); - return new RedisDatastore(this, storeOptions, storeInstanceOptions); - } else if (this.datastore === "local") { - storeInstanceOptions = parser.load(options, this.localStoreDefaults, {}); - return new LocalDatastore(this, storeOptions, storeInstanceOptions); - } else { - throw new Bottleneck.prototype.BottleneckError(`Invalid datastore type: ${this.datastore}`); + const opts = parser.load(options, this.redisStoreDefaults, {}); + this._store = new RedisDatastore(this, storeOptions, opts); + } + else if (this.datastore === "local") { + const opts = parser.load(options, this.localStoreDefaults, {}); + this._store = new LocalDatastore(this, storeOptions, opts); } - }.call(this); - - this._queues.on("leftzero", () => { - var ref; - return (ref = this._store.heartbeat) != null ? typeof ref.ref === "function" ? ref.ref() : void 0 : void 0; - }); - - this._queues.on("zero", () => { - var ref; - return (ref = this._store.heartbeat) != null ? typeof ref.unref === "function" ? 
ref.unref() : void 0 : void 0; - }); - } - + else { + throw new BottleneckError(`Invalid datastore type: ${this.datastore}`); + } + this._queues.on("leftzero", () => { var _a, _b; return (_b = (_a = this._store.heartbeat) === null || _a === void 0 ? void 0 : _a.ref) === null || _b === void 0 ? void 0 : _b.call(_a); }); + this._queues.on("zero", () => { var _a, _b; return (_b = (_a = this._store.heartbeat) === null || _a === void 0 ? void 0 : _a.unref) === null || _b === void 0 ? void 0 : _b.call(_a); }); + } _validateOptions(options, invalid) { - if (!(options != null && typeof options === "object" && invalid.length === 0)) { - throw new Bottleneck.prototype.BottleneckError("Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1."); - } + if (options == null || typeof options !== "object" || invalid.length !== 0) { + throw new BottleneckError("Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1."); + } } - ready() { - return this._store.ready; + return this._store.ready; } - clients() { - return this._store.clients; + return this._store.clients; } - channel() { - return `b_${this.id}`; + return `b_${this.id}`; } - channel_client() { - return `b_${this.id}_${this._store.clientId}`; + return `b_${this.id}_${this._store.clientId}`; } - publish(message) { - return this._store.__publish__(message); + return this._store.__publish__(message); } - - disconnect(flush = true) { - return this._store.__disconnect__(flush); + disconnect() { + return __awaiter(this, arguments, void 0, function* (flush = true) { + yield this._store.__disconnect__(flush); + }); } - chain(_limiter) { - this._limiter = _limiter; - return this; + this._limiter = _limiter; + return this; } - queued(priority) { - return this._queues.queued(priority); + return this._queues.queued(priority); } - clusterQueued() { - 
return this._store.__queued__(); + return this._store.__queued__(); } - empty() { - return this.queued() === 0 && this._submitLock.isEmpty(); + return this.queued() === 0 && this._submitLock.isEmpty(); } - running() { - return this._store.__running__(); + return this._store.__running__(); } - done() { - return this._store.__done__(); + return this._store.__done__(); } - jobStatus(id) { - return this._states.jobStatus(id); + return this._states.jobStatus(id); } - jobs(status) { - return this._states.statusJobs(status); + return this._states.statusJobs(status); } - counts() { - return this._states.statusCounts(); + return this._states.statusCounts(); } - _randomIndex() { - return Math.random().toString(36).slice(2); + return Math.random().toString(36).slice(2); } - check(weight = 1) { - return this._store.__check__(weight); + return this._store.__check__(weight); } - _clearGlobalState(index) { - if (this._scheduled[index] != null) { - clearTimeout(this._scheduled[index].expiration); - delete this._scheduled[index]; - return true; - } else { - return false; - } - } - - _free(index, job, options, eventInfo) { - var _this = this; - - return _asyncToGenerator(function* () { - var e, running; - - try { - var _ref = yield _this._store.__free__(index, options.weight); - - running = _ref.running; - - _this.Events.trigger("debug", `Freed ${options.id}`, eventInfo); - - if (running === 0 && _this.empty()) { - return _this.Events.trigger("idle"); - } - } catch (error1) { - e = error1; - return _this.Events.trigger("error", e); + if (this._scheduled[index] != null) { + clearTimeout(this._scheduled[index].expiration); + delete this._scheduled[index]; + return true; + } + else { + return false; } - })(); } - + _free(index, job, options, eventInfo) { + return __awaiter(this, void 0, void 0, function* () { + try { + const { running } = yield this._store.__free__(index, options.weight); + this.Events.trigger("debug", `Freed ${options.id}`, eventInfo); + if (running === 0 && 
this.empty()) { + return this.Events.trigger("idle"); + } + } + catch (e) { + return this.Events.trigger("error", e); + } + }); + } _run(index, job, wait) { - var clearGlobalState, free, run; - job.doRun(); - clearGlobalState = this._clearGlobalState.bind(this, index); - run = this._run.bind(this, index, job); - free = this._free.bind(this, index, job); - return this._scheduled[index] = { - timeout: setTimeout(() => { - return job.doExecute(this._limiter, clearGlobalState, run, free); - }, wait), - expiration: job.options.expiration != null ? setTimeout(function () { - return job.doExpire(clearGlobalState, run, free); - }, wait + job.options.expiration) : void 0, - job: job - }; - } - + job.doRun(); + const clearGlobalState = this._clearGlobalState.bind(this, index); + const run = this._run.bind(this, index, job); + const free = this._free.bind(this, index, job); + return (this._scheduled[index] = { + timeout: setTimeout(() => { + return job.doExecute(this._limiter, clearGlobalState, run, free); + }, wait), + expiration: job.options.expiration != null + ? 
setTimeout(() => job.doExpire(clearGlobalState, run, free), wait + job.options.expiration) + : undefined, + job, + }); + } _drainOne(capacity) { - return this._registerLock.schedule(() => { - var args, index, next, options, queue; - - if (this.queued() === 0) { - return this.Promise.resolve(null); - } - - queue = this._queues.getFirst(); - - var _next2 = next = queue.first(); - - options = _next2.options; - args = _next2.args; - - if (capacity != null && options.weight > capacity) { - return this.Promise.resolve(null); - } - - this.Events.trigger("debug", `Draining ${options.id}`, { - args, - options + return __awaiter(this, void 0, void 0, function* () { + return this._registerLock.schedule(() => __awaiter(this, void 0, void 0, function* () { + let next; + if (this.queued() === 0) { + return null; + } + const queue = this._queues.getFirst(); + const { options, args } = (next = queue.first()); + if (capacity != null && options.weight > capacity) { + return null; + } + this.Events.trigger("debug", `Draining ${options.id}`, { args, options }); + const index = this._randomIndex(); + const { success, wait, reservoir } = yield this._store.__register__(index, options.weight, options.expiration); + this.Events.trigger("debug", `Drained ${options.id}`, { success, args, options }); + if (success) { + queue.shift(); + const empty = this.empty(); + if (empty) { + this.Events.trigger("empty"); + } + if (reservoir === 0) { + this.Events.trigger("depleted", empty); + } + this._run(index, next, wait); + return options.weight; + } + else { + return null; + } + })); }); - index = this._randomIndex(); - return this._store.__register__(index, options.weight, options.expiration).then(({ - success, - wait, - reservoir - }) => { - var empty; - this.Events.trigger("debug", `Drained ${options.id}`, { - success, - args, - options - }); - - if (success) { - queue.shift(); - empty = this.empty(); - - if (empty) { - this.Events.trigger("empty"); + } + _drainAll(capacity_1) { + return 
__awaiter(this, arguments, void 0, function* (capacity, total = 0) { + try { + const drained = yield this._drainOne(capacity); + if (drained != null) { + const newCapacity = capacity != null ? capacity - drained : capacity; + return this._drainAll(newCapacity, total + drained); + } + else { + return total; + } } - - if (reservoir === 0) { - this.Events.trigger("depleted", empty); + catch (e) { + this.Events.trigger("error", e); } - - this._run(index, next, wait); - - return this.Promise.resolve(options.weight); - } else { - return this.Promise.resolve(null); - } }); - }); - } - - _drainAll(capacity, total = 0) { - return this._drainOne(capacity).then(drained => { - var newCapacity; - - if (drained != null) { - newCapacity = capacity != null ? capacity - drained : capacity; - return this._drainAll(newCapacity, total + drained); - } else { - return this.Promise.resolve(total); - } - }).catch(e => { - return this.Events.trigger("error", e); - }); } - _dropAllQueued(message) { - return this._queues.shiftAll(function (job) { - return job.doDrop({ - message - }); - }); - } - - stop(options = {}) { - var done, waitForExecuting; - options = parser.load(options, this.stopDefaults); - - waitForExecuting = at => { - var finished; - - finished = () => { - var counts; - counts = this._states.counts; - return counts[0] + counts[1] + counts[2] + counts[3] === at; - }; - - return new this.Promise((resolve, reject) => { - if (finished()) { - return resolve(); - } else { - return this.on("done", () => { - if (finished()) { - this.removeAllListeners("done"); - return resolve(); - } + return this._queues.shiftAll((job) => job.doDrop({ message })); + } + stop(options) { + options !== null && options !== void 0 ? 
options : (options = {}); + options = parser.load(options, this.stopDefaults); + const waitForExecuting = (at) => { + const finished = () => { + const { counts } = this._states; + return counts[0] + counts[1] + counts[2] + counts[3] === at; + }; + return new Promise((resolve) => { + if (finished()) { + resolve(); + } + else { + this.on("done", () => { + if (finished()) { + this.removeAllListeners("done"); + resolve(); + } + }); + } }); - } - }); - }; - - done = options.dropWaitingJobs ? (this._run = function (index, next) { - return next.doDrop({ - message: options.dropErrorMessage - }); - }, this._drainOne = () => { - return this.Promise.resolve(null); - }, this._registerLock.schedule(() => { - return this._submitLock.schedule(() => { - var k, ref, v; - ref = this._scheduled; - - for (k in ref) { - v = ref[k]; - - if (this.jobStatus(v.job.options.id) === "RUNNING") { - clearTimeout(v.timeout); - clearTimeout(v.expiration); - v.job.doDrop({ - message: options.dropErrorMessage - }); - } - } - - this._dropAllQueued(options.dropErrorMessage); - - return waitForExecuting(0); - }); - })) : this.schedule({ - priority: NUM_PRIORITIES - 1, - weight: 0 - }, () => { - return waitForExecuting(1); - }); - - this._receive = function (job) { - return job._reject(new Bottleneck.prototype.BottleneckError(options.enqueueErrorMessage)); - }; - - this.stop = () => { - return this.Promise.reject(new Bottleneck.prototype.BottleneckError("stop() has already been called")); - }; - - return done; - } - - _addToQueue(job) { - var _this2 = this; - - return _asyncToGenerator(function* () { - var args, blocked, error, options, reachedHWM, shifted, strategy; - args = job.args; - options = job.options; - - try { - var _ref2 = yield _this2._store.__submit__(_this2.queued(), options.weight); - - reachedHWM = _ref2.reachedHWM; - blocked = _ref2.blocked; - strategy = _ref2.strategy; - } catch (error1) { - error = error1; - - _this2.Events.trigger("debug", `Could not queue ${options.id}`, { - args, 
- options, - error - }); - - job.doDrop({ - error - }); - return false; + }; + let done; + if (options.dropWaitingJobs) { + this._run = (index, next) => next.doDrop({ message: options.dropErrorMessage }); + this._drainOne = () => this.Promise.resolve(null); + done = this._registerLock.schedule(() => this._submitLock.schedule(() => { + for (const v of Object.values(this._scheduled)) { + if (this.jobStatus(v.job.options.id) === "RUNNING") { + clearTimeout(v.timeout); + clearTimeout(v.expiration); + v.job.doDrop({ message: options.dropErrorMessage }); + } + } + this._dropAllQueued(options.dropErrorMessage); + return waitForExecuting(0); + })); } - - if (blocked) { - job.doDrop(); - return true; - } else if (reachedHWM) { - shifted = strategy === Bottleneck.prototype.strategy.LEAK ? _this2._queues.shiftLastFrom(options.priority) : strategy === Bottleneck.prototype.strategy.OVERFLOW_PRIORITY ? _this2._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck.prototype.strategy.OVERFLOW ? 
job : void 0; - - if (shifted != null) { - shifted.doDrop(); - } - - if (shifted == null || strategy === Bottleneck.prototype.strategy.OVERFLOW) { - if (shifted == null) { - job.doDrop(); + else { + done = this.schedule({ priority: NUM_PRIORITIES - 1, weight: 0 }, () => waitForExecuting(1)); + } + this._receive = (job) => job._reject(new BottleneckError(options.enqueueErrorMessage)); + this.stop = () => this.Promise.reject(new BottleneckError("stop() has already been called")); + return done; + } + _addToQueue(job) { + return __awaiter(this, void 0, void 0, function* () { + let blocked, reachedHWM, strategy; + const { args, options } = job; + try { + ({ reachedHWM, blocked, strategy } = yield this._store.__submit__(this.queued(), options.weight)); } - + catch (error) { + this.Events.trigger("debug", `Could not queue ${options.id}`, { args, options, error }); + job.doDrop({ error }); + return false; + } + if (blocked) { + job.doDrop(); + return true; + } + else if (reachedHWM) { + let shifted; + if (strategy === Bottleneck.strategy.LEAK) { + shifted = this._queues.shiftLastFrom(options.priority); + } + else if (strategy === Bottleneck.strategy.OVERFLOW_PRIORITY) { + shifted = this._queues.shiftLastFrom(options.priority + 1); + } + else if (strategy === Bottleneck.strategy.OVERFLOW) { + shifted = job; + } + if (shifted != null) { + shifted.doDrop(); + } + if (shifted == null || strategy === Bottleneck.strategy.OVERFLOW) { + if (shifted == null) { + job.doDrop(); + } + return reachedHWM; + } + } + job.doQueue(reachedHWM, blocked); + this._queues.push(job); + yield this._drainAll(); return reachedHWM; - } - } - - job.doQueue(reachedHWM, blocked); - - _this2._queues.push(job); - - yield _this2._drainAll(); - return reachedHWM; - })(); - } - + }); + } _receive(job) { - if (this._states.jobStatus(job.options.id) != null) { - job._reject(new Bottleneck.prototype.BottleneckError(`A job with the same id already exists (id=${job.options.id})`)); - - return false; - } else { - 
job.doReceive(); - return this._submitLock.schedule(this._addToQueue, job); - } - } - + if (this._states.jobStatus(job.options.id) != null) { + job._reject(new BottleneckError(`A job with the same id already exists (id=${job.options.id})`)); + return false; + } + else { + job.doReceive(); + return this._submitLock.schedule(this._addToQueue, job); + } + } submit(...args) { - var cb, fn, job, options, ref, ref1, task; - - if (typeof args[0] === "function") { - var _ref3, _ref4, _splice$call, _splice$call2; - - ref = args, (_ref3 = ref, _ref4 = _toArray(_ref3), fn = _ref4[0], args = _ref4.slice(1), _ref3), (_splice$call = splice.call(args, -1), _splice$call2 = _slicedToArray(_splice$call, 1), cb = _splice$call2[0], _splice$call); - options = parser.load({}, this.jobDefaults); - } else { - var _ref5, _ref6, _splice$call3, _splice$call4; - - ref1 = args, (_ref5 = ref1, _ref6 = _toArray(_ref5), options = _ref6[0], fn = _ref6[1], args = _ref6.slice(2), _ref5), (_splice$call3 = splice.call(args, -1), _splice$call4 = _slicedToArray(_splice$call3, 1), cb = _splice$call4[0], _splice$call3); - options = parser.load(options, this.jobDefaults); - } - - task = (...args) => { - return new this.Promise(function (resolve, reject) { - return fn(...args, function (...args) { - return (args[0] != null ? reject : resolve)(args); - }); - }); - }; - - job = new Job(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); - job.promise.then(function (args) { - return typeof cb === "function" ? cb(...args) : void 0; - }).catch(function (args) { - if (Array.isArray(args)) { - return typeof cb === "function" ? cb(...args) : void 0; - } else { - return typeof cb === "function" ? 
cb(args) : void 0; + let cb, fn, options; + if (typeof args[0] === "function") { + cb = args.pop(); + [fn, ...args] = args; + options = parser.load({}, this.jobDefaults); + } + else { + cb = args.pop(); + [options, fn, ...args] = args; + options = parser.load(options, this.jobDefaults); } - }); - return this._receive(job); + const task = (...args) => { + return new Promise((resolve, reject) => fn(...args, (...args) => (args[0] != null ? reject : resolve)(args))); + }; + const job = new Job(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states); + job.promise + .then((args) => (typeof cb === "function" ? cb(...(args || [])) : undefined)) + .catch(function (args) { + if (Array.isArray(args)) { + return typeof cb === "function" ? cb(...args) : undefined; + } + else { + return typeof cb === "function" ? cb(args) : undefined; + } + }); + return this._receive(job); } - schedule(...args) { - var job, options, task; - - if (typeof args[0] === "function") { - var _args = args; - - var _args2 = _toArray(_args); - - task = _args2[0]; - args = _args2.slice(1); - options = {}; - } else { - var _args3 = args; - - var _args4 = _toArray(_args3); - - options = _args4[0]; - task = _args4[1]; - args = _args4.slice(2); - } - - job = new Job(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); - - this._receive(job); - - return job.promise; - } - + let options, task; + if (typeof args[0] === "function") { + [task, ...args] = args; + options = {}; + } + else { + [options, task, ...args] = args; + } + const job = new Job(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states); + this._receive(job); + return job.promise; + } wrap(fn) { - var schedule, wrapped; - schedule = this.schedule.bind(this); - - wrapped = function wrapped(...args) { - return schedule(fn.bind(this), ...args); - }; - - wrapped.withOptions = function (options, ...args) { - return schedule(options, fn, ...args); - 
}; - - return wrapped; - } - - updateSettings(options = {}) { - var _this3 = this; - - return _asyncToGenerator(function* () { - yield _this3._store.__updateSettings__(parser.overwrite(options, _this3.storeDefaults)); - parser.overwrite(options, _this3.instanceDefaults, _this3); - return _this3; - })(); - } - + const schedule = this.schedule.bind(this); + const wrapped = function (...args) { + return schedule(fn.bind(this), ...args); + }; + wrapped.withOptions = (options, ...args) => schedule(options, fn, ...args); + return wrapped; + } + updateSettings(options) { + return __awaiter(this, void 0, void 0, function* () { + options !== null && options !== void 0 ? options : (options = {}); + yield this._store.__updateSettings__(parser.overwrite(options, this.storeDefaults)); + parser.overwrite(options, this.instanceDefaults, this); + return this; + }); + } currentReservoir() { - return this._store.__currentReservoir__(); + return this._store.__currentReservoir__(); } - incrementReservoir(incr = 0) { - return this._store.__incrementReservoir__(incr); - } - - } - - ; - Bottleneck.default = Bottleneck; - Bottleneck.Events = Events; - Bottleneck.version = Bottleneck.prototype.version = require("./version.json").version; - Bottleneck.strategy = Bottleneck.prototype.strategy = { + return this._store.__incrementReservoir__(incr); + } +} +Bottleneck.BottleneckError = BottleneckError; +Bottleneck.Group = Group; +Bottleneck.RedisConnection = RedisConnection; +Bottleneck.IORedisConnection = IORedisConnection; +Bottleneck.Batcher = Batcher; +Bottleneck.Events = Events; +Bottleneck.strategy = { LEAK: 1, OVERFLOW: 2, OVERFLOW_PRIORITY: 4, - BLOCK: 3 - }; - Bottleneck.BottleneckError = Bottleneck.prototype.BottleneckError = require("./BottleneckError"); - Bottleneck.Group = Bottleneck.prototype.Group = require("./Group"); - Bottleneck.RedisConnection = Bottleneck.prototype.RedisConnection = require("./RedisConnection"); - Bottleneck.IORedisConnection = 
Bottleneck.prototype.IORedisConnection = require("./IORedisConnection"); - Bottleneck.Batcher = Bottleneck.prototype.Batcher = require("./Batcher"); - Bottleneck.prototype.jobDefaults = { - priority: DEFAULT_PRIORITY, - weight: 1, - expiration: null, - id: "" - }; - Bottleneck.prototype.storeDefaults = { - maxConcurrent: null, - minTime: 0, - highWater: null, - strategy: Bottleneck.prototype.strategy.LEAK, - penalty: null, - reservoir: null, - reservoirRefreshInterval: null, - reservoirRefreshAmount: null, - reservoirIncreaseInterval: null, - reservoirIncreaseAmount: null, - reservoirIncreaseMaximum: null - }; - Bottleneck.prototype.localStoreDefaults = { - Promise: Promise, - timeout: null, - heartbeatInterval: 250 - }; - Bottleneck.prototype.redisStoreDefaults = { - Promise: Promise, - timeout: null, - heartbeatInterval: 5000, - clientTimeout: 10000, - Redis: null, - clientOptions: {}, - clusterNodes: null, - clearDatastore: false, - connection: null - }; - Bottleneck.prototype.instanceDefaults = { - datastore: "local", - connection: null, - id: "", - rejectOnDrop: true, - trackDoneStatus: false, - Promise: Promise - }; - Bottleneck.prototype.stopDefaults = { - enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.", - dropWaitingJobs: true, - dropErrorMessage: "This limiter has been stopped." 
- }; - return Bottleneck; -}.call(void 0); - -module.exports = Bottleneck; \ No newline at end of file + BLOCK: 3, +}; +module.exports = Bottleneck; +module.exports.default = Bottleneck; diff --git a/lib/BottleneckError.js b/lib/BottleneckError.js index f8eeaff..fe4cc49 100644 --- a/lib/BottleneckError.js +++ b/lib/BottleneckError.js @@ -1,5 +1,3 @@ -"use strict"; - -var BottleneckError; -BottleneckError = class BottleneckError extends Error {}; -module.exports = BottleneckError; \ No newline at end of file +class BottleneckError extends Error { +} +module.exports = BottleneckError; diff --git a/lib/DLList.js b/lib/DLList.js index b469a65..450eb9d 100644 --- a/lib/DLList.js +++ b/lib/DLList.js @@ -1,107 +1,77 @@ -"use strict"; - -var DLList; -DLList = class DLList { - constructor(incr, decr) { - this.incr = incr; - this.decr = decr; - this._first = null; - this._last = null; - this.length = 0; - } - - push(value) { - var node; - this.length++; - - if (typeof this.incr === "function") { - this.incr(); +class DLList { + constructor(incr, decr) { + this.incr = incr; + this.decr = decr; + this._first = null; + this._last = null; + this.length = 0; } - - node = { - value, - prev: this._last, - next: null - }; - - if (this._last != null) { - this._last.next = node; - this._last = node; - } else { - this._first = this._last = node; + push(value) { + var _a; + this.length++; + (_a = this.incr) === null || _a === void 0 ? void 0 : _a.call(this); + const node = { value, prev: this._last, next: null }; + if (this._last != null) { + this._last.next = node; + this._last = node; + } + else { + this._first = this._last = node; + } } - - return void 0; - } - - shift() { - var value; - - if (this._first == null) { - return; - } else { - this.length--; - - if (typeof this.decr === "function") { - this.decr(); - } + shift() { + var _a; + if (this._first == null) { + return; + } + else { + this.length--; + (_a = this.decr) === null || _a === void 0 ? 
void 0 : _a.call(this); + } + const { value } = this._first; + if ((this._first = this._first.next) != null) { + this._first.prev = null; + } + else { + this._last = null; + } + return value; } - - value = this._first.value; - - if ((this._first = this._first.next) != null) { - this._first.prev = null; - } else { - this._last = null; + first() { + var _a; + return (_a = this._first) === null || _a === void 0 ? void 0 : _a.value; } - - return value; - } - - first() { - if (this._first != null) { - return this._first.value; + getArray() { + let node = this._first; + const result = []; + while (node != null) { + var ref; + result.push(((ref = node), (node = node.next), ref.value)); + } + return result; } - } - - getArray() { - var node, ref, results; - node = this._first; - results = []; - - while (node != null) { - results.push((ref = node, node = node.next, ref.value)); + forEachShift(cb) { + let node = this.shift(); + while (node != null) { + cb(node); + node = this.shift(); + } } - - return results; - } - - forEachShift(cb) { - var node; - node = this.shift(); - - while (node != null) { - cb(node), node = this.shift(); + debug() { + var _a, _b; + let node = this._first; + const result = []; + while (node != null) { + var ref; + result.push(((ref = node), + (node = node.next), + { + value: ref.value, + prev: (_a = ref.prev) === null || _a === void 0 ? void 0 : _a.value, + next: (_b = ref.next) === null || _b === void 0 ? void 0 : _b.value, + })); + } + return result; } - - return void 0; - } - - debug() { - var node, ref, ref1, ref2, results; - node = this._first; - results = []; - - while (node != null) { - results.push((ref = node, node = node.next, { - value: ref.value, - prev: (ref1 = ref.prev) != null ? ref1.value : void 0, - next: (ref2 = ref.next) != null ? 
ref2.value : void 0 - })); - } - - return results; - } - -}; -module.exports = DLList; \ No newline at end of file +} +module.exports = DLList; diff --git a/lib/Events.js b/lib/Events.js index e843257..aa867ee 100644 --- a/lib/Events.js +++ b/lib/Events.js @@ -1,128 +1,74 @@ -"use strict"; - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -var Events; -Events = class Events { - constructor(instance) { - this.instance = instance; - this._events = {}; - - if (this.instance.on != null || this.instance.once != null || this.instance.removeAllListeners != null) { - throw new Error("An Emitter already exists for this object"); - } - - this.instance.on = (name, cb) => { - return this._addListener(name, "many", cb); - }; - - this.instance.once = (name, cb) => { - return this._addListener(name, "once", cb); - }; - - this.instance.removeAllListeners = (name = null) => { - if (name != null) { - return delete this._events[name]; - } else { - return this._events = {}; - } - }; - } - - _addListener(name, status, cb) { - var base; - - if ((base = this._events)[name] == null) { - base[name] = []; - } - - this._events[name].push({ - cb, - status +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - - return this.instance; - } - - listenerCount(name) { - if (this._events[name] != null) { - return this._events[name].length; - } else { - return 0; - } - } - - trigger(name, ...args) { - var _this = this; - - return _asyncToGenerator(function* () { - var e, promises; - - try { - if (name !== "debug") { - _this.trigger("debug", `Event triggered: ${name}`, args); - } - - if (_this._events[name] == null) { - return; +}; +class Events { + constructor(instance) { + this.instance = instance; + this._events = {}; + if (this.instance.on != null || + this.instance.once != null || + this.instance.removeAllListeners != null) { + throw new Error("An Emitter already exists for this object"); } - - _this._events[name] = _this._events[name].filter(function (listener) { - return listener.status !== "none"; - }); - promises = _this._events[name].map( - /*#__PURE__*/ - function () { - var _ref = _asyncToGenerator(function* (listener) { - var e, returned; - - if (listener.status === "none") { - return; + this.instance.on = (name, cb) => this._addListener(name, "many", cb); + this.instance.once = (name, cb) => this._addListener(name, "once", cb); + this.instance.removeAllListeners = (name = null) => { + if (name != null) { + delete this._events[name]; } - - if (listener.status === "once") { - listener.status = "none"; + else { + this._events = {}; } - + }; + } + _addListener(name, status, cb) { + var _a; + var _b; + (_a = (_b = this._events)[name]) !== null && _a !== void 0 ? 
_a : (_b[name] = []); + this._events[name].push({ cb, status }); + return this.instance; + } + listenerCount(name) { + var _a, _b; + return (_b = (_a = this._events[name]) === null || _a === void 0 ? void 0 : _a.length) !== null && _b !== void 0 ? _b : 0; + } + trigger(name, ...args) { + return __awaiter(this, void 0, void 0, function* () { try { - returned = typeof listener.cb === "function" ? listener.cb(...args) : void 0; - - if (typeof (returned != null ? returned.then : void 0) === "function") { - return yield returned; - } else { - return returned; - } - } catch (error) { - e = error; - - if ("name" !== "error") { - _this.trigger("error", e); - } - - return null; + if (name !== "debug") { + this.trigger("debug", `Event triggered: ${name}`, args); + } + if (this._events[name] == null) + return; + this._events[name] = this._events[name].filter((listener) => listener.status !== "none"); + const allEvents = yield Promise.all(this._events[name].map((listener) => __awaiter(this, void 0, void 0, function* () { + if (listener.status === "once") + listener.status = "none"; + try { + return typeof listener.cb === "function" ? 
listener.cb(...(args || [])) : undefined; + } + catch (e) { + if (name !== "error") + this.trigger("error", e); + return null; + } + }))); + return allEvents.find((x) => x != null); + } + catch (error) { + const e = error; + if (name !== "error") { + this.trigger("error", e); + } + return null; } - }); - - return function (_x) { - return _ref.apply(this, arguments); - }; - }()); - return (yield Promise.all(promises)).find(function (x) { - return x != null; }); - } catch (error) { - e = error; - - if ("name" !== "error") { - _this.trigger("error", e); - } - - return null; - } - })(); - } - -}; -module.exports = Events; \ No newline at end of file + } +} +module.exports = Events; diff --git a/lib/Group.js b/lib/Group.js index 39676a5..cc2ae44 100644 --- a/lib/Group.js +++ b/lib/Group.js @@ -1,198 +1,133 @@ -"use strict"; - -function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); } - -function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } - -function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } - -function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); 
function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -var Events, Group, IORedisConnection, RedisConnection, Scripts, parser; -parser = require("./parser"); -Events = require("./Events"); -RedisConnection = require("./RedisConnection"); -IORedisConnection = require("./IORedisConnection"); -Scripts = require("./Scripts"); - -Group = function () { - class Group { - constructor(limiterOptions = {}) { - this.deleteKey = this.deleteKey.bind(this); - this.limiterOptions = limiterOptions; - parser.load(this.limiterOptions, this.defaults, this); - this.Events = new Events(this); - this.instances = {}; - this.Bottleneck = require("./Bottleneck"); - - this._startAutoCleanup(); - - this.sharedConnection = this.connection != null; - - if (this.connection == null) { - if (this.limiterOptions.datastore === "redis") { - this.connection = new RedisConnection(Object.assign({}, this.limiterOptions, { - Events: this.Events - })); - } else if (this.limiterOptions.datastore === "ioredis") { - this.connection = new IORedisConnection(Object.assign({}, this.limiterOptions, { - Events: this.Events - })); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +const parser = require("./parser"); +const Events = require("./Events"); +const RedisConnection = require("./RedisConnection"); +const IORedisConnection = require("./IORedisConnection"); +const Scripts = require("./Scripts"); +class Group { + constructor(limiterOptions) { + this.defaults = { + timeout: 1000 * 60 * 5, + connection: null, + id: "group-key", + }; + this.deleteKey = this.deleteKey.bind(this); + this.limiterOptions = limiterOptions !== null && limiterOptions !== void 0 ? limiterOptions : {}; + parser.load(this.limiterOptions, this.defaults, this); + this.Events = new Events(this); + this.instances = {}; + this._startAutoCleanup(); + this.sharedConnection = this.connection != null; + this.Bottleneck = require("./Bottleneck"); + if (this.connection == null) { + if (this.limiterOptions.datastore === "redis") { + this.connection = new RedisConnection(Object.assign({}, this.limiterOptions, { Events: this.Events })); + } + else if (this.limiterOptions.datastore === "ioredis") { + this.connection = new IORedisConnection(Object.assign({}, this.limiterOptions, { Events: this.Events })); + } } - } } - key(key = "") { - var ref; - return (ref = this.instances[key]) != null ? 
ref : (() => { - var limiter; - limiter = this.instances[key] = new this.Bottleneck(Object.assign(this.limiterOptions, { - id: `${this.id}-${key}`, - timeout: this.timeout, - connection: this.connection - })); - this.Events.trigger("created", limiter, key); + let limiter = this.instances[key]; + if (!limiter) { + limiter = new this.Bottleneck(Object.assign(this.limiterOptions, { + id: `${this.id}-${key}`, + timeout: this.timeout, + connection: this.connection, + })); + this.Events.trigger("created", limiter, key); + this.instances[key] = limiter; + } return limiter; - })(); } - - deleteKey(key = "") { - var _this = this; - - return _asyncToGenerator(function* () { - var deleted, instance; - instance = _this.instances[key]; - - if (_this.connection) { - deleted = yield _this.connection.__runCommand__(['del', ...Scripts.allKeys(`${_this.id}-${key}`)]); - } - - if (instance != null) { - delete _this.instances[key]; - yield instance.disconnect(); - } - - return instance != null || deleted > 0; - })(); + deleteKey() { + return __awaiter(this, arguments, void 0, function* (key = "") { + let deleted; + const instance = this.instances[key]; + if (this.connection) { + deleted = yield this.connection.__runCommand__([ + "del", + ...Scripts.allKeys(`${this.id}-${key}`), + ]); + } + if (instance != null) { + delete this.instances[key]; + yield instance.disconnect(); + } + return instance != null || deleted > 0; + }); } - limiters() { - var k, ref, results, v; - ref = this.instances; - results = []; - - for (k in ref) { - v = ref[k]; - results.push({ - key: k, - limiter: v - }); - } - - return results; + return Object.entries(this.instances).map(([key, limiter]) => ({ key, limiter })); } - keys() { - return Object.keys(this.instances); + return Object.keys(this.instances); } - clusterKeys() { - var _this2 = this; - - return _asyncToGenerator(function* () { - var cursor, end, found, i, k, keys, len, next, start; - - if (_this2.connection == null) { - return 
_this2.Promise.resolve(_this2.keys()); - } - - keys = []; - cursor = null; - start = `b_${_this2.id}-`.length; - end = "_settings".length; - - while (cursor !== 0) { - var _ref = yield _this2.connection.__runCommand__(["scan", cursor != null ? cursor : 0, "match", `b_${_this2.id}-*_settings`, "count", 10000]); - - var _ref2 = _slicedToArray(_ref, 2); - - next = _ref2[0]; - found = _ref2[1]; - cursor = ~~next; - - for (i = 0, len = found.length; i < len; i++) { - k = found[i]; - keys.push(k.slice(start, -end)); - } - } - - return keys; - })(); + return __awaiter(this, void 0, void 0, function* () { + if (this.connection == null) { + return Promise.resolve(this.keys()); + } + const keys = []; + let cursor = null; + const start = `b_${this.id}-`.length; + const end = "_settings".length; + while (cursor !== 0) { + const [next, found] = yield this.connection.__runCommand__([ + "scan", + cursor !== null && cursor !== void 0 ? cursor : 0, + "match", + `b_${this.id}-*_settings`, + "count", + 10000, + ]); + cursor = ~~next; + for (const k of found) { + keys.push(k.slice(start, -end)); + } + } + return keys; + }); } - _startAutoCleanup() { - var _this3 = this; - - var base; - clearInterval(this.interval); - return typeof (base = this.interval = setInterval( - /*#__PURE__*/ - _asyncToGenerator(function* () { - var e, k, ref, results, time, v; - time = Date.now(); - ref = _this3.instances; - results = []; - - for (k in ref) { - v = ref[k]; - - try { - if (yield v._store.__groupCheck__(time)) { - results.push(_this3.deleteKey(k)); - } else { - results.push(void 0); + var _a, _b; + clearInterval(this.interval); + this.interval = (_b = (_a = setInterval(() => __awaiter(this, void 0, void 0, function* () { + const time = Date.now(); + for (const [k, v] of Object.entries(this.instances)) { + try { + if (yield v._store.__groupCheck__(time)) { + this.deleteKey(k); + } + } + catch (e) { + v.Events.trigger("error", e); + } } - } catch (error) { - e = error; - 
results.push(v.Events.trigger("error", e)); - } - } - - return results; - }), this.timeout / 2)).unref === "function" ? base.unref() : void 0; + }), this.timeout / 2)).unref) === null || _b === void 0 ? void 0 : _b.call(_a); } - - updateSettings(options = {}) { - parser.overwrite(options, this.defaults, this); - parser.overwrite(options, options, this.limiterOptions); - - if (options.timeout != null) { - return this._startAutoCleanup(); - } + updateSettings(options) { + options !== null && options !== void 0 ? options : (options = {}); + parser.overwrite(options, this.defaults, this); + parser.overwrite(options, options, this.limiterOptions); + if (options.timeout != null) { + return this._startAutoCleanup(); + } } - disconnect(flush = true) { - var ref; - - if (!this.sharedConnection) { - return (ref = this.connection) != null ? ref.disconnect(flush) : void 0; - } + var _a; + if (!this.sharedConnection) { + return (_a = this.connection) === null || _a === void 0 ? void 0 : _a.disconnect(flush); + } } - - } - - ; - Group.prototype.defaults = { - timeout: 1000 * 60 * 5, - connection: null, - Promise: Promise, - id: "group-key" - }; - return Group; -}.call(void 0); - -module.exports = Group; \ No newline at end of file +} +module.exports = Group; diff --git a/lib/IORedisConnection.js b/lib/IORedisConnection.js index 52b28da..3389463 100644 --- a/lib/IORedisConnection.js +++ b/lib/IORedisConnection.js @@ -1,186 +1,124 @@ -"use strict"; - -function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); } - -function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } - -function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally 
{ try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } - -function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -var Events, IORedisConnection, Scripts, parser; -parser = require("./parser"); -Events = require("./Events"); -Scripts = require("./Scripts"); - -IORedisConnection = function () { - class IORedisConnection { - constructor(options = {}) { - parser.load(options, this.defaults, this); - - if (this.Redis == null) { - this.Redis = eval("require")("ioredis"); // Obfuscated or else Webpack/Angular will try to inline the optional ioredis module. To override this behavior: pass the ioredis module to Bottleneck as the 'Redis' option. 
- } - - if (this.Events == null) { - this.Events = new Events(this); - } - - this.terminated = false; - - if (this.clusterNodes != null) { - this.client = new this.Redis.Cluster(this.clusterNodes, this.clientOptions); - this.subscriber = new this.Redis.Cluster(this.clusterNodes, this.clientOptions); - } else if (this.client != null && this.client.duplicate == null) { - this.subscriber = new this.Redis.Cluster(this.client.startupNodes, this.client.options); - } else { - if (this.client == null) { - this.client = new this.Redis(this.clientOptions); - } - - this.subscriber = this.client.duplicate(); - } - - this.limiters = {}; - this.ready = this.Promise.all([this._setup(this.client, false), this._setup(this.subscriber, true)]).then(() => { - this._loadScripts(); - - return { - client: this.client, - subscriber: this.subscriber +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +const parser = require("./parser"); +const Events = require("./Events"); +const Scripts = require("./Scripts"); +class IORedisConnection { + constructor(options) { + var _a, _b, _c; + this.datastore = "ioredis"; + this.defaults = { + Redis: null, + clientOptions: {}, + clusterNodes: null, + client: null, + Events: null, }; - }); + options !== null && options !== void 0 ? 
options : (options = {}); + parser.load(options, this.defaults, this); + // Obfuscated or else Webpack/Angular will try to inline the optional ioredis module. To override this behavior: pass the ioredis module to Bottleneck as the 'Redis' option. + (_a = this.Redis) !== null && _a !== void 0 ? _a : (this.Redis = eval("require")("ioredis")); + (_b = this.Events) !== null && _b !== void 0 ? _b : (this.Events = new Events(this)); + this.terminated = false; + if (this.clusterNodes != null) { + this.client = new this.Redis.Cluster(this.clusterNodes, this.clientOptions); + this.subscriber = new this.Redis.Cluster(this.clusterNodes, this.clientOptions); + } + else if (this.client != null && this.client.duplicate == null) { + this.subscriber = new this.Redis.Cluster(this.client.startupNodes, this.client.options); + } + else { + (_c = this.client) !== null && _c !== void 0 ? _c : (this.client = new this.Redis(this.clientOptions)); + this.subscriber = this.client.duplicate(); + } + this.limiters = {}; + this.ready = Promise.all([ + this._setup(this.client, false), + this._setup(this.subscriber, true), + ]).then(() => { + this._loadScripts(); + return { client: this.client, subscriber: this.subscriber }; + }); } - _setup(client, sub) { - client.setMaxListeners(0); - return new this.Promise((resolve, reject) => { - client.on("error", e => { - return this.Events.trigger("error", e); + client.setMaxListeners(0); + return new Promise((resolve) => { + client.on("error", (e) => this.Events.trigger("error", e)); + if (sub) { + client.on("message", (channel, message) => { + var _a; + (_a = this.limiters[channel]) === null || _a === void 0 ? void 0 : _a._store.onMessage(channel, message); + }); + } + if (client.status === "ready") { + resolve(); + } + else { + client.once("ready", resolve); + } }); - - if (sub) { - client.on("message", (channel, message) => { - var ref; - return (ref = this.limiters[channel]) != null ? 
ref._store.onMessage(channel, message) : void 0; - }); - } - - if (client.status === "ready") { - return resolve(); - } else { - return client.once("ready", resolve); - } - }); } - _loadScripts() { - return Scripts.names.forEach(name => { - return this.client.defineCommand(name, { - lua: Scripts.payload(name) - }); - }); + return Scripts.names.forEach((name) => this.client.defineCommand(name, { lua: Scripts.payload(name) })); } - __runCommand__(cmd) { - var _this = this; - - return _asyncToGenerator(function* () { - var _, deleted; - - yield _this.ready; - - var _ref = yield _this.client.pipeline([cmd]).exec(); - - var _ref2 = _slicedToArray(_ref, 1); - - var _ref2$ = _slicedToArray(_ref2[0], 2); - - _ = _ref2$[0]; - deleted = _ref2$[1]; - return deleted; - })(); + return __awaiter(this, void 0, void 0, function* () { + yield this.ready; + const [[, deleted]] = yield this.client.pipeline([cmd]).exec(); + return deleted; + }); } - __addLimiter__(instance) { - return this.Promise.all([instance.channel(), instance.channel_client()].map(channel => { - return new this.Promise((resolve, reject) => { - return this.subscriber.subscribe(channel, () => { - this.limiters[channel] = instance; - return resolve(); - }); + return __awaiter(this, void 0, void 0, function* () { + yield Promise.all([instance.channel(), instance.channel_client()].map((channel) => { + return new Promise((resolve) => { + this.subscriber.subscribe(channel, () => { + this.limiters[channel] = instance; + resolve(); + }); + }); + })); }); - })); } - __removeLimiter__(instance) { - var _this2 = this; - - return [instance.channel(), instance.channel_client()].forEach( - /*#__PURE__*/ - function () { - var _ref3 = _asyncToGenerator(function* (channel) { - if (!_this2.terminated) { - yield _this2.subscriber.unsubscribe(channel); - } - - return delete _this2.limiters[channel]; + return __awaiter(this, void 0, void 0, function* () { + yield Promise.all([instance.channel(), 
instance.channel_client()].map((channel) => __awaiter(this, void 0, void 0, function* () { + if (!this.terminated) { + yield this.subscriber.unsubscribe(channel); + } + delete this.limiters[channel]; + }))); }); - - return function (_x) { - return _ref3.apply(this, arguments); - }; - }()); } - __scriptArgs__(name, id, args, cb) { - var keys; - keys = Scripts.keys(name, id); - return [keys.length].concat(keys, args, cb); + const keys = Scripts.keys(name, id); + return [keys.length].concat(keys, args, cb); } - __scriptFn__(name) { - return this.client[name].bind(this.client); + return this.client[name].bind(this.client); } - - disconnect(flush = true) { - var i, k, len, ref; - ref = Object.keys(this.limiters); - - for (i = 0, len = ref.length; i < len; i++) { - k = ref[i]; - clearInterval(this.limiters[k]._store.heartbeat); - } - - this.limiters = {}; - this.terminated = true; - - if (flush) { - return this.Promise.all([this.client.quit(), this.subscriber.quit()]); - } else { - this.client.disconnect(); - this.subscriber.disconnect(); - return this.Promise.resolve(); - } + disconnect() { + return __awaiter(this, arguments, void 0, function* (flush = true) { + for (const v of Object.values(this.limiters)) { + clearInterval(v._store.heartbeat); + } + this.limiters = {}; + this.terminated = true; + if (flush) { + yield Promise.all([this.client.quit(), this.subscriber.quit()]); + } + else { + this.client.disconnect(); + this.subscriber.disconnect(); + } + }); } - - } - - ; - IORedisConnection.prototype.datastore = "ioredis"; - IORedisConnection.prototype.defaults = { - Redis: null, - clientOptions: {}, - clusterNodes: null, - client: null, - Promise: Promise, - Events: null - }; - return IORedisConnection; -}.call(void 0); - -module.exports = IORedisConnection; \ No newline at end of file +} +module.exports = IORedisConnection; diff --git a/lib/Job.js b/lib/Job.js index 09ff6ca..eeffbc4 100644 --- a/lib/Job.js +++ b/lib/Job.js @@ -1,215 +1,156 @@ -"use strict"; - 
-function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -var BottleneckError, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser; -NUM_PRIORITIES = 10; -DEFAULT_PRIORITY = 5; -parser = require("./parser"); -BottleneckError = require("./BottleneckError"); -Job = class Job { - constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states, Promise) { - this.task = task; - this.args = args; - this.rejectOnDrop = rejectOnDrop; - this.Events = Events; - this._states = _states; - this.Promise = Promise; - this.options = parser.load(options, jobDefaults); - this.options.priority = this._sanitizePriority(this.options.priority); - - if (this.options.id === jobDefaults.id) { - this.options.id = `${this.options.id}-${this._randomIndex()}`; - } - - this.promise = new this.Promise((_resolve, _reject) => { - this._resolve = _resolve; - this._reject = _reject; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - this.retryCount = 0; - } - - _sanitizePriority(priority) { - var sProperty; - sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority; - - if (sProperty < 0) { - return 0; - } else if (sProperty > NUM_PRIORITIES - 1) { - return NUM_PRIORITIES - 1; - } else { - return sProperty; +}; +const NUM_PRIORITIES = 10; +const DEFAULT_PRIORITY = 5; +const parser = require("./parser"); +const BottleneckError = require("./BottleneckError"); +class Job { + constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states) { + this.task = task; + this.args = args; + this.rejectOnDrop = rejectOnDrop; + this.Events = Events; + this._states = _states; + this.options = parser.load(options, jobDefaults); + this.options.priority = this._sanitizePriority(this.options.priority); + if (this.options.id === jobDefaults.id) { + this.options.id = `${this.options.id}-${this._randomIndex()}`; + } + this.promise = new Promise((_resolve, _reject) => { + this._resolve = _resolve; + this._reject = _reject; + }); + this.retryCount = 0; } - } - - _randomIndex() { - return Math.random().toString(36).slice(2); - } - - doDrop({ - error, - message = "This job has been dropped by Bottleneck" - } = {}) { - if (this._states.remove(this.options.id)) { - if (this.rejectOnDrop) { - this._reject(error != null ? error : new BottleneckError(message)); - } - - this.Events.trigger("dropped", { - args: this.args, - options: this.options, - task: this.task, - promise: this.promise - }); - return true; - } else { - return false; + _sanitizePriority(priority) { + const sProperty = ~~priority !== priority ? 
DEFAULT_PRIORITY : priority; + if (sProperty < 0) { + return 0; + } + else if (sProperty > NUM_PRIORITIES - 1) { + return NUM_PRIORITIES - 1; + } + else { + return sProperty; + } } - } - - _assertStatus(expected) { - var status; - status = this._states.jobStatus(this.options.id); - - if (!(status === expected || expected === "DONE" && status === null)) { - throw new BottleneckError(`Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`); + _randomIndex() { + return Math.random().toString(36).slice(2); } - } - - doReceive() { - this._states.start(this.options.id); - - return this.Events.trigger("received", { - args: this.args, - options: this.options - }); - } - - doQueue(reachedHWM, blocked) { - this._assertStatus("RECEIVED"); - - this._states.next(this.options.id); - - return this.Events.trigger("queued", { - args: this.args, - options: this.options, - reachedHWM, - blocked - }); - } - - doRun() { - if (this.retryCount === 0) { - this._assertStatus("QUEUED"); - - this._states.next(this.options.id); - } else { - this._assertStatus("EXECUTING"); + doDrop(params) { + const { error, message = "This job has been dropped by Bottleneck" } = params || {}; + if (this._states.remove(this.options.id)) { + if (this.rejectOnDrop) { + this._reject(error !== null && error !== void 0 ? 
error : new BottleneckError(message)); + } + this.Events.trigger("dropped", { + args: this.args, + options: this.options, + task: this.task, + promise: this.promise, + }); + return true; + } + else { + return false; + } } - - return this.Events.trigger("scheduled", { - args: this.args, - options: this.options - }); - } - - doExecute(chained, clearGlobalState, run, free) { - var _this = this; - - return _asyncToGenerator(function* () { - var error, eventInfo, passed; - - if (_this.retryCount === 0) { - _this._assertStatus("RUNNING"); - - _this._states.next(_this.options.id); - } else { - _this._assertStatus("EXECUTING"); - } - - eventInfo = { - args: _this.args, - options: _this.options, - retryCount: _this.retryCount - }; - - _this.Events.trigger("executing", eventInfo); - - try { - passed = yield chained != null ? chained.schedule(_this.options, _this.task, ..._this.args) : _this.task(..._this.args); - - if (clearGlobalState()) { - _this.doDone(eventInfo); - - yield free(_this.options, eventInfo); - - _this._assertStatus("DONE"); - - return _this._resolve(passed); + _assertStatus(expected) { + const status = this._states.jobStatus(this.options.id); + if (!(status === expected || (expected === "DONE" && status === null))) { + throw new BottleneckError(`Invalid job status ${status}, expected ${expected}. 
Please open an issue at https://github.com/SGrondin/bottleneck/issues`); } - } catch (error1) { - error = error1; - return _this._onFailure(error, eventInfo, clearGlobalState, run, free); - } - })(); - } - - doExpire(clearGlobalState, run, free) { - var error, eventInfo; - - if (this._states.jobStatus(this.options.id === "RUNNING")) { - this._states.next(this.options.id); } - - this._assertStatus("EXECUTING"); - - eventInfo = { - args: this.args, - options: this.options, - retryCount: this.retryCount - }; - error = new BottleneckError(`This job timed out after ${this.options.expiration} ms.`); - return this._onFailure(error, eventInfo, clearGlobalState, run, free); - } - - _onFailure(error, eventInfo, clearGlobalState, run, free) { - var _this2 = this; - - return _asyncToGenerator(function* () { - var retry, retryAfter; - - if (clearGlobalState()) { - retry = yield _this2.Events.trigger("failed", error, eventInfo); - - if (retry != null) { - retryAfter = ~~retry; - - _this2.Events.trigger("retry", `Retrying ${_this2.options.id} after ${retryAfter} ms`, eventInfo); - - _this2.retryCount++; - return run(retryAfter); - } else { - _this2.doDone(eventInfo); - - yield free(_this2.options, eventInfo); - - _this2._assertStatus("DONE"); - - return _this2._reject(error); + doReceive() { + this._states.start(this.options.id); + return this.Events.trigger("received", { args: this.args, options: this.options }); + } + doQueue(reachedHWM, blocked) { + this._assertStatus("RECEIVED"); + this._states.next(this.options.id); + return this.Events.trigger("queued", { + args: this.args, + options: this.options, + reachedHWM, + blocked, + }); + } + doRun() { + if (this.retryCount === 0) { + this._assertStatus("QUEUED"); + this._states.next(this.options.id); } - } - })(); - } - - doDone(eventInfo) { - this._assertStatus("EXECUTING"); - - this._states.next(this.options.id); - - return this.Events.trigger("done", eventInfo); - } - -}; -module.exports = Job; \ No newline at end of file + 
else { + this._assertStatus("EXECUTING"); + } + return this.Events.trigger("scheduled", { args: this.args, options: this.options }); + } + doExecute(chained, clearGlobalState, run, free) { + return __awaiter(this, void 0, void 0, function* () { + if (this.retryCount === 0) { + this._assertStatus("RUNNING"); + this._states.next(this.options.id); + } + else { + this._assertStatus("EXECUTING"); + } + const eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; + this.Events.trigger("executing", eventInfo); + try { + const passed = yield (chained != null + ? chained.schedule(this.options, this.task, ...this.args) + : this.task(...(this.args || []))); + if (clearGlobalState()) { + this.doDone(eventInfo); + yield free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._resolve(passed); + } + } + catch (error) { + return this._onFailure(error, eventInfo, clearGlobalState, run, free); + } + }); + } + doExpire(clearGlobalState, run, free) { + if (this._states.jobStatus(this.options.id === "RUNNING")) { + this._states.next(this.options.id); + } + this._assertStatus("EXECUTING"); + const eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; + const error = new BottleneckError(`This job timed out after ${this.options.expiration} ms.`); + return this._onFailure(error, eventInfo, clearGlobalState, run, free); + } + _onFailure(error, eventInfo, clearGlobalState, run, free) { + return __awaiter(this, void 0, void 0, function* () { + if (clearGlobalState()) { + const retry = yield this.Events.trigger("failed", error, eventInfo); + if (retry != null) { + const retryAfter = ~~retry; + this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo); + this.retryCount++; + return run(retryAfter); + } + else { + this.doDone(eventInfo); + yield free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._reject(error); + } + } + }); + } + doDone(eventInfo) { + 
this._assertStatus("EXECUTING"); + this._states.next(this.options.id); + return this.Events.trigger("done", eventInfo); + } +} +module.exports = Job; diff --git a/lib/LocalDatastore.js b/lib/LocalDatastore.js index f5bfc7c..02bf2f2 100644 --- a/lib/LocalDatastore.js +++ b/lib/LocalDatastore.js @@ -1,289 +1,205 @@ -"use strict"; - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -var BottleneckError, LocalDatastore, parser; -parser = require("./parser"); -BottleneckError = require("./BottleneckError"); -LocalDatastore = class LocalDatastore { - constructor(instance, storeOptions, storeInstanceOptions) { - this.instance = instance; - this.storeOptions = storeOptions; - this.clientId = this.instance._randomIndex(); - parser.load(storeInstanceOptions, storeInstanceOptions, this); - this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); - this._running = 0; - this._done = 0; - this._unblockTime = 0; - this.ready = this.Promise.resolve(); - this.clients = {}; - - this._startHeartbeat(); - } - - _startHeartbeat() { - var base; - - if (this.heartbeat != null) { - clearInterval(this.heartbeat); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +const parser = require("./parser"); +const BottleneckError = require("./BottleneckError"); +class LocalDatastore { + constructor(instance, storeOptions, storeInstanceOptions) { + this.instance = instance; + this.storeOptions = storeOptions; + this.clientId = this.instance._randomIndex(); + parser.load(storeInstanceOptions, storeInstanceOptions, this); + this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); + this._running = 0; + this._done = 0; + this._unblockTime = 0; + this.ready = Promise.resolve(); + this.clients = {}; + this._startHeartbeat(); } - - if (this.storeOptions.reservoirRefreshInterval != null && this.storeOptions.reservoirRefreshAmount != null || this.storeOptions.reservoirIncreaseInterval != null && this.storeOptions.reservoirIncreaseAmount != null) { - return typeof (base = this.heartbeat = setInterval(() => { - var amount, incr, maximum, now, reservoir; - now = Date.now(); - - if (this.storeOptions.reservoirRefreshInterval != null && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) { - this._lastReservoirRefresh = now; - this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount; - - this.instance._drainAll(this.computeCapacity()); + _startHeartbeat() { + var _a, _b; + if (this.heartbeat) { + clearInterval(this.heartbeat); } - - if (this.storeOptions.reservoirIncreaseInterval != null && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) { 
- var _this$storeOptions = this.storeOptions; - amount = _this$storeOptions.reservoirIncreaseAmount; - maximum = _this$storeOptions.reservoirIncreaseMaximum; - reservoir = _this$storeOptions.reservoir; - this._lastReservoirIncrease = now; - incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; - - if (incr > 0) { - this.storeOptions.reservoir += incr; - return this.instance._drainAll(this.computeCapacity()); - } + if ((this.storeOptions.reservoirRefreshInterval != null && + this.storeOptions.reservoirRefreshAmount != null) || + (this.storeOptions.reservoirIncreaseInterval != null && + this.storeOptions.reservoirIncreaseAmount != null)) { + this.heartbeat = (_b = (_a = setInterval(() => { + const now = Date.now(); + if (this.storeOptions.reservoirRefreshInterval != null && + now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) { + this._lastReservoirRefresh = now; + this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount; + this.instance._drainAll(this.computeCapacity()); + } + if (this.storeOptions.reservoirIncreaseInterval != null && + now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) { + const { reservoirIncreaseAmount: amount, reservoirIncreaseMaximum: maximum, reservoir, } = this.storeOptions; + this._lastReservoirIncrease = now; + const incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; + if (incr > 0) { + this.storeOptions.reservoir += incr; + return this.instance._drainAll(this.computeCapacity()); + } + } + }, this.heartbeatInterval)).unref) === null || _b === void 0 ? void 0 : _b.call(_a); } - }, this.heartbeatInterval)).unref === "function" ? 
base.unref() : void 0; } - } - - __publish__(message) { - var _this = this; - - return _asyncToGenerator(function* () { - yield _this.yieldLoop(); - return _this.instance.Events.trigger("message", message.toString()); - })(); - } - - __disconnect__(flush) { - var _this2 = this; - - return _asyncToGenerator(function* () { - yield _this2.yieldLoop(); - clearInterval(_this2.heartbeat); - return _this2.Promise.resolve(); - })(); - } - - yieldLoop(t = 0) { - return new this.Promise(function (resolve, reject) { - return setTimeout(resolve, t); - }); - } - - computePenalty() { - var ref; - return (ref = this.storeOptions.penalty) != null ? ref : 15 * this.storeOptions.minTime || 5000; - } - - __updateSettings__(options) { - var _this3 = this; - - return _asyncToGenerator(function* () { - yield _this3.yieldLoop(); - parser.overwrite(options, options, _this3.storeOptions); - - _this3._startHeartbeat(); - - _this3.instance._drainAll(_this3.computeCapacity()); - - return true; - })(); - } - - __running__() { - var _this4 = this; - - return _asyncToGenerator(function* () { - yield _this4.yieldLoop(); - return _this4._running; - })(); - } - - __queued__() { - var _this5 = this; - - return _asyncToGenerator(function* () { - yield _this5.yieldLoop(); - return _this5.instance.queued(); - })(); - } - - __done__() { - var _this6 = this; - - return _asyncToGenerator(function* () { - yield _this6.yieldLoop(); - return _this6._done; - })(); - } - - __groupCheck__(time) { - var _this7 = this; - - return _asyncToGenerator(function* () { - yield _this7.yieldLoop(); - return _this7._nextRequest + _this7.timeout < time; - })(); - } - - computeCapacity() { - var maxConcurrent, reservoir; - var _this$storeOptions2 = this.storeOptions; - maxConcurrent = _this$storeOptions2.maxConcurrent; - reservoir = _this$storeOptions2.reservoir; - - if (maxConcurrent != null && reservoir != null) { - return Math.min(maxConcurrent - this._running, reservoir); - } else if (maxConcurrent != null) { - return 
maxConcurrent - this._running; - } else if (reservoir != null) { - return reservoir; - } else { - return null; + __publish__(message) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this.instance.Events.trigger("message", message.toString()); + }); + } + __disconnect__() { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + clearInterval(this.heartbeat); + }); + } + yieldLoop(t) { + return new Promise((resolve) => setTimeout(resolve, t !== null && t !== void 0 ? t : 0)); + } + computePenalty() { + return this.storeOptions.penalty != null + ? this.storeOptions.penalty + : 15 * this.storeOptions.minTime || 5000; + } + __updateSettings__(options) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + parser.overwrite(options, options, this.storeOptions); + this._startHeartbeat(); + this.instance._drainAll(this.computeCapacity()); + return true; + }); } - } - - conditionsCheck(weight) { - var capacity; - capacity = this.computeCapacity(); - return capacity == null || weight <= capacity; - } - - __incrementReservoir__(incr) { - var _this8 = this; - - return _asyncToGenerator(function* () { - var reservoir; - yield _this8.yieldLoop(); - reservoir = _this8.storeOptions.reservoir += incr; - - _this8.instance._drainAll(_this8.computeCapacity()); - - return reservoir; - })(); - } - - __currentReservoir__() { - var _this9 = this; - - return _asyncToGenerator(function* () { - yield _this9.yieldLoop(); - return _this9.storeOptions.reservoir; - })(); - } - - isBlocked(now) { - return this._unblockTime >= now; - } - - check(weight, now) { - return this.conditionsCheck(weight) && this._nextRequest - now <= 0; - } - - __check__(weight) { - var _this10 = this; - - return _asyncToGenerator(function* () { - var now; - yield _this10.yieldLoop(); - now = Date.now(); - return _this10.check(weight, now); - })(); - } - - __register__(index, weight, expiration) { - var _this11 = this; 
- - return _asyncToGenerator(function* () { - var now, wait; - yield _this11.yieldLoop(); - now = Date.now(); - - if (_this11.conditionsCheck(weight)) { - _this11._running += weight; - - if (_this11.storeOptions.reservoir != null) { - _this11.storeOptions.reservoir -= weight; + __running__() { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this._running; + }); + } + __queued__() { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this.instance.queued(); + }); + } + __done__() { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this._done; + }); + } + __groupCheck__(time) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this._nextRequest + this.timeout < time; + }); + } + computeCapacity() { + const { maxConcurrent, reservoir } = this.storeOptions; + if (maxConcurrent != null && reservoir != null) { + return Math.min(maxConcurrent - this._running, reservoir); } - - wait = Math.max(_this11._nextRequest - now, 0); - _this11._nextRequest = now + wait + _this11.storeOptions.minTime; - return { - success: true, - wait, - reservoir: _this11.storeOptions.reservoir - }; - } else { - return { - success: false - }; - } - })(); - } - - strategyIsBlock() { - return this.storeOptions.strategy === 3; - } - - __submit__(queueLength, weight) { - var _this12 = this; - - return _asyncToGenerator(function* () { - var blocked, now, reachedHWM; - yield _this12.yieldLoop(); - - if (_this12.storeOptions.maxConcurrent != null && weight > _this12.storeOptions.maxConcurrent) { - throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${_this12.storeOptions.maxConcurrent}`); - } - - now = Date.now(); - reachedHWM = _this12.storeOptions.highWater != null && queueLength === _this12.storeOptions.highWater && !_this12.check(weight, now); - blocked = 
_this12.strategyIsBlock() && (reachedHWM || _this12.isBlocked(now)); - - if (blocked) { - _this12._unblockTime = now + _this12.computePenalty(); - _this12._nextRequest = _this12._unblockTime + _this12.storeOptions.minTime; - - _this12.instance._dropAllQueued(); - } - - return { - reachedHWM, - blocked, - strategy: _this12.storeOptions.strategy - }; - })(); - } - - __free__(index, weight) { - var _this13 = this; - - return _asyncToGenerator(function* () { - yield _this13.yieldLoop(); - _this13._running -= weight; - _this13._done += weight; - - _this13.instance._drainAll(_this13.computeCapacity()); - - return { - running: _this13._running - }; - })(); - } - -}; -module.exports = LocalDatastore; \ No newline at end of file + else if (maxConcurrent != null) { + return maxConcurrent - this._running; + } + else if (reservoir != null) { + return reservoir; + } + else { + return null; + } + } + conditionsCheck(weight) { + const capacity = this.computeCapacity(); + return capacity == null || weight <= capacity; + } + __incrementReservoir__(incr) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + const reservoir = (this.storeOptions.reservoir += incr); + this.instance._drainAll(this.computeCapacity()); + return reservoir; + }); + } + __currentReservoir__() { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this.storeOptions.reservoir; + }); + } + isBlocked(now) { + return this._unblockTime >= now; + } + check(weight, now) { + return this.conditionsCheck(weight) && this._nextRequest - now <= 0; + } + __check__(weight) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + const now = Date.now(); + return this.check(weight, now); + }); + } + __register__(index, weight, _expiration) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + const now = Date.now(); + if (this.conditionsCheck(weight)) { + this._running += weight; + if 
(this.storeOptions.reservoir != null) { + this.storeOptions.reservoir -= weight; + } + const wait = Math.max(this._nextRequest - now, 0); + this._nextRequest = now + wait + this.storeOptions.minTime; + return { success: true, wait, reservoir: this.storeOptions.reservoir }; + } + else { + return { success: false }; + } + }); + } + strategyIsBlock() { + return this.storeOptions.strategy === 3; + } + __submit__(queueLength, weight) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + if (this.storeOptions.maxConcurrent != null && weight > this.storeOptions.maxConcurrent) { + throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`); + } + const now = Date.now(); + const reachedHWM = this.storeOptions.highWater != null && + queueLength === this.storeOptions.highWater && + !this.check(weight, now); + const blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now)); + if (blocked) { + this._unblockTime = now + this.computePenalty(); + this._nextRequest = this._unblockTime + this.storeOptions.minTime; + this.instance._dropAllQueued(); + } + return { reachedHWM, blocked, strategy: this.storeOptions.strategy }; + }); + } + __free__(index, weight) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + this._running -= weight; + this._done += weight; + this.instance._drainAll(this.computeCapacity()); + return { running: this._running }; + }); + } +} +module.exports = LocalDatastore; diff --git a/lib/Queues.js b/lib/Queues.js index 1e4129a..50e01af 100644 --- a/lib/Queues.js +++ b/lib/Queues.js @@ -1,77 +1,48 @@ -"use strict"; - -var DLList, Events, Queues; -DLList = require("./DLList"); -Events = require("./Events"); -Queues = class Queues { - constructor(num_priorities) { - var i; - this.Events = new Events(this); - this._length = 0; - - this._lists = function () { - var j, ref, results; - 
results = []; - - for (i = j = 1, ref = num_priorities; 1 <= ref ? j <= ref : j >= ref; i = 1 <= ref ? ++j : --j) { - results.push(new DLList(() => { - return this.incr(); - }, () => { - return this.decr(); - })); - } - - return results; - }.call(this); - } - - incr() { - if (this._length++ === 0) { - return this.Events.trigger("leftzero"); +const DLList = require("./DLList"); +const Events = require("./Events"); +class Queues { + constructor(num_priorities) { + this.Events = new Events(this); + this._length = 0; + this._lists = []; + for (let i = 0; i < num_priorities; i++) { + const list = new DLList(() => this.incr(), () => this.decr()); + this._lists.push(list); + } } - } - - decr() { - if (--this._length === 0) { - return this.Events.trigger("zero"); + incr() { + if (this._length++ === 0) { + return this.Events.trigger("leftzero"); + } } - } - - push(job) { - return this._lists[job.options.priority].push(job); - } - - queued(priority) { - if (priority != null) { - return this._lists[priority].length; - } else { - return this._length; + decr() { + if (--this._length === 0) { + return this.Events.trigger("zero"); + } } - } - - shiftAll(fn) { - return this._lists.forEach(function (list) { - return list.forEachShift(fn); - }); - } - - getFirst(arr = this._lists) { - var j, len, list; - - for (j = 0, len = arr.length; j < len; j++) { - list = arr[j]; - - if (list.length > 0) { - return list; - } + push(job) { + return this._lists[job.options.priority].push(job); } - - return []; - } - - shiftLastFrom(priority) { - return this.getFirst(this._lists.slice(priority).reverse()).shift(); - } - -}; -module.exports = Queues; \ No newline at end of file + queued(priority) { + if (priority != null) { + return this._lists[priority].length; + } + else { + return this._length; + } + } + shiftAll(fn) { + return this._lists.forEach((list) => list.forEachShift(fn)); + } + getFirst(arr) { + for (const list of arr !== null && arr !== void 0 ? 
arr : this._lists) { + if (list.length > 0) + return list; + } + return []; + } + shiftLastFrom(priority) { + return this.getFirst(this._lists.slice(priority).reverse()).shift(); + } +} +module.exports = Queues; diff --git a/lib/RedisConnection.js b/lib/RedisConnection.js index b110704..023d81f 100644 --- a/lib/RedisConnection.js +++ b/lib/RedisConnection.js @@ -1,193 +1,140 @@ -"use strict"; - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -var Events, RedisConnection, Scripts, parser; -parser = require("./parser"); -Events = require("./Events"); -Scripts = require("./Scripts"); - -RedisConnection = function () { - class RedisConnection { - constructor(options = {}) { - parser.load(options, this.defaults, this); - - if (this.Redis == null) { - this.Redis = eval("require")("redis"); // Obfuscated or else Webpack/Angular will try to inline the optional redis module. To override this behavior: pass the redis module to Bottleneck as the 'Redis' option. 
- } - - if (this.Events == null) { - this.Events = new Events(this); - } - - this.terminated = false; - - if (this.client == null) { - this.client = this.Redis.createClient(this.clientOptions); - } - - this.subscriber = this.client.duplicate(); - this.limiters = {}; - this.shas = {}; - this.ready = this.Promise.all([this._setup(this.client, false), this._setup(this.subscriber, true)]).then(() => { - return this._loadScripts(); - }).then(() => { - return { - client: this.client, - subscriber: this.subscriber +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +const parser = require("./parser"); +const Events = require("./Events"); +const Scripts = require("./Scripts"); +class RedisConnection { + constructor(options) { + var _a, _b, _c; + this.defaults = { + Redis: null, + clientOptions: {}, + client: null, + Events: null, }; - }); + this.datastore = "redis"; + options !== null && options !== void 0 ? options : (options = {}); + parser.load(options, this.defaults, this); + // Obfuscated or else Webpack/Angular will try to inline the optional redis module. To override this behavior: pass the redis module to Bottleneck as the 'Redis' option. + (_a = this.Redis) !== null && _a !== void 0 ? _a : (this.Redis = eval("require")("redis")); + (_b = this.Events) !== null && _b !== void 0 ? 
_b : (this.Events = new Events(this)); + this.terminated = false; + (_c = this.client) !== null && _c !== void 0 ? _c : (this.client = this.Redis.createClient(this.clientOptions)); + this.subscriber = this.client.duplicate(); + this.limiters = {}; + this.shas = {}; + this.ready = Promise.all([this._setup(this.client, false), this._setup(this.subscriber, true)]) + .then(() => this._loadScripts()) + .then(() => ({ client: this.client, subscriber: this.subscriber })); } - _setup(client, sub) { - client.setMaxListeners(0); - return new this.Promise((resolve, reject) => { - client.on("error", e => { - return this.Events.trigger("error", e); + client.setMaxListeners(0); + return new Promise((resolve) => { + client.on("error", (e) => this.Events.trigger("error", e)); + if (sub) { + client.on("message", (channel, message) => { + var _a; + (_a = this.limiters[channel]) === null || _a === void 0 ? void 0 : _a._store.onMessage(channel, message); + }); + } + if (client.ready) { + resolve(); + } + else { + client.once("ready", resolve); + } }); - - if (sub) { - client.on("message", (channel, message) => { - var ref; - return (ref = this.limiters[channel]) != null ? 
ref._store.onMessage(channel, message) : void 0; - }); - } - - if (client.ready) { - return resolve(); - } else { - return client.once("ready", resolve); - } - }); } - _loadScript(name) { - return new this.Promise((resolve, reject) => { - var payload; - payload = Scripts.payload(name); - return this.client.multi([["script", "load", payload]]).exec((err, replies) => { - if (err != null) { - return reject(err); - } - - this.shas[name] = replies[0]; - return resolve(replies[0]); + return new Promise((resolve, reject) => { + const payload = Scripts.payload(name); + this.client.multi([["script", "load", payload]]).exec((err, replies) => { + if (err != null) { + reject(err); + } + this.shas[name] = replies[0]; + resolve(replies[0]); + }); }); - }); } - _loadScripts() { - return this.Promise.all(Scripts.names.map(k => { - return this._loadScript(k); - })); + return Promise.all(Scripts.names.map((k) => this._loadScript(k))); } - __runCommand__(cmd) { - var _this = this; - - return _asyncToGenerator(function* () { - yield _this.ready; - return new _this.Promise((resolve, reject) => { - return _this.client.multi([cmd]).exec_atomic(function (err, replies) { - if (err != null) { - return reject(err); - } else { - return resolve(replies[0]); - } - }); + return __awaiter(this, void 0, void 0, function* () { + yield this.ready; + return new Promise((resolve, reject) => { + this.client.multi([cmd]).exec_atomic(function (err, replies) { + if (err != null) { + reject(err); + } + else { + resolve(replies[0]); + } + }); + }); }); - })(); } - __addLimiter__(instance) { - return this.Promise.all([instance.channel(), instance.channel_client()].map(channel => { - return new this.Promise((resolve, reject) => { - var handler; - - handler = chan => { - if (chan === channel) { - this.subscriber.removeListener("subscribe", handler); - this.limiters[channel] = instance; - return resolve(); - } - }; - - this.subscriber.on("subscribe", handler); - return this.subscriber.subscribe(channel); + 
return __awaiter(this, void 0, void 0, function* () { + yield Promise.all([instance.channel(), instance.channel_client()].map((channel) => { + return new Promise((resolve) => { + var handler = (chan) => { + if (chan === channel) { + this.subscriber.removeListener("subscribe", handler); + this.limiters[channel] = instance; + resolve(); + } + }; + this.subscriber.on("subscribe", handler); + this.subscriber.subscribe(channel); + }); + })); }); - })); } - __removeLimiter__(instance) { - var _this2 = this; - - return this.Promise.all([instance.channel(), instance.channel_client()].map( - /*#__PURE__*/ - function () { - var _ref = _asyncToGenerator(function* (channel) { - if (!_this2.terminated) { - yield new _this2.Promise((resolve, reject) => { - return _this2.subscriber.unsubscribe(channel, function (err, chan) { - if (err != null) { - return reject(err); + return __awaiter(this, void 0, void 0, function* () { + yield Promise.all([instance.channel(), instance.channel_client()].map((channel) => __awaiter(this, void 0, void 0, function* () { + if (!this.terminated) { + yield new Promise((resolve, reject) => { + return this.subscriber.unsubscribe(channel, function (err, chan) { + if (err != null) { + return reject(err); + } + if (chan === channel) { + return resolve(); + } + }); + }); } - - if (chan === channel) { - return resolve(); - } - }); - }); - } - - return delete _this2.limiters[channel]; + delete this.limiters[channel]; + }))); }); - - return function (_x) { - return _ref.apply(this, arguments); - }; - }())); } - __scriptArgs__(name, id, args, cb) { - var keys; - keys = Scripts.keys(name, id); - return [this.shas[name], keys.length].concat(keys, args, cb); + const keys = Scripts.keys(name, id); + return [this.shas[name], keys.length].concat(keys, args, cb); } - - __scriptFn__(name) { - return this.client.evalsha.bind(this.client); + __scriptFn__() { + return this.client.evalsha.bind(this.client); } - - disconnect(flush = true) { - var i, k, len, ref; - ref = 
Object.keys(this.limiters); - - for (i = 0, len = ref.length; i < len; i++) { - k = ref[i]; - clearInterval(this.limiters[k]._store.heartbeat); - } - - this.limiters = {}; - this.terminated = true; - this.client.end(flush); - this.subscriber.end(flush); - return this.Promise.resolve(); + disconnect() { + return __awaiter(this, arguments, void 0, function* (flush = true) { + for (const v of Object.values(this.limiters)) { + clearInterval(v._store.heartbeat); + } + this.limiters = {}; + this.terminated = true; + this.client.end(flush); + this.subscriber.end(flush); + }); } - - } - - ; - RedisConnection.prototype.datastore = "redis"; - RedisConnection.prototype.defaults = { - Redis: null, - clientOptions: {}, - client: null, - Promise: Promise, - Events: null - }; - return RedisConnection; -}.call(void 0); - -module.exports = RedisConnection; \ No newline at end of file +} +module.exports = RedisConnection; diff --git a/lib/RedisDatastore.js b/lib/RedisDatastore.js index e260531..1b1ab17 100644 --- a/lib/RedisDatastore.js +++ b/lib/RedisDatastore.js @@ -1,352 +1,252 @@ -"use strict"; - -function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest(); } - -function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } - -function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } - -function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; } - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } 
if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -var BottleneckError, IORedisConnection, RedisConnection, RedisDatastore, parser; -parser = require("./parser"); -BottleneckError = require("./BottleneckError"); -RedisConnection = require("./RedisConnection"); -IORedisConnection = require("./IORedisConnection"); -RedisDatastore = class RedisDatastore { - constructor(instance, storeOptions, storeInstanceOptions) { - this.instance = instance; - this.storeOptions = storeOptions; - this.originalId = this.instance.id; - this.clientId = this.instance._randomIndex(); - parser.load(storeInstanceOptions, storeInstanceOptions, this); - this.clients = {}; - this.capacityPriorityCounters = {}; - this.sharedConnection = this.connection != null; - - if (this.connection == null) { - this.connection = this.instance.datastore === "redis" ? new RedisConnection({ - Redis: this.Redis, - clientOptions: this.clientOptions, - Promise: this.Promise, - Events: this.instance.Events - }) : this.instance.datastore === "ioredis" ? 
new IORedisConnection({ - Redis: this.Redis, - clientOptions: this.clientOptions, - clusterNodes: this.clusterNodes, - Promise: this.Promise, - Events: this.instance.Events - }) : void 0; - } - - this.instance.connection = this.connection; - this.instance.datastore = this.connection.datastore; - this.ready = this.connection.ready.then(clients => { - this.clients = clients; - return this.runScript("init", this.prepareInitSettings(this.clearDatastore)); - }).then(() => { - return this.connection.__addLimiter__(this.instance); - }).then(() => { - return this.runScript("register_client", [this.instance.queued()]); - }).then(() => { - var base; - - if (typeof (base = this.heartbeat = setInterval(() => { - return this.runScript("heartbeat", []).catch(e => { - return this.instance.Events.trigger("error", e); - }); - }, this.heartbeatInterval)).unref === "function") { - base.unref(); - } - - return this.clients; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - } - - __publish__(message) { - var _this = this; - - return _asyncToGenerator(function* () { - var client; - - var _ref = yield _this.ready; - - client = _ref.client; - return client.publish(_this.instance.channel(), `message:${message.toString()}`); - })(); - } - - onMessage(channel, message) { - var _this2 = this; - - return _asyncToGenerator(function* () { - var capacity, counter, data, drained, e, newCapacity, pos, priorityClient, rawCapacity, type; - - try { - pos = message.indexOf(":"); - var _ref2 = [message.slice(0, pos), message.slice(pos + 1)]; - type = _ref2[0]; - data = _ref2[1]; - - if (type === "capacity") { - return yield _this2.instance._drainAll(data.length > 0 ? ~~data : void 0); - } else if (type === "capacity-priority") { - var _data$split = data.split(":"); - - var _data$split2 = _slicedToArray(_data$split, 3); - - rawCapacity = _data$split2[0]; - priorityClient = _data$split2[1]; - counter = _data$split2[2]; - capacity = rawCapacity.length > 0 ? ~~rawCapacity : void 0; - - if (priorityClient === _this2.clientId) { - drained = yield _this2.instance._drainAll(capacity); - newCapacity = capacity != null ? 
capacity - (drained || 0) : ""; - return yield _this2.clients.client.publish(_this2.instance.channel(), `capacity-priority:${newCapacity}::${counter}`); - } else if (priorityClient === "") { - clearTimeout(_this2.capacityPriorityCounters[counter]); - delete _this2.capacityPriorityCounters[counter]; - return _this2.instance._drainAll(capacity); - } else { - return _this2.capacityPriorityCounters[counter] = setTimeout( - /*#__PURE__*/ - _asyncToGenerator(function* () { - var e; - - try { - delete _this2.capacityPriorityCounters[counter]; - yield _this2.runScript("blacklist_client", [priorityClient]); - return yield _this2.instance._drainAll(capacity); - } catch (error) { - e = error; - return _this2.instance.Events.trigger("error", e); - } - }), 1000); - } - } else if (type === "message") { - return _this2.instance.Events.trigger("message", data); - } else if (type === "blocked") { - return yield _this2.instance._dropAllQueued(); +}; +const parser = require("./parser"); +const BottleneckError = require("./BottleneckError"); +const RedisConnection = require("./RedisConnection"); +const IORedisConnection = require("./IORedisConnection"); +class RedisDatastore { + constructor(instance, storeOptions, storeInstanceOptions) { + this.instance = instance; + this.storeOptions = storeOptions; + this.originalId = this.instance.id; + this.clientId = this.instance._randomIndex(); + parser.load(storeInstanceOptions, storeInstanceOptions, this); + this.clients = {}; + this.capacityPriorityCounters = {}; + this.sharedConnection = this.connection != null; + if (!this.connection) { + if (this.instance.datastore === "redis") { + this.connection = new RedisConnection({ + Redis: this.Redis, + clientOptions: this.clientOptions, + Promise: Promise, + Events: this.instance.Events, + }); + } + else if (this.instance.datastore === "ioredis") { + this.connection = new IORedisConnection({ + Redis: this.Redis, + clientOptions: this.clientOptions, + clusterNodes: this.clusterNodes, + Promise: 
Promise, + Events: this.instance.Events, + }); + } } - } catch (error) { - e = error; - return _this2.instance.Events.trigger("error", e); - } - })(); - } - - __disconnect__(flush) { - clearInterval(this.heartbeat); - - if (this.sharedConnection) { - return this.connection.__removeLimiter__(this.instance); - } else { - return this.connection.disconnect(flush); + this.instance.connection = this.connection; + this.instance.datastore = this.connection.datastore; + this.ready = this.connection.ready + .then((clients) => { + this.clients = clients; + return this.runScript("init", this.prepareInitSettings(this.clearDatastore)); + }) + .then(() => this.connection.__addLimiter__(this.instance)) + .then(() => this.runScript("register_client", [this.instance.queued()])) + .then(() => { + var _a, _b; + this.heartbeat = (_b = (_a = setInterval(() => { + return this.runScript("heartbeat", []).catch((e) => this.instance.Events.trigger("error", e)); + }, this.heartbeatInterval)).unref) === null || _b === void 0 ? void 0 : _b.call(_a); + return this.clients; + }); + } + __publish__(message) { + return __awaiter(this, void 0, void 0, function* () { + const { client } = yield this.ready; + return client.publish(this.instance.channel(), `message:${message.toString()}`); + }); + } + onMessage(channel, message) { + return __awaiter(this, void 0, void 0, function* () { + try { + const pos = message.indexOf(":"); + const [type, data] = [message.slice(0, pos), message.slice(pos + 1)]; + if (type === "capacity") { + return yield this.instance._drainAll(data.length > 0 ? ~~data : undefined); + } + else if (type === "capacity-priority") { + const [rawCapacity, priorityClient, counter] = data.split(":"); + const capacity = rawCapacity.length > 0 ? ~~rawCapacity : undefined; + if (priorityClient === this.clientId) { + const drained = yield this.instance._drainAll(capacity); + const newCapacity = capacity != null ? 
capacity - (drained || 0) : ""; + return yield this.clients.client.publish(this.instance.channel(), `capacity-priority:${newCapacity}::${counter}`); + } + else if (priorityClient === "") { + clearTimeout(this.capacityPriorityCounters[counter]); + delete this.capacityPriorityCounters[counter]; + return this.instance._drainAll(capacity); + } + else { + return (this.capacityPriorityCounters[counter] = setTimeout(() => __awaiter(this, void 0, void 0, function* () { + try { + delete this.capacityPriorityCounters[counter]; + yield this.runScript("blacklist_client", [priorityClient]); + return yield this.instance._drainAll(capacity); + } + catch (e) { + return this.instance.Events.trigger("error", e); + } + }), 1000)); + } + } + else if (type === "message") { + return this.instance.Events.trigger("message", data); + } + else if (type === "blocked") { + return yield this.instance._dropAllQueued(); + } + } + catch (error) { + const e = error; + return this.instance.Events.trigger("error", e); + } + }); } - } - - runScript(name, args) { - var _this3 = this; - - return _asyncToGenerator(function* () { - if (!(name === "init" || name === "register_client")) { - yield _this3.ready; - } - - return new _this3.Promise((resolve, reject) => { - var all_args, arr; - all_args = [Date.now(), _this3.clientId].concat(args); - - _this3.instance.Events.trigger("debug", `Calling Redis script: ${name}.lua`, all_args); - - arr = _this3.connection.__scriptArgs__(name, _this3.originalId, all_args, function (err, replies) { - if (err != null) { - return reject(err); - } - - return resolve(replies); + __disconnect__(flush) { + return __awaiter(this, void 0, void 0, function* () { + clearInterval(this.heartbeat); + if (this.sharedConnection) { + yield this.connection.__removeLimiter__(this.instance); + } + else { + return this.connection.disconnect(flush); + } }); - return _this3.connection.__scriptFn__(name)(...arr); - }).catch(e => { - if (typeof e.message === "string" && 
e.message.match(/^(.*\s)?SETTINGS_KEY_NOT_FOUND$/) !== null) { - if (name === "heartbeat") { - return _this3.Promise.resolve(); - } else { - return _this3.runScript("init", _this3.prepareInitSettings(false)).then(() => { - return _this3.runScript(name, args); + } + runScript(name, args) { + return __awaiter(this, void 0, void 0, function* () { + if (name !== "init" && name !== "register_client") { + yield this.ready; + } + return new Promise((resolve, reject) => { + const all_args = [Date.now(), this.clientId].concat(args); + this.instance.Events.trigger("debug", `Calling Redis script: ${name}.lua`, all_args); + const arr = this.connection.__scriptArgs__(name, this.originalId, all_args, function (err, replies) { + if (err != null) { + return reject(err); + } + return resolve(replies); + }); + return this.connection.__scriptFn__(name)(...(arr || [])); + }).catch((e) => { + if (typeof e.message === "string" && + e.message.match(/^(.*\s)?SETTINGS_KEY_NOT_FOUND$/) !== null) { + if (name === "heartbeat") { + return Promise.resolve(); + } + else { + return this.runScript("init", this.prepareInitSettings(false)).then(() => this.runScript(name, args)); + } + } + else if (typeof e.message === "string" && + e.message.match(/^(.*\s)?UNKNOWN_CLIENT$/) !== null) { + return this.runScript("register_client", [this.instance.queued()]).then(() => this.runScript(name, args)); + } + else { + return Promise.reject(e); + } }); - } - } else if (typeof e.message === "string" && e.message.match(/^(.*\s)?UNKNOWN_CLIENT$/) !== null) { - return _this3.runScript("register_client", [_this3.instance.queued()]).then(() => { - return _this3.runScript(name, args); - }); - } else { - return _this3.Promise.reject(e); - } - }); - })(); - } - - prepareArray(arr) { - var i, len, results, x; - results = []; - - for (i = 0, len = arr.length; i < len; i++) { - x = arr[i]; - results.push(x != null ? 
x.toString() : ""); + }); } - - return results; - } - - prepareObject(obj) { - var arr, k, v; - arr = []; - - for (k in obj) { - v = obj[k]; - arr.push(k, v != null ? v.toString() : ""); + prepareArray(arr) { + return arr.map((x) => (x != null ? x.toString() : "")); } - - return arr; - } - - prepareInitSettings(clear) { - var args; - args = this.prepareObject(Object.assign({}, this.storeOptions, { - id: this.originalId, - version: this.instance.version, - groupTimeout: this.timeout, - clientTimeout: this.clientTimeout - })); - args.unshift(clear ? 1 : 0, this.instance.version); - return args; - } - - convertBool(b) { - return !!b; - } - - __updateSettings__(options) { - var _this4 = this; - - return _asyncToGenerator(function* () { - yield _this4.runScript("update_settings", _this4.prepareObject(options)); - return parser.overwrite(options, options, _this4.storeOptions); - })(); - } - - __running__() { - return this.runScript("running", []); - } - - __queued__() { - return this.runScript("queued", []); - } - - __done__() { - return this.runScript("done", []); - } - - __groupCheck__() { - var _this5 = this; - - return _asyncToGenerator(function* () { - return _this5.convertBool((yield _this5.runScript("group_check", []))); - })(); - } - - __incrementReservoir__(incr) { - return this.runScript("increment_reservoir", [incr]); - } - - __currentReservoir__() { - return this.runScript("current_reservoir", []); - } - - __check__(weight) { - var _this6 = this; - - return _asyncToGenerator(function* () { - return _this6.convertBool((yield _this6.runScript("check", _this6.prepareArray([weight])))); - })(); - } - - __register__(index, weight, expiration) { - var _this7 = this; - - return _asyncToGenerator(function* () { - var reservoir, success, wait; - - var _ref4 = yield _this7.runScript("register", _this7.prepareArray([index, weight, expiration])); - - var _ref5 = _slicedToArray(_ref4, 3); - - success = _ref5[0]; - wait = _ref5[1]; - reservoir = _ref5[2]; - return { - 
success: _this7.convertBool(success), - wait, - reservoir - }; - })(); - } - - __submit__(queueLength, weight) { - var _this8 = this; - - return _asyncToGenerator(function* () { - var blocked, e, maxConcurrent, overweight, reachedHWM, strategy; - - try { - var _ref6 = yield _this8.runScript("submit", _this8.prepareArray([queueLength, weight])); - - var _ref7 = _slicedToArray(_ref6, 3); - - reachedHWM = _ref7[0]; - blocked = _ref7[1]; - strategy = _ref7[2]; - return { - reachedHWM: _this8.convertBool(reachedHWM), - blocked: _this8.convertBool(blocked), - strategy - }; - } catch (error) { - e = error; - - if (e.message.indexOf("OVERWEIGHT") === 0) { - var _e$message$split = e.message.split(":"); - - var _e$message$split2 = _slicedToArray(_e$message$split, 3); - - overweight = _e$message$split2[0]; - weight = _e$message$split2[1]; - maxConcurrent = _e$message$split2[2]; - throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${maxConcurrent}`); - } else { - throw e; + prepareObject(obj) { + const arr = []; + for (const [k, v] of Object.entries(obj)) { + arr.push(k, v != null ? v.toString() : ""); } - } - })(); - } - - __free__(index, weight) { - var _this9 = this; - - return _asyncToGenerator(function* () { - var running; - running = yield _this9.runScript("free", _this9.prepareArray([index])); - return { - running - }; - })(); - } - -}; -module.exports = RedisDatastore; \ No newline at end of file + return arr; + } + prepareInitSettings(clear) { + const args = this.prepareObject(Object.assign({}, this.storeOptions, { + id: this.originalId, + version: this.instance.version, + groupTimeout: this.timeout, + clientTimeout: this.clientTimeout, + })); + args.unshift(clear ? 
1 : 0, this.instance.version); + return args; + } + convertBool(b) { + return !!b; + } + __updateSettings__(options) { + return __awaiter(this, void 0, void 0, function* () { + yield this.runScript("update_settings", this.prepareObject(options)); + return parser.overwrite(options, options, this.storeOptions); + }); + } + __running__() { + return this.runScript("running", []); + } + __queued__() { + return this.runScript("queued", []); + } + __done__() { + return this.runScript("done", []); + } + __groupCheck__() { + return __awaiter(this, void 0, void 0, function* () { + return this.convertBool(yield this.runScript("group_check", [])); + }); + } + __incrementReservoir__(incr) { + return this.runScript("increment_reservoir", [incr]); + } + __currentReservoir__() { + return this.runScript("current_reservoir", []); + } + __check__(weight) { + return __awaiter(this, void 0, void 0, function* () { + return this.convertBool(yield this.runScript("check", this.prepareArray([weight]))); + }); + } + __register__(index, weight, expiration) { + return __awaiter(this, void 0, void 0, function* () { + const [success, wait, reservoir] = yield this.runScript("register", this.prepareArray([index, weight, expiration])); + return { + success: this.convertBool(success), + wait, + reservoir, + }; + }); + } + __submit__(queueLength, weight) { + return __awaiter(this, void 0, void 0, function* () { + try { + const [reachedHWM, blocked, strategy] = Array.from(yield this.runScript("submit", this.prepareArray([queueLength, weight]))); + return { + reachedHWM: this.convertBool(reachedHWM), + blocked: this.convertBool(blocked), + strategy, + }; + } + catch (e) { + if (/^(ERR )?OVERWEIGHT/.test(e.message)) { + let maxConcurrent; + [, weight, maxConcurrent] = e.message.split(":"); + throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${maxConcurrent}`); + } + else { + throw e; + } + } + }); + } + __free__(index, _weight) 
{ + return __awaiter(this, void 0, void 0, function* () { + const running = yield this.runScript("free", this.prepareArray([index])); + return { running }; + }); + } +} +module.exports = RedisDatastore; diff --git a/lib/Scripts.js b/lib/Scripts.js index 395340e..f1d0443 100644 --- a/lib/Scripts.js +++ b/lib/Scripts.js @@ -1,162 +1,135 @@ -"use strict"; - -var headers, lua, templates; -lua = require("./lua.json"); -headers = { - refs: lua["refs.lua"], - validate_keys: lua["validate_keys.lua"], - validate_client: lua["validate_client.lua"], - refresh_expiration: lua["refresh_expiration.lua"], - process_tick: lua["process_tick.lua"], - conditions_check: lua["conditions_check.lua"], - get_time: lua["get_time.lua"] +const lua = require("../ref/lua.json"); +const headers = { + refs: lua["refs.lua"], + validate_keys: lua["validate_keys.lua"], + validate_client: lua["validate_client.lua"], + refresh_expiration: lua["refresh_expiration.lua"], + process_tick: lua["process_tick.lua"], + conditions_check: lua["conditions_check.lua"], + get_time: lua["get_time.lua"], }; - -exports.allKeys = function (id) { - return [ - /* - HASH - */ - `b_${id}_settings`, - /* - HASH - job index -> weight - */ - `b_${id}_job_weights`, - /* - ZSET - job index -> expiration - */ - `b_${id}_job_expirations`, - /* - HASH - job index -> client - */ - `b_${id}_job_clients`, - /* - ZSET - client -> sum running - */ - `b_${id}_client_running`, - /* - HASH - client -> num queued - */ - `b_${id}_client_num_queued`, - /* - ZSET - client -> last job registered - */ - `b_${id}_client_last_registered`, - /* - ZSET - client -> last seen - */ - `b_${id}_client_last_seen`]; -}; - -templates = { - init: { - keys: exports.allKeys, - headers: ["process_tick"], - refresh_expiration: true, - code: lua["init.lua"] - }, - group_check: { - keys: exports.allKeys, - headers: [], - refresh_expiration: false, - code: lua["group_check.lua"] - }, - register_client: { - keys: exports.allKeys, - headers: ["validate_keys"], - 
refresh_expiration: true, - code: lua["register_client.lua"] - }, - blacklist_client: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client"], - refresh_expiration: false, - code: lua["blacklist_client.lua"] - }, - heartbeat: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: false, - code: lua["heartbeat.lua"] - }, - update_settings: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: true, - code: lua["update_settings.lua"] - }, - running: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: false, - code: lua["running.lua"] - }, - queued: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client"], - refresh_expiration: false, - code: lua["queued.lua"] - }, - done: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: false, - code: lua["done.lua"] - }, - check: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], - refresh_expiration: false, - code: lua["check.lua"] - }, - submit: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], - refresh_expiration: true, - code: lua["submit.lua"] - }, - register: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], - refresh_expiration: true, - code: lua["register.lua"] - }, - free: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: true, - code: lua["free.lua"] - }, - current_reservoir: { - keys: exports.allKeys, - headers: ["validate_keys", "validate_client", "process_tick"], - refresh_expiration: false, - code: lua["current_reservoir.lua"] - }, - increment_reservoir: { - keys: exports.allKeys, - headers: 
["validate_keys", "validate_client", "process_tick"], - refresh_expiration: true, - code: lua["increment_reservoir.lua"] - } +exports.allKeys = (id) => [ + // HASH + `b_${id}_settings`, + // HASH + // job index -> weight + `b_${id}_job_weights`, + // ZSET + // job index -> expiration + `b_${id}_job_expirations`, + // HASH + // job index -> client + `b_${id}_job_clients`, + // ZSET + // client -> sum running + `b_${id}_client_running`, + // HASH + // client -> num queued + `b_${id}_client_num_queued`, + // ZSET + // client -> last job registered + `b_${id}_client_last_registered`, + // ZSET + // client -> last seen + `b_${id}_client_last_seen`, +]; +const templates = { + init: { + keys: exports.allKeys, + headers: ["process_tick"], + refresh_expiration: true, + code: lua["init.lua"], + }, + group_check: { + keys: exports.allKeys, + headers: [], + refresh_expiration: false, + code: lua["group_check.lua"], + }, + register_client: { + keys: exports.allKeys, + headers: ["validate_keys"], + refresh_expiration: true, + code: lua["register_client.lua"], + }, + blacklist_client: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client"], + refresh_expiration: false, + code: lua["blacklist_client.lua"], + }, + heartbeat: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: false, + code: lua["heartbeat.lua"], + }, + update_settings: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: true, + code: lua["update_settings.lua"], + }, + running: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: false, + code: lua["running.lua"], + }, + queued: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client"], + refresh_expiration: false, + code: lua["queued.lua"], + }, + done: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + 
refresh_expiration: false, + code: lua["done.lua"], + }, + check: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], + refresh_expiration: false, + code: lua["check.lua"], + }, + submit: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], + refresh_expiration: true, + code: lua["submit.lua"], + }, + register: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], + refresh_expiration: true, + code: lua["register.lua"], + }, + free: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: true, + code: lua["free.lua"], + }, + current_reservoir: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: false, + code: lua["current_reservoir.lua"], + }, + increment_reservoir: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: true, + code: lua["increment_reservoir.lua"], + }, }; exports.names = Object.keys(templates); - -exports.keys = function (name, id) { - return templates[name].keys(id); -}; - +exports.keys = (name, id) => templates[name].keys(id); exports.payload = function (name) { - var template; - template = templates[name]; - return Array.prototype.concat(headers.refs, template.headers.map(function (h) { - return headers[h]; - }), template.refresh_expiration ? headers.refresh_expiration : "", template.code).join("\n"); -}; \ No newline at end of file + const template = templates[name]; + return Array.prototype + .concat(headers.refs, template.headers.map((h) => headers[h]), template.refresh_expiration ? 
headers.refresh_expiration : "", template.code) + .join("\n"); +}; diff --git a/lib/States.js b/lib/States.js index 9b8ac14..077e7d8 100644 --- a/lib/States.js +++ b/lib/States.js @@ -1,88 +1,63 @@ -"use strict"; - -var BottleneckError, States; -BottleneckError = require("./BottleneckError"); -States = class States { - constructor(status1) { - this.status = status1; - this._jobs = {}; - this.counts = this.status.map(function () { - return 0; - }); - } - - next(id) { - var current, next; - current = this._jobs[id]; - next = current + 1; - - if (current != null && next < this.status.length) { - this.counts[current]--; - this.counts[next]++; - return this._jobs[id]++; - } else if (current != null) { - this.counts[current]--; - return delete this._jobs[id]; +const BottleneckError = require("./BottleneckError"); +class States { + constructor(status) { + this.status = status; + this._jobs = {}; + this.counts = this.status.map(() => 0); } - } - - start(id) { - var initial; - initial = 0; - this._jobs[id] = initial; - return this.counts[initial]++; - } - - remove(id) { - var current; - current = this._jobs[id]; - - if (current != null) { - this.counts[current]--; - delete this._jobs[id]; + next(id) { + const current = this._jobs[id]; + const next = current + 1; + if (current != null && next < this.status.length) { + this.counts[current]--; + this.counts[next]++; + this._jobs[id]++; + } + else if (current != null) { + this.counts[current]--; + delete this._jobs[id]; + } + } + start(id) { + const initial = 0; + this._jobs[id] = initial; + return this.counts[initial]++; + } + remove(id) { + const current = this._jobs[id]; + if (current != null) { + this.counts[current]--; + delete this._jobs[id]; + } + return current != null; + } + jobStatus(id) { + var _a; + return (_a = this.status[this._jobs[id]]) !== null && _a !== void 0 ? _a : null; } - - return current != null; - } - - jobStatus(id) { - var ref; - return (ref = this.status[this._jobs[id]]) != null ? 
ref : null; - } - - statusJobs(status) { - var k, pos, ref, results, v; - - if (status != null) { - pos = this.status.indexOf(status); - - if (pos < 0) { - throw new BottleneckError(`status must be one of ${this.status.join(', ')}`); - } - - ref = this._jobs; - results = []; - - for (k in ref) { - v = ref[k]; - - if (v === pos) { - results.push(k); + statusJobs(status) { + if (status != null) { + const pos = this.status.indexOf(status); + if (pos < 0) { + throw new BottleneckError(`status must be one of ${this.status.join(", ")}`); + } + const result = []; + for (const [k, v] of Object.entries(this._jobs)) { + if (v === pos) { + result.push(k); + } + } + return result; } - } - - return results; - } else { - return Object.keys(this._jobs); + else { + return Object.keys(this._jobs); + } + } + statusCounts() { + return this.counts.reduce((acc, v, i) => { + acc[this.status[i]] = v; + return acc; + }, {}); } - } - - statusCounts() { - return this.counts.reduce((acc, v, i) => { - acc[this.status[i]] = v; - return acc; - }, {}); - } - -}; -module.exports = States; \ No newline at end of file +} +module.exports = States; diff --git a/lib/Sync.js b/lib/Sync.js index f51eee4..596215c 100644 --- a/lib/Sync.js +++ b/lib/Sync.js @@ -1,80 +1,52 @@ -"use strict"; - -function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } - -function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } - -var DLList, Sync; -DLList = require("./DLList"); -Sync = class 
Sync { - constructor(name, Promise) { - this.schedule = this.schedule.bind(this); - this.name = name; - this.Promise = Promise; - this._running = 0; - this._queue = new DLList(); - } - - isEmpty() { - return this._queue.length === 0; - } - - _tryToRun() { - var _this = this; - - return _asyncToGenerator(function* () { - var args, cb, error, reject, resolve, returned, task; - - if (_this._running < 1 && _this._queue.length > 0) { - _this._running++; - - var _this$_queue$shift = _this._queue.shift(); - - task = _this$_queue$shift.task; - args = _this$_queue$shift.args; - resolve = _this$_queue$shift.resolve; - reject = _this$_queue$shift.reject; - cb = yield _asyncToGenerator(function* () { - try { - returned = yield task(...args); - return function () { - return resolve(returned); - }; - } catch (error1) { - error = error1; - return function () { - return reject(error); - }; - } - })(); - _this._running--; - - _this._tryToRun(); - - return cb(); - } - })(); - } - - schedule(task, ...args) { - var promise, reject, resolve; - resolve = reject = null; - promise = new this.Promise(function (_resolve, _reject) { - resolve = _resolve; - return reject = _reject; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - - this._queue.push({ - task, - args, - resolve, - reject - }); - - this._tryToRun(); - - return promise; - } - }; -module.exports = Sync; \ No newline at end of file +const DLList = require("./DLList"); +class Sync { + constructor(name) { + this.schedule = this.schedule.bind(this); + this.name = name; + this._running = 0; + this._queue = new DLList(); + } + isEmpty() { + return this._queue.length === 0; + } + _tryToRun() { + return __awaiter(this, void 0, void 0, function* () { + if (this._running < 1 && this._queue.length > 0) { + this._running++; + const { task, args, resolve, reject } = this._queue.shift(); + let cb; + try { + const returned = yield task(...(args || [])); + cb = () => resolve(returned); + } + catch (error) { + cb = () => reject(error); + } + this._running--; + this._tryToRun(); + cb(); + } + }); + } + schedule(task, ...args) { + let reject; + let resolve = (reject = null); + const promise = new Promise(function (_resolve, _reject) { + resolve = _resolve; + reject = _reject; + }); + this._queue.push({ task, args, resolve, reject }); + this._tryToRun(); + return promise; + } +} +module.exports = Sync; diff --git a/lib/es5.js b/lib/es5.js index 822a26d..739efd8 100644 --- a/lib/es5.js +++ b/lib/es5.js @@ -1,5 +1,2 @@ -"use strict"; - require("regenerator-runtime/runtime"); - -module.exports = require("./Bottleneck"); \ No newline at end of file +module.exports = require("./Bottleneck"); diff --git a/lib/index.js b/lib/index.js index 3d447c1..fae69f0 100644 --- a/lib/index.js +++ b/lib/index.js @@ -1,3 +1 @@ -"use strict"; - -module.exports = require("./Bottleneck"); \ No newline at end of file +module.exports = require("./Bottleneck"); diff --git a/lib/parser.js b/lib/parser.js index 8686191..79a83a8 100644 --- a/lib/parser.js +++ b/lib/parser.js @@ -1,26 +1,17 @@ -"use strict"; - -exports.load = 
function (received, defaults, onto = {}) { - var k, ref, v; - - for (k in defaults) { - v = defaults[k]; - onto[k] = (ref = received[k]) != null ? ref : v; - } - - return onto; +exports.load = function (received, defaults, onto) { + var _a; + onto !== null && onto !== void 0 ? onto : (onto = {}); + for (const [k, v] of Object.entries(defaults)) { + onto[k] = (_a = received[k]) !== null && _a !== void 0 ? _a : v; + } + return onto; }; - -exports.overwrite = function (received, defaults, onto = {}) { - var k, v; - - for (k in received) { - v = received[k]; - - if (defaults[k] !== void 0) { - onto[k] = v; +exports.overwrite = function (received, defaults, onto) { + onto !== null && onto !== void 0 ? onto : (onto = {}); + for (const [k, v] of Object.entries(received)) { + if (defaults[k] !== undefined) { + onto[k] = v; + } } - } - - return onto; -}; \ No newline at end of file + return onto; +}; diff --git a/lib/version.json b/lib/version.json deleted file mode 100644 index fed4c3c..0000000 --- a/lib/version.json +++ /dev/null @@ -1 +0,0 @@ -{"version":"3.0.7"} diff --git a/light.d.ts b/light.d.ts index c02fedd..cc45c3b 100644 --- a/light.d.ts +++ b/light.d.ts @@ -1,134 +1,134 @@ declare module "bottleneck/light" { - namespace Bottleneck { + namespace Bottleneck { type ConstructorOptions = { - /** - * How many jobs can be running at the same time. - */ - readonly maxConcurrent?: number | null; - /** - * How long to wait after launching a job before launching another one. - */ - readonly minTime?: number | null; - /** - * How long can the queue get? When the queue length exceeds that value, the selected `strategy` is executed to shed the load. - */ - readonly highWater?: number | null; - /** - * Which strategy to use if the queue gets longer than the high water mark. - */ - readonly strategy?: Bottleneck.Strategy | null; - /** - * The `penalty` value used by the `Bottleneck.strategy.BLOCK` strategy. 
- */ - readonly penalty?: number | null; - /** - * How many jobs can be executed before the limiter stops executing jobs. If `reservoir` reaches `0`, no jobs will be executed until it is no longer `0`. - */ - readonly reservoir?: number | null; - /** - * Every `reservoirRefreshInterval` milliseconds, the `reservoir` value will be automatically reset to `reservoirRefreshAmount`. - */ - readonly reservoirRefreshInterval?: number | null; - /** - * The value to reset `reservoir` to when `reservoirRefreshInterval` is in use. - */ - readonly reservoirRefreshAmount?: number | null; - /** - * The increment applied to `reservoir` when `reservoirIncreaseInterval` is in use. - */ - readonly reservoirIncreaseAmount?: number | null; - /** - * Every `reservoirIncreaseInterval` milliseconds, the `reservoir` value will be automatically incremented by `reservoirIncreaseAmount`. - */ - readonly reservoirIncreaseInterval?: number | null; - /** - * The maximum value that `reservoir` can reach when `reservoirIncreaseInterval` is in use. - */ - readonly reservoirIncreaseMaximum?: number | null; - /** - * Optional identifier - */ - readonly id?: string | null; - /** - * Set to true to leave your failed jobs hanging instead of failing them. - */ - readonly rejectOnDrop?: boolean | null; - /** - * Set to true to keep track of done jobs with counts() and jobStatus(). Uses more memory. - */ - readonly trackDoneStatus?: boolean | null; - /** - * Where the limiter stores its internal state. The default (`local`) keeps the state in the limiter itself. Set it to `redis` to enable Clustering. - */ - readonly datastore?: string | null; - /** - * Override the Promise library used by Bottleneck. - */ - readonly Promise?: any; - /** - * This object is passed directly to the redis client library you've selected. - */ - readonly clientOptions?: any; - /** - * **ioredis only.** When `clusterNodes` is not null, the client will be instantiated by calling `new Redis.Cluster(clusterNodes, clientOptions)`. 
- */ - readonly clusterNodes?: any; - /** - * An existing Bottleneck.RedisConnection or Bottleneck.IORedisConnection object to use. - * If using, `datastore`, `clientOptions` and `clusterNodes` will be ignored. - */ - /** - * Optional Redis/IORedis library from `require('ioredis')` or equivalent. If not, Bottleneck will attempt to require Redis/IORedis at runtime. - */ - readonly Redis?: any; - /** - * Bottleneck connection object created from `new Bottleneck.RedisConnection` or `new Bottleneck.IORedisConnection`. - */ - readonly connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection | null; - /** - * When set to `true`, on initial startup, the limiter will wipe any existing Bottleneck state data on the Redis db. - */ - readonly clearDatastore?: boolean | null; - /** - * The Redis TTL in milliseconds for the keys created by the limiter. When `timeout` is set, the limiter's state will be automatically removed from Redis after timeout milliseconds of inactivity. Note: timeout is 300000 (5 minutes) by default when using a Group. - */ - readonly timeout?: number | null; - /** - * Every `heartbeatInterval` milliseconds, the `reservoir` is assessed. - */ - readonly heartbeatInterval?: number | null; + /** + * How many jobs can be running at the same time. + */ + readonly maxConcurrent?: number | null; + /** + * How long to wait after launching a job before launching another one. + */ + readonly minTime?: number | null; + /** + * How long can the queue get? When the queue length exceeds that value, the selected `strategy` is executed to shed the load. + */ + readonly highWater?: number | null; + /** + * Which strategy to use if the queue gets longer than the high water mark. + */ + readonly strategy?: Bottleneck.Strategy | null; + /** + * The `penalty` value used by the `Bottleneck.strategy.BLOCK` strategy. + */ + readonly penalty?: number | null; + /** + * How many jobs can be executed before the limiter stops executing jobs. 
If `reservoir` reaches `0`, no jobs will be executed until it is no longer `0`. + */ + readonly reservoir?: number | null; + /** + * Every `reservoirRefreshInterval` milliseconds, the `reservoir` value will be automatically reset to `reservoirRefreshAmount`. + */ + readonly reservoirRefreshInterval?: number | null; + /** + * The value to reset `reservoir` to when `reservoirRefreshInterval` is in use. + */ + readonly reservoirRefreshAmount?: number | null; + /** + * The increment applied to `reservoir` when `reservoirIncreaseInterval` is in use. + */ + readonly reservoirIncreaseAmount?: number | null; + /** + * Every `reservoirIncreaseInterval` milliseconds, the `reservoir` value will be automatically incremented by `reservoirIncreaseAmount`. + */ + readonly reservoirIncreaseInterval?: number | null; + /** + * The maximum value that `reservoir` can reach when `reservoirIncreaseInterval` is in use. + */ + readonly reservoirIncreaseMaximum?: number | null; + /** + * Optional identifier + */ + readonly id?: string | null; + /** + * Set to true to leave your failed jobs hanging instead of failing them. + */ + readonly rejectOnDrop?: boolean | null; + /** + * Set to true to keep track of done jobs with counts() and jobStatus(). Uses more memory. + */ + readonly trackDoneStatus?: boolean | null; + /** + * Where the limiter stores its internal state. The default (`local`) keeps the state in the limiter itself. Set it to `redis` to enable Clustering. + */ + readonly datastore?: string | null; + /** + * Override the Promise library used by Bottleneck. + */ + readonly Promise?: any; + /** + * This object is passed directly to the redis client library you've selected. + */ + readonly clientOptions?: any; + /** + * **ioredis only.** When `clusterNodes` is not null, the client will be instantiated by calling `new Redis.Cluster(clusterNodes, clientOptions)`. 
+ */ + readonly clusterNodes?: any; + /** + * An existing Bottleneck.RedisConnection or Bottleneck.IORedisConnection object to use. + * If using, `datastore`, `clientOptions` and `clusterNodes` will be ignored. + */ + /** + * Optional Redis/IORedis library from `require('ioredis')` or equivalent. If not, Bottleneck will attempt to require Redis/IORedis at runtime. + */ + readonly Redis?: any; + /** + * Bottleneck connection object created from `new Bottleneck.RedisConnection` or `new Bottleneck.IORedisConnection`. + */ + readonly connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection | null; + /** + * When set to `true`, on initial startup, the limiter will wipe any existing Bottleneck state data on the Redis db. + */ + readonly clearDatastore?: boolean | null; + /** + * The Redis TTL in milliseconds for the keys created by the limiter. When `timeout` is set, the limiter's state will be automatically removed from Redis after timeout milliseconds of inactivity. Note: timeout is 300000 (5 minutes) by default when using a Group. + */ + readonly timeout?: number | null; + /** + * Every `heartbeatInterval` milliseconds, the `reservoir` is assessed. + */ + readonly heartbeatInterval?: number | null; }; type JobOptions = { - /** - * A priority between `0` and `9`. A job with a priority of `4` will _always_ be executed before a job with a priority of `5`. - */ - readonly priority?: number | null; - /** - * Must be an integer equal to or higher than `0`. The `weight` is what increases the number of running jobs (up to `maxConcurrent`, if using) and decreases the `reservoir` value (if using). - */ - readonly weight?: number | null; - /** - * The number milliseconds a job has to finish. Jobs that take longer than their `expiration` will be failed with a `BottleneckError`. - */ - readonly expiration?: number | null; - /** - * Optional identifier, helps with debug output. - */ - readonly id?: string | null; + /** + * A priority between `0` and `9`. 
A job with a priority of `4` will _always_ be executed before a job with a priority of `5`. + */ + readonly priority?: number | null; + /** + * Must be an integer equal to or higher than `0`. The `weight` is what increases the number of running jobs (up to `maxConcurrent`, if using) and decreases the `reservoir` value (if using). + */ + readonly weight?: number | null; + /** + * The number milliseconds a job has to finish. Jobs that take longer than their `expiration` will be failed with a `BottleneckError`. + */ + readonly expiration?: number | null; + /** + * Optional identifier, helps with debug output. + */ + readonly id?: string | null; }; type StopOptions = { - /** - * When `true`, drop all the RECEIVED, QUEUED and RUNNING jobs. When `false`, allow those jobs to complete before resolving the Promise returned by this method. - */ - readonly dropWaitingJobs?: boolean | null; - /** - * The error message used to drop jobs when `dropWaitingJobs` is `true`. - */ - readonly dropErrorMessage?: string | null; - /** - * The error message used to reject a job added to the limiter after `stop()` has been called. - */ - readonly enqueueErrorMessage?: string | null; + /** + * When `true`, drop all the RECEIVED, QUEUED and RUNNING jobs. When `false`, allow those jobs to complete before resolving the Promise returned by this method. + */ + readonly dropWaitingJobs?: boolean | null; + /** + * The error message used to drop jobs when `dropWaitingJobs` is `true`. + */ + readonly dropErrorMessage?: string | null; + /** + * The error message used to reject a job added to the limiter after `stop()` has been called. 
+ */ + readonly enqueueErrorMessage?: string | null; }; type Callback = (err: any, result: T) => void; type ClientsList = { client?: any; subscriber?: any }; @@ -136,279 +136,278 @@ declare module "bottleneck/light" { interface Strategy {} type EventInfo = { - readonly args: any[]; - readonly options: { - readonly id: string; - readonly priority: number; - readonly weight: number; - readonly expiration?: number; - }; + readonly args: any[]; + readonly options: { + readonly id: string; + readonly priority: number; + readonly weight: number; + readonly expiration?: number; + }; }; type EventInfoDropped = EventInfo & { - readonly task: Function; - readonly promise: Promise; + readonly task: Function; + readonly promise: Promise; }; type EventInfoQueued = EventInfo & { - readonly reachedHWM: boolean; - readonly blocked: boolean; + readonly reachedHWM: boolean; + readonly blocked: boolean; }; - type EventInfoRetryable = EventInfo & { readonly retryCount: number; }; + type EventInfoRetryable = EventInfo & { readonly retryCount: number }; enum Status { - RECEIVED = "RECEIVED", - QUEUED = "QUEUED", - RUNNING = "RUNNING", - EXECUTING = "EXECUTING", - DONE = "DONE" + RECEIVED = "RECEIVED", + QUEUED = "QUEUED", + RUNNING = "RUNNING", + EXECUTING = "EXECUTING", + DONE = "DONE", } type Counts = { - RECEIVED: number, - QUEUED: number, - RUNNING: number, - EXECUTING: number, - DONE?: number + RECEIVED: number; + QUEUED: number; + RUNNING: number; + EXECUTING: number; + DONE?: number; }; type RedisConnectionOptions = { - /** - * This object is passed directly to NodeRedis' createClient() method. - */ - readonly clientOptions?: any; - /** - * An existing NodeRedis client to use. If using, `clientOptions` will be ignored. - */ - readonly client?: any; - /** - * Optional Redis library from `require('redis')` or equivalent. If not, Bottleneck will attempt to require Redis at runtime. 
- */ - readonly Redis?: any; + /** + * This object is passed directly to NodeRedis' createClient() method. + */ + readonly clientOptions?: any; + /** + * An existing NodeRedis client to use. If using, `clientOptions` will be ignored. + */ + readonly client?: any; + /** + * Optional Redis library from `require('redis')` or equivalent. If not, Bottleneck will attempt to require Redis at runtime. + */ + readonly Redis?: any; }; type IORedisConnectionOptions = { - /** - * This object is passed directly to ioredis' constructor method. - */ - readonly clientOptions?: any; - /** - * When `clusterNodes` is not null, the client will be instantiated by calling `new Redis.Cluster(clusterNodes, clientOptions)`. - */ - readonly clusterNodes?: any; - /** - * An existing ioredis client to use. If using, `clientOptions` and `clusterNodes` will be ignored. - */ - readonly client?: any; - /** - * Optional IORedis library from `require('ioredis')` or equivalent. If not, Bottleneck will attempt to require IORedis at runtime. - */ - readonly Redis?: any; + /** + * This object is passed directly to ioredis' constructor method. + */ + readonly clientOptions?: any; + /** + * When `clusterNodes` is not null, the client will be instantiated by calling `new Redis.Cluster(clusterNodes, clientOptions)`. + */ + readonly clusterNodes?: any; + /** + * An existing ioredis client to use. If using, `clientOptions` and `clusterNodes` will be ignored. + */ + readonly client?: any; + /** + * Optional IORedis library from `require('ioredis')` or equivalent. If not, Bottleneck will attempt to require IORedis at runtime. + */ + readonly Redis?: any; }; type BatcherOptions = { - /** - * Maximum acceptable time (in milliseconds) a request can have to wait before being flushed to the `"batch"` event. - */ - readonly maxTime?: number | null; - /** - * Maximum number of requests in a batch. 
- */ - readonly maxSize?: number | null; + /** + * Maximum acceptable time (in milliseconds) a request can have to wait before being flushed to the `"batch"` event. + */ + readonly maxTime?: number | null; + /** + * Maximum number of requests in a batch. + */ + readonly maxSize?: number | null; }; - class BottleneckError extends Error { - } + class BottleneckError extends Error {} class RedisConnection { - constructor(options?: Bottleneck.RedisConnectionOptions); - - /** - * Register an event listener. - * @param name - The event name. - * @param fn - The callback function. - */ - on(name: "error", fn: (error: any) => void): void; - - /** - * Register an event listener for one event only. - * @param name - The event name. - * @param fn - The callback function. - */ - once(name: "error", fn: (error: any) => void): void; - - /** - * Waits until the connection is ready and returns the raw Node_Redis clients. - */ - ready(): Promise; - - /** - * Close the redis clients. - * @param flush - Write transient data before closing. - */ - disconnect(flush?: boolean): Promise; + constructor(options?: Bottleneck.RedisConnectionOptions); + + /** + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: "error", fn: (error: any) => void): void; + + /** + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: "error", fn: (error: any) => void): void; + + /** + * Waits until the connection is ready and returns the raw Node_Redis clients. + */ + ready(): Promise; + + /** + * Close the redis clients. + * @param flush - Write transient data before closing. + */ + disconnect(flush?: boolean): Promise; } class IORedisConnection { - constructor(options?: Bottleneck.IORedisConnectionOptions); - - /** - * Register an event listener. - * @param name - The event name. - * @param fn - The callback function. 
- */ - on(name: "error", fn: (error: any) => void): void; - - /** - * Register an event listener for one event only. - * @param name - The event name. - * @param fn - The callback function. - */ - once(name: "error", fn: (error: any) => void): void; - - /** - * Waits until the connection is ready and returns the raw ioredis clients. - */ - ready(): Promise; - - /** - * Close the redis clients. - * @param flush - Write transient data before closing. - */ - disconnect(flush?: boolean): Promise; + constructor(options?: Bottleneck.IORedisConnectionOptions); + + /** + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: "error", fn: (error: any) => void): void; + + /** + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: "error", fn: (error: any) => void): void; + + /** + * Waits until the connection is ready and returns the raw ioredis clients. + */ + ready(): Promise; + + /** + * Close the redis clients. + * @param flush - Write transient data before closing. + */ + disconnect(flush?: boolean): Promise; } class Batcher { - constructor(options?: Bottleneck.BatcherOptions); - - /** - * Register an event listener. - * @param name - The event name. - * @param fn - The callback function. - */ - on(name: string, fn: Function): void; - on(name: "error", fn: (error: any) => void): void; - on(name: "batch", fn: (batch: any[]) => void): void; - - /** - * Register an event listener for one event only. - * @param name - The event name. - * @param fn - The callback function. - */ - once(name: string, fn: Function): void; - once(name: "error", fn: (error: any) => void): void; - once(name: "batch", fn: (batch: any[]) => void): void; - - /** - * Add a request to the Batcher. Batches are flushed to the "batch" event. 
- */ - add(data: any): Promise; + constructor(options?: Bottleneck.BatcherOptions); + + /** + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: string, fn: Function): void; + on(name: "error", fn: (error: any) => void): void; + on(name: "batch", fn: (batch: any[]) => void): void; + + /** + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: string, fn: Function): void; + once(name: "error", fn: (error: any) => void): void; + once(name: "batch", fn: (batch: any[]) => void): void; + + /** + * Add a request to the Batcher. Batches are flushed to the "batch" event. + */ + add(data: any): Promise; } class Group { - constructor(options?: Bottleneck.ConstructorOptions); - - id: string; - datastore: string; - connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection; - - /** - * Returns the limiter for the specified key. - * @param str - The limiter key. - */ - key(str: string): Bottleneck; - - /** - * Register an event listener. - * @param name - The event name. - * @param fn - The callback function. - */ - on(name: string, fn: Function): void; - on(name: "error", fn: (error: any) => void): void; - on(name: "created", fn: (limiter: Bottleneck, key: string) => void): void; - - /** - * Register an event listener for one event only. - * @param name - The event name. - * @param fn - The callback function. - */ - once(name: string, fn: Function): void; - once(name: "error", fn: (error: any) => void): void; - once(name: "created", fn: (limiter: Bottleneck, key: string) => void): void; - - /** - * Removes all registered event listeners. - * @param name - The optional event name to remove listeners from. - */ - removeAllListeners(name?: string): void; - - /** - * Updates the group settings. - * @param options - The new settings. 
- */ - updateSettings(options: Bottleneck.ConstructorOptions): void; - - /** - * Deletes the limiter for the given key. - * Returns true if a key was deleted. - * @param str - The key - */ - deleteKey(str: string): Promise; - - /** - * Disconnects the underlying redis clients, unless the Group was created with the `connection` option. - * @param flush - Write transient data before closing. - */ - disconnect(flush?: boolean): Promise; - - /** - * Returns all the key-limiter pairs. - */ - limiters(): Bottleneck.GroupLimiterPair[]; - - /** - * Returns all Group keys in the local instance - */ - keys(): string[]; - - /** - * Returns all Group keys in the Cluster - */ - clusterKeys(): Promise; + constructor(options?: Bottleneck.ConstructorOptions); + + id: string; + datastore: string; + connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection; + + /** + * Returns the limiter for the specified key. + * @param str - The limiter key. + */ + key(str: string): Bottleneck; + + /** + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: string, fn: Function): void; + on(name: "error", fn: (error: any) => void): void; + on(name: "created", fn: (limiter: Bottleneck, key: string) => void): void; + + /** + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: string, fn: Function): void; + once(name: "error", fn: (error: any) => void): void; + once(name: "created", fn: (limiter: Bottleneck, key: string) => void): void; + + /** + * Removes all registered event listeners. + * @param name - The optional event name to remove listeners from. + */ + removeAllListeners(name?: string): void; + + /** + * Updates the group settings. + * @param options - The new settings. + */ + updateSettings(options: Bottleneck.ConstructorOptions): void; + + /** + * Deletes the limiter for the given key. + * Returns true if a key was deleted. 
+ * @param str - The key + */ + deleteKey(str: string): Promise; + + /** + * Disconnects the underlying redis clients, unless the Group was created with the `connection` option. + * @param flush - Write transient data before closing. + */ + disconnect(flush?: boolean): Promise; + + /** + * Returns all the key-limiter pairs. + */ + limiters(): Bottleneck.GroupLimiterPair[]; + + /** + * Returns all Group keys in the local instance + */ + keys(): string[]; + + /** + * Returns all Group keys in the Cluster + */ + clusterKeys(): Promise; } class Events { - constructor(object: Object); - - /** - * Returns the number of limiters for the event name - * @param name - The event name. - */ - listenerCount(name: string): number; - - /** - * Returns a promise with the first non-null/non-undefined result from a listener - * @param name - The event name. - * @param args - The arguments to pass to the event listeners. - */ - trigger(name: string, ...args: any[]): Promise; + constructor(object: Object); + + /** + * Returns the number of limiters for the event name + * @param name - The event name. + */ + listenerCount(name: string): number; + + /** + * Returns a promise with the first non-null/non-undefined result from a listener + * @param name - The event name. + * @param args - The arguments to pass to the event listeners. + */ + trigger(name: string, ...args: any[]): Promise; } -} -class Bottleneck { + } + class Bottleneck { public static readonly strategy: { - /** - * When adding a new job to a limiter, if the queue length reaches `highWater`, drop the oldest job with the lowest priority. This is useful when jobs that have been waiting for too long are not important anymore. If all the queued jobs are more important (based on their `priority` value) than the one being added, it will not be added. - */ - readonly LEAK: Bottleneck.Strategy; - /** - * Same as `LEAK`, except it will only drop jobs that are less important than the one being added. 
If all the queued jobs are as or more important than the new one, it will not be added. - */ - readonly OVERFLOW_PRIORITY: Bottleneck.Strategy; - /** - * When adding a new job to a limiter, if the queue length reaches `highWater`, do not add the new job. This strategy totally ignores priority levels. - */ - readonly OVERFLOW: Bottleneck.Strategy; - /** - * When adding a new job to a limiter, if the queue length reaches `highWater`, the limiter falls into "blocked mode". All queued jobs are dropped and no new jobs will be accepted until the limiter unblocks. It will unblock after `penalty` milliseconds have passed without receiving a new job. `penalty` is equal to `15 * minTime` (or `5000` if `minTime` is `0`) by default and can be changed by calling `changePenalty()`. This strategy is ideal when bruteforce attacks are to be expected. This strategy totally ignores priority levels. - */ - readonly BLOCK: Bottleneck.Strategy; + /** + * When adding a new job to a limiter, if the queue length reaches `highWater`, drop the oldest job with the lowest priority. This is useful when jobs that have been waiting for too long are not important anymore. If all the queued jobs are more important (based on their `priority` value) than the one being added, it will not be added. + */ + readonly LEAK: Bottleneck.Strategy; + /** + * Same as `LEAK`, except it will only drop jobs that are less important than the one being added. If all the queued jobs are as or more important than the new one, it will not be added. + */ + readonly OVERFLOW_PRIORITY: Bottleneck.Strategy; + /** + * When adding a new job to a limiter, if the queue length reaches `highWater`, do not add the new job. This strategy totally ignores priority levels. + */ + readonly OVERFLOW: Bottleneck.Strategy; + /** + * When adding a new job to a limiter, if the queue length reaches `highWater`, the limiter falls into "blocked mode". All queued jobs are dropped and no new jobs will be accepted until the limiter unblocks. 
It will unblock after `penalty` milliseconds have passed without receiving a new job. `penalty` is equal to `15 * minTime` (or `5000` if `minTime` is `0`) by default and can be changed by calling `changePenalty()`. This strategy is ideal when bruteforce attacks are to be expected. This strategy totally ignores priority levels. + */ + readonly BLOCK: Bottleneck.Strategy; }; constructor(options?: Bottleneck.ConstructorOptions); @@ -418,161 +417,190 @@ class Bottleneck { connection?: Bottleneck.RedisConnection | Bottleneck.IORedisConnection; /** - * Returns a promise which will be resolved once the limiter is ready to accept jobs - * or rejected if it fails to start up. - */ + * Returns a promise which will be resolved once the limiter is ready to accept jobs + * or rejected if it fails to start up. + */ ready(): Promise; /** - * Returns a datastore-specific object of redis clients. - */ + * Returns a datastore-specific object of redis clients. + */ clients(): Bottleneck.ClientsList; /** - * Returns the name of the Redis pubsub channel used for this limiter - */ + * Returns the name of the Redis pubsub channel used for this limiter + */ channel(): string; /** - * Disconnects the underlying redis clients, unless the limiter was created with the `connection` option. - * @param flush - Write transient data before closing. - */ + * Disconnects the underlying redis clients, unless the limiter was created with the `connection` option. + * @param flush - Write transient data before closing. + */ disconnect(flush?: boolean): Promise; /** - * Broadcast a string to every limiter in the Cluster. - */ + * Broadcast a string to every limiter in the Cluster. + */ publish(message: string): Promise; /** - * Returns an object with the current number of jobs per status. - */ + * Returns an object with the current number of jobs per status. + */ counts(): Bottleneck.Counts; /** - * Returns the status of the job with the provided job id. 
- */ + * Returns the status of the job with the provided job id. + */ jobStatus(id: string): Bottleneck.Status; /** - * Returns the status of the job with the provided job id. - */ + * Returns the status of the job with the provided job id. + */ jobs(status?: Bottleneck.Status): string[]; /** - * Returns the number of requests queued. - * @param priority - Returns the number of requests queued with the specified priority. - */ + * Returns the number of requests queued. + * @param priority - Returns the number of requests queued with the specified priority. + */ queued(priority?: number): number; /** - * Returns the number of requests queued across the Cluster. - */ + * Returns the number of requests queued across the Cluster. + */ clusterQueued(): Promise; /** - * Returns whether there are any jobs currently in the queue or in the process of being added to the queue. - */ + * Returns whether there are any jobs currently in the queue or in the process of being added to the queue. + */ empty(): boolean; /** - * Returns the total weight of jobs in a RUNNING or EXECUTING state in the Cluster. - */ + * Returns the total weight of jobs in a RUNNING or EXECUTING state in the Cluster. + */ running(): Promise; /** - * Returns the total weight of jobs in a DONE state in the Cluster. - */ + * Returns the total weight of jobs in a DONE state in the Cluster. + */ done(): Promise; /** - * If a request was added right now, would it be run immediately? - * @param weight - The weight of the request - */ + * If a request was added right now, would it be run immediately? + * @param weight - The weight of the request + */ check(weight?: number): Promise; /** - * Register an event listener. - * @param name - The event name. - * @param fn - The callback function. 
- */ - on(name: "error", fn: (error: any) => void): void; - on(name: "empty", fn: () => void): void; - on(name: "idle", fn: () => void): void; - on(name: "depleted", fn: (empty: boolean) => void): void; - on(name: "message", fn: (message: string) => void): void; - on(name: "debug", fn: (message: string, info: any) => void): void; - on(name: "dropped", fn: (dropped: Bottleneck.EventInfoDropped) => void): void; - on(name: "received", fn: (info: Bottleneck.EventInfo) => void): void; - on(name: "queued", fn: (info: Bottleneck.EventInfoQueued) => void): void; + * Register an event listener. + * @param name - The event name. + * @param fn - The callback function. + */ + on(name: "error", fn: (error: any) => void): void; + on(name: "empty", fn: () => void): void; + on(name: "idle", fn: () => void): void; + on(name: "depleted", fn: (empty: boolean) => void): void; + on(name: "message", fn: (message: string) => void): void; + on(name: "debug", fn: (message: string, info: any) => void): void; + on(name: "dropped", fn: (dropped: Bottleneck.EventInfoDropped) => void): void; + on(name: "received", fn: (info: Bottleneck.EventInfo) => void): void; + on(name: "queued", fn: (info: Bottleneck.EventInfoQueued) => void): void; on(name: "scheduled", fn: (info: Bottleneck.EventInfo) => void): void; on(name: "executing", fn: (info: Bottleneck.EventInfoRetryable) => void): void; - on(name: "failed", fn: (error: any, info: Bottleneck.EventInfoRetryable) => Promise | number | void | null): void; - on(name: "retry", fn: (message: string, info: Bottleneck.EventInfoRetryable) => void): void; - on(name: "done", fn: (info: Bottleneck.EventInfoRetryable) => void): void; + on( + name: "failed", + fn: ( + error: any, + info: Bottleneck.EventInfoRetryable, + ) => Promise | number | void | null, + ): void; + on(name: "retry", fn: (message: string, info: Bottleneck.EventInfoRetryable) => void): void; + on(name: "done", fn: (info: Bottleneck.EventInfoRetryable) => void): void; /** - * Register an event 
listener for one event only. - * @param name - The event name. - * @param fn - The callback function. - */ - once(name: "error", fn: (error: any) => void): void; - once(name: "empty", fn: () => void): void; - once(name: "idle", fn: () => void): void; - once(name: "depleted", fn: (empty: boolean) => void): void; - once(name: "message", fn: (message: string) => void): void; - once(name: "debug", fn: (message: string, info: any) => void): void; - once(name: "dropped", fn: (dropped: Bottleneck.EventInfoDropped) => void): void; - once(name: "received", fn: (info: Bottleneck.EventInfo) => void): void; - once(name: "queued", fn: (info: Bottleneck.EventInfoQueued) => void): void; + * Register an event listener for one event only. + * @param name - The event name. + * @param fn - The callback function. + */ + once(name: "error", fn: (error: any) => void): void; + once(name: "empty", fn: () => void): void; + once(name: "idle", fn: () => void): void; + once(name: "depleted", fn: (empty: boolean) => void): void; + once(name: "message", fn: (message: string) => void): void; + once(name: "debug", fn: (message: string, info: any) => void): void; + once(name: "dropped", fn: (dropped: Bottleneck.EventInfoDropped) => void): void; + once(name: "received", fn: (info: Bottleneck.EventInfo) => void): void; + once(name: "queued", fn: (info: Bottleneck.EventInfoQueued) => void): void; once(name: "scheduled", fn: (info: Bottleneck.EventInfo) => void): void; once(name: "executing", fn: (info: Bottleneck.EventInfoRetryable) => void): void; - once(name: "failed", fn: (error: any, info: Bottleneck.EventInfoRetryable) => Promise | number | void | null): void; - once(name: "retry", fn: (message: string, info: Bottleneck.EventInfoRetryable) => void): void; - once(name: "done", fn: (info: Bottleneck.EventInfoRetryable) => void): void; + once( + name: "failed", + fn: ( + error: any, + info: Bottleneck.EventInfoRetryable, + ) => Promise | number | void | null, + ): void; + once(name: "retry", fn: 
(message: string, info: Bottleneck.EventInfoRetryable) => void): void; + once(name: "done", fn: (info: Bottleneck.EventInfoRetryable) => void): void; /** - * Removes all registered event listeners. - * @param name - The optional event name to remove listeners from. - */ + * Removes all registered event listeners. + * @param name - The optional event name to remove listeners from. + */ removeAllListeners(name?: string): void; /** - * Changes the settings for future requests. - * @param options - The new settings. - */ + * Changes the settings for future requests. + * @param options - The new settings. + */ updateSettings(options?: Bottleneck.ConstructorOptions): Promise; /** - * Adds to the reservoir count and returns the new value. - */ + * Adds to the reservoir count and returns the new value. + */ incrementReservoir(incrementBy: number): Promise; /** - * The `stop()` method is used to safely shutdown a limiter. It prevents any new jobs from being added to the limiter and waits for all Executing jobs to complete. - */ + * The `stop()` method is used to safely shutdown a limiter. It prevents any new jobs from being added to the limiter and waits for all Executing jobs to complete. + */ stop(options?: Bottleneck.StopOptions): Promise; /** - * Returns the current reservoir count, if any. - */ + * Returns the current reservoir count, if any. + */ currentReservoir(): Promise; /** - * Chain this limiter to another. - * @param limiter - The limiter that requests to this limiter must also follow. - */ + * Chain this limiter to another. + * @param limiter - The limiter that requests to this limiter must also follow. 
+ */ chain(limiter?: Bottleneck): Bottleneck; - wrap(fn: (...args: Args) => PromiseLike): ((...args: Args) => Promise) & { withOptions: (options: Bottleneck.JobOptions, ...args: Args) => Promise; }; - - submit(fn: (...args: [...Args, Bottleneck.Callback]) => void, ...args: [...Args, Bottleneck.Callback]): void; - submit(options: Bottleneck.JobOptions, fn: (...args: [...Args, Bottleneck.Callback]) => void, ...args: [...Args, Bottleneck.Callback]): void; - - schedule(fn: (...args: Args) => PromiseLike, ...args: Args): Promise; - schedule(options: Bottleneck.JobOptions, fn: (...args: Args) => PromiseLike, ...args: Args): Promise; -} + wrap( + fn: (...args: Args) => PromiseLike, + ): ((...args: Args) => Promise) & { + withOptions: (options: Bottleneck.JobOptions, ...args: Args) => Promise; + }; -export default Bottleneck + submit( + fn: (...args: [...Args, Bottleneck.Callback]) => void, + ...args: [...Args, Bottleneck.Callback] + ): void; + submit( + options: Bottleneck.JobOptions, + fn: (...args: [...Args, Bottleneck.Callback]) => void, + ...args: [...Args, Bottleneck.Callback] + ): void; + + schedule( + fn: (...args: Args) => PromiseLike, + ...args: Args + ): Promise; + schedule( + options: Bottleneck.JobOptions, + fn: (...args: Args) => PromiseLike, + ...args: Args + ): Promise; + } + + export default Bottleneck; } - diff --git a/light.js b/light.js index a07dea1..ed5605a 100644 --- a/light.js +++ b/light.js @@ -1,1525 +1,1510 @@ +/* eslint-disable no-undef */ /** - * This file contains the Bottleneck library (MIT), compiled to ES2017, and without Clustering support. - * https://github.com/SGrondin/bottleneck + * This file contains the Bottleneck library (MIT) without Clustering support. + * https://github.com/sderrow/bottleneck */ (function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : typeof define === 'function' && define.amd ? 
define(factory) : - (global.Bottleneck = factory()); -}(this, (function () { 'use strict'; + (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.Bottleneck = factory()); +})(this, (function () { 'use strict'; - var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {}; - - function getCjsExportFromNamespace (n) { - return n && n['default'] || n; + function getDefaultExportFromCjs (x) { + return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x; } - var load = function(received, defaults, onto = {}) { - var k, ref, v; - for (k in defaults) { - v = defaults[k]; - onto[k] = (ref = received[k]) != null ? ref : v; - } - return onto; - }; - - var overwrite = function(received, defaults, onto = {}) { - var k, v; - for (k in received) { - v = received[k]; - if (defaults[k] !== void 0) { - onto[k] = v; - } - } - return onto; - }; - - var parser = { - load: load, - overwrite: overwrite - }; - - var DLList; - - DLList = class DLList { - constructor(incr, decr) { - this.incr = incr; - this.decr = decr; - this._first = null; - this._last = null; - this.length = 0; - } - - push(value) { - var node; - this.length++; - if (typeof this.incr === "function") { - this.incr(); - } - node = { - value, - prev: this._last, - next: null - }; - if (this._last != null) { - this._last.next = node; - this._last = node; - } else { - this._first = this._last = node; - } - return void 0; - } - - shift() { - var value; - if (this._first == null) { - return; - } else { - this.length--; - if (typeof this.decr === "function") { - this.decr(); - } - } - value = this._first.value; - if ((this._first = this._first.next) != null) { - this._first.prev = null; - } else { - this._last = null; - } - return value; - } - - first() { - if (this._first != null) { - return this._first.value; - } - } - - 
getArray() { - var node, ref, results; - node = this._first; - results = []; - while (node != null) { - results.push((ref = node, node = node.next, ref.value)); - } - return results; - } - - forEachShift(cb) { - var node; - node = this.shift(); - while (node != null) { - (cb(node), node = this.shift()); - } - return void 0; - } - - debug() { - var node, ref, ref1, ref2, results; - node = this._first; - results = []; - while (node != null) { - results.push((ref = node, node = node.next, { - value: ref.value, - prev: (ref1 = ref.prev) != null ? ref1.value : void 0, - next: (ref2 = ref.next) != null ? ref2.value : void 0 - })); - } - return results; - } - - }; - - var DLList_1 = DLList; - - var Events; - - Events = class Events { - constructor(instance) { - this.instance = instance; - this._events = {}; - if ((this.instance.on != null) || (this.instance.once != null) || (this.instance.removeAllListeners != null)) { - throw new Error("An Emitter already exists for this object"); - } - this.instance.on = (name, cb) => { - return this._addListener(name, "many", cb); - }; - this.instance.once = (name, cb) => { - return this._addListener(name, "once", cb); - }; - this.instance.removeAllListeners = (name = null) => { - if (name != null) { - return delete this._events[name]; - } else { - return this._events = {}; - } - }; - } - - _addListener(name, status, cb) { - var base; - if ((base = this._events)[name] == null) { - base[name] = []; - } - this._events[name].push({cb, status}); - return this.instance; - } - - listenerCount(name) { - if (this._events[name] != null) { - return this._events[name].length; - } else { - return 0; - } - } - - async trigger(name, ...args) { - var e, promises; - try { - if (name !== "debug") { - this.trigger("debug", `Event triggered: ${name}`, args); - } - if (this._events[name] == null) { - return; - } - this._events[name] = this._events[name].filter(function(listener) { - return listener.status !== "none"; - }); - promises = 
this._events[name].map(async(listener) => { - var e, returned; - if (listener.status === "none") { - return; - } - if (listener.status === "once") { - listener.status = "none"; - } - try { - returned = typeof listener.cb === "function" ? listener.cb(...args) : void 0; - if (typeof (returned != null ? returned.then : void 0) === "function") { - return (await returned); - } else { - return returned; - } - } catch (error) { - e = error; - { - this.trigger("error", e); - } - return null; - } - }); - return ((await Promise.all(promises))).find(function(x) { - return x != null; - }); - } catch (error) { - e = error; - { - this.trigger("error", e); - } - return null; - } - } - - }; - - var Events_1 = Events; - - var DLList$1, Events$1, Queues; - - DLList$1 = DLList_1; - - Events$1 = Events_1; - - Queues = class Queues { - constructor(num_priorities) { - var i; - this.Events = new Events$1(this); - this._length = 0; - this._lists = (function() { - var j, ref, results; - results = []; - for (i = j = 1, ref = num_priorities; (1 <= ref ? j <= ref : j >= ref); i = 1 <= ref ? 
++j : --j) { - results.push(new DLList$1((() => { - return this.incr(); - }), (() => { - return this.decr(); - }))); - } - return results; - }).call(this); - } - - incr() { - if (this._length++ === 0) { - return this.Events.trigger("leftzero"); - } - } - - decr() { - if (--this._length === 0) { - return this.Events.trigger("zero"); - } - } - - push(job) { - return this._lists[job.options.priority].push(job); - } - - queued(priority) { - if (priority != null) { - return this._lists[priority].length; - } else { - return this._length; - } - } - - shiftAll(fn) { - return this._lists.forEach(function(list) { - return list.forEachShift(fn); - }); - } - - getFirst(arr = this._lists) { - var j, len, list; - for (j = 0, len = arr.length; j < len; j++) { - list = arr[j]; - if (list.length > 0) { - return list; - } - } - return []; - } - - shiftLastFrom(priority) { - return this.getFirst(this._lists.slice(priority).reverse()).shift(); - } - - }; - - var Queues_1 = Queues; - - var BottleneckError; - - BottleneckError = class BottleneckError extends Error {}; - - var BottleneckError_1 = BottleneckError; - - var BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1; - - NUM_PRIORITIES = 10; - - DEFAULT_PRIORITY = 5; - - parser$1 = parser; - - BottleneckError$1 = BottleneckError_1; - - Job = class Job { - constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states, Promise) { - this.task = task; - this.args = args; - this.rejectOnDrop = rejectOnDrop; - this.Events = Events; - this._states = _states; - this.Promise = Promise; - this.options = parser$1.load(options, jobDefaults); - this.options.priority = this._sanitizePriority(this.options.priority); - if (this.options.id === jobDefaults.id) { - this.options.id = `${this.options.id}-${this._randomIndex()}`; - } - this.promise = new this.Promise((_resolve, _reject) => { - this._resolve = _resolve; - this._reject = _reject; - }); - this.retryCount = 0; - } - - _sanitizePriority(priority) { - var 
sProperty; - sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority; - if (sProperty < 0) { - return 0; - } else if (sProperty > NUM_PRIORITIES - 1) { - return NUM_PRIORITIES - 1; - } else { - return sProperty; - } - } - - _randomIndex() { - return Math.random().toString(36).slice(2); - } - - doDrop({error, message = "This job has been dropped by Bottleneck"} = {}) { - if (this._states.remove(this.options.id)) { - if (this.rejectOnDrop) { - this._reject(error != null ? error : new BottleneckError$1(message)); - } - this.Events.trigger("dropped", {args: this.args, options: this.options, task: this.task, promise: this.promise}); - return true; - } else { - return false; - } - } - - _assertStatus(expected) { - var status; - status = this._states.jobStatus(this.options.id); - if (!(status === expected || (expected === "DONE" && status === null))) { - throw new BottleneckError$1(`Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`); - } - } - - doReceive() { - this._states.start(this.options.id); - return this.Events.trigger("received", {args: this.args, options: this.options}); - } - - doQueue(reachedHWM, blocked) { - this._assertStatus("RECEIVED"); - this._states.next(this.options.id); - return this.Events.trigger("queued", {args: this.args, options: this.options, reachedHWM, blocked}); - } - - doRun() { - if (this.retryCount === 0) { - this._assertStatus("QUEUED"); - this._states.next(this.options.id); - } else { - this._assertStatus("EXECUTING"); - } - return this.Events.trigger("scheduled", {args: this.args, options: this.options}); - } - - async doExecute(chained, clearGlobalState, run, free) { - var error, eventInfo, passed; - if (this.retryCount === 0) { - this._assertStatus("RUNNING"); - this._states.next(this.options.id); - } else { - this._assertStatus("EXECUTING"); - } - eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount}; - 
this.Events.trigger("executing", eventInfo); - try { - passed = (await (chained != null ? chained.schedule(this.options, this.task, ...this.args) : this.task(...this.args))); - if (clearGlobalState()) { - this.doDone(eventInfo); - await free(this.options, eventInfo); - this._assertStatus("DONE"); - return this._resolve(passed); - } - } catch (error1) { - error = error1; - return this._onFailure(error, eventInfo, clearGlobalState, run, free); - } - } - - doExpire(clearGlobalState, run, free) { - var error, eventInfo; - if (this._states.jobStatus(this.options.id === "RUNNING")) { - this._states.next(this.options.id); - } - this._assertStatus("EXECUTING"); - eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount}; - error = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`); - return this._onFailure(error, eventInfo, clearGlobalState, run, free); - } - - async _onFailure(error, eventInfo, clearGlobalState, run, free) { - var retry, retryAfter; - if (clearGlobalState()) { - retry = (await this.Events.trigger("failed", error, eventInfo)); - if (retry != null) { - retryAfter = ~~retry; - this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo); - this.retryCount++; - return run(retryAfter); - } else { - this.doDone(eventInfo); - await free(this.options, eventInfo); - this._assertStatus("DONE"); - return this._reject(error); - } - } - } - - doDone(eventInfo) { - this._assertStatus("EXECUTING"); - this._states.next(this.options.id); - return this.Events.trigger("done", eventInfo); - } - - }; - - var Job_1 = Job; - - var BottleneckError$2, LocalDatastore, parser$2; - - parser$2 = parser; - - BottleneckError$2 = BottleneckError_1; - - LocalDatastore = class LocalDatastore { - constructor(instance, storeOptions, storeInstanceOptions) { - this.instance = instance; - this.storeOptions = storeOptions; - this.clientId = this.instance._randomIndex(); - parser$2.load(storeInstanceOptions, 
storeInstanceOptions, this); - this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); - this._running = 0; - this._done = 0; - this._unblockTime = 0; - this.ready = this.Promise.resolve(); - this.clients = {}; - this._startHeartbeat(); - } - - _startHeartbeat() { - var base; - if (this.heartbeat != null) { - clearInterval(this.heartbeat); - } - if (((this.storeOptions.reservoirRefreshInterval != null) && (this.storeOptions.reservoirRefreshAmount != null)) || ((this.storeOptions.reservoirIncreaseInterval != null) && (this.storeOptions.reservoirIncreaseAmount != null))) { - return typeof (base = (this.heartbeat = setInterval(() => { - var amount, incr, maximum, now, reservoir; - now = Date.now(); - if ((this.storeOptions.reservoirRefreshInterval != null) && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) { - this._lastReservoirRefresh = now; - this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount; - this.instance._drainAll(this.computeCapacity()); - } - if ((this.storeOptions.reservoirIncreaseInterval != null) && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) { - ({ - reservoirIncreaseAmount: amount, - reservoirIncreaseMaximum: maximum, - reservoir - } = this.storeOptions); - this._lastReservoirIncrease = now; - incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; - if (incr > 0) { - this.storeOptions.reservoir += incr; - return this.instance._drainAll(this.computeCapacity()); - } - } - }, this.heartbeatInterval))).unref === "function" ? 
base.unref() : void 0; - } - } - - async __publish__(message) { - await this.yieldLoop(); - return this.instance.Events.trigger("message", message.toString()); - } - - async __disconnect__(flush) { - await this.yieldLoop(); - clearInterval(this.heartbeat); - return this.Promise.resolve(); - } - - yieldLoop(t = 0) { - return new this.Promise(function(resolve, reject) { - return setTimeout(resolve, t); - }); - } - - computePenalty() { - var ref; - return (ref = this.storeOptions.penalty) != null ? ref : (15 * this.storeOptions.minTime) || 5000; - } - - async __updateSettings__(options) { - await this.yieldLoop(); - parser$2.overwrite(options, options, this.storeOptions); - this._startHeartbeat(); - this.instance._drainAll(this.computeCapacity()); - return true; - } - - async __running__() { - await this.yieldLoop(); - return this._running; - } - - async __queued__() { - await this.yieldLoop(); - return this.instance.queued(); - } - - async __done__() { - await this.yieldLoop(); - return this._done; - } - - async __groupCheck__(time) { - await this.yieldLoop(); - return (this._nextRequest + this.timeout) < time; - } - - computeCapacity() { - var maxConcurrent, reservoir; - ({maxConcurrent, reservoir} = this.storeOptions); - if ((maxConcurrent != null) && (reservoir != null)) { - return Math.min(maxConcurrent - this._running, reservoir); - } else if (maxConcurrent != null) { - return maxConcurrent - this._running; - } else if (reservoir != null) { - return reservoir; - } else { - return null; - } - } - - conditionsCheck(weight) { - var capacity; - capacity = this.computeCapacity(); - return (capacity == null) || weight <= capacity; - } - - async __incrementReservoir__(incr) { - var reservoir; - await this.yieldLoop(); - reservoir = this.storeOptions.reservoir += incr; - this.instance._drainAll(this.computeCapacity()); - return reservoir; - } - - async __currentReservoir__() { - await this.yieldLoop(); - return this.storeOptions.reservoir; - } - - isBlocked(now) { - 
return this._unblockTime >= now; - } - - check(weight, now) { - return this.conditionsCheck(weight) && (this._nextRequest - now) <= 0; - } - - async __check__(weight) { - var now; - await this.yieldLoop(); - now = Date.now(); - return this.check(weight, now); - } - - async __register__(index, weight, expiration) { - var now, wait; - await this.yieldLoop(); - now = Date.now(); - if (this.conditionsCheck(weight)) { - this._running += weight; - if (this.storeOptions.reservoir != null) { - this.storeOptions.reservoir -= weight; - } - wait = Math.max(this._nextRequest - now, 0); - this._nextRequest = now + wait + this.storeOptions.minTime; - return { - success: true, - wait, - reservoir: this.storeOptions.reservoir - }; - } else { - return { - success: false - }; - } - } - - strategyIsBlock() { - return this.storeOptions.strategy === 3; - } - - async __submit__(queueLength, weight) { - var blocked, now, reachedHWM; - await this.yieldLoop(); - if ((this.storeOptions.maxConcurrent != null) && weight > this.storeOptions.maxConcurrent) { - throw new BottleneckError$2(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`); - } - now = Date.now(); - reachedHWM = (this.storeOptions.highWater != null) && queueLength === this.storeOptions.highWater && !this.check(weight, now); - blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now)); - if (blocked) { - this._unblockTime = now + this.computePenalty(); - this._nextRequest = this._unblockTime + this.storeOptions.minTime; - this.instance._dropAllQueued(); - } - return { - reachedHWM, - blocked, - strategy: this.storeOptions.strategy - }; - } - - async __free__(index, weight) { - await this.yieldLoop(); - this._running -= weight; - this._done += weight; - this.instance._drainAll(this.computeCapacity()); - return { - running: this._running - }; - } - - }; - - var LocalDatastore_1 = LocalDatastore; - - var BottleneckError$3, States; - - 
BottleneckError$3 = BottleneckError_1; - - States = class States { - constructor(status1) { - this.status = status1; - this._jobs = {}; - this.counts = this.status.map(function() { - return 0; - }); - } - - next(id) { - var current, next; - current = this._jobs[id]; - next = current + 1; - if ((current != null) && next < this.status.length) { - this.counts[current]--; - this.counts[next]++; - return this._jobs[id]++; - } else if (current != null) { - this.counts[current]--; - return delete this._jobs[id]; - } - } - - start(id) { - var initial; - initial = 0; - this._jobs[id] = initial; - return this.counts[initial]++; - } - - remove(id) { - var current; - current = this._jobs[id]; - if (current != null) { - this.counts[current]--; - delete this._jobs[id]; - } - return current != null; - } - - jobStatus(id) { - var ref; - return (ref = this.status[this._jobs[id]]) != null ? ref : null; - } - - statusJobs(status) { - var k, pos, ref, results, v; - if (status != null) { - pos = this.status.indexOf(status); - if (pos < 0) { - throw new BottleneckError$3(`status must be one of ${this.status.join(', ')}`); - } - ref = this._jobs; - results = []; - for (k in ref) { - v = ref[k]; - if (v === pos) { - results.push(k); - } - } - return results; - } else { - return Object.keys(this._jobs); - } - } - - statusCounts() { - return this.counts.reduce(((acc, v, i) => { - acc[this.status[i]] = v; - return acc; - }), {}); - } - - }; - - var States_1 = States; - - var DLList$2, Sync; - - DLList$2 = DLList_1; - - Sync = class Sync { - constructor(name, Promise) { - this.schedule = this.schedule.bind(this); - this.name = name; - this.Promise = Promise; - this._running = 0; - this._queue = new DLList$2(); - } - - isEmpty() { - return this._queue.length === 0; - } - - async _tryToRun() { - var args, cb, error, reject, resolve, returned, task; - if ((this._running < 1) && this._queue.length > 0) { - this._running++; - ({task, args, resolve, reject} = this._queue.shift()); - cb = (await 
(async function() { - try { - returned = (await task(...args)); - return function() { - return resolve(returned); - }; - } catch (error1) { - error = error1; - return function() { - return reject(error); - }; - } - })()); - this._running--; - this._tryToRun(); - return cb(); - } - } - - schedule(task, ...args) { - var promise, reject, resolve; - resolve = reject = null; - promise = new this.Promise(function(_resolve, _reject) { - resolve = _resolve; - return reject = _reject; - }); - this._queue.push({task, args, resolve, reject}); - this._tryToRun(); - return promise; - } - - }; - - var Sync_1 = Sync; - - var version = "3.0.7"; - var version$1 = { - version: version - }; - - var version$2 = /*#__PURE__*/Object.freeze({ - version: version, - default: version$1 - }); - - var require$$2 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); - - var require$$3 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); - - var require$$4 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); - - var Events$2, Group, IORedisConnection$1, RedisConnection$1, Scripts$1, parser$3; - - parser$3 = parser; - - Events$2 = Events_1; - - RedisConnection$1 = require$$2; - - IORedisConnection$1 = require$$3; - - Scripts$1 = require$$4; - - Group = (function() { - class Group { - constructor(limiterOptions = {}) { - this.deleteKey = this.deleteKey.bind(this); - this.limiterOptions = limiterOptions; - parser$3.load(this.limiterOptions, this.defaults, this); - this.Events = new Events$2(this); - this.instances = {}; - this.Bottleneck = Bottleneck_1; - this._startAutoCleanup(); - this.sharedConnection = this.connection != null; - if (this.connection == null) { - if (this.limiterOptions.datastore === "redis") { - this.connection = new RedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events})); - } else if 
(this.limiterOptions.datastore === "ioredis") { - this.connection = new IORedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events})); - } - } - } - - key(key = "") { - var ref; - return (ref = this.instances[key]) != null ? ref : (() => { - var limiter; - limiter = this.instances[key] = new this.Bottleneck(Object.assign(this.limiterOptions, { - id: `${this.id}-${key}`, - timeout: this.timeout, - connection: this.connection - })); - this.Events.trigger("created", limiter, key); - return limiter; - })(); - } - - async deleteKey(key = "") { - var deleted, instance; - instance = this.instances[key]; - if (this.connection) { - deleted = (await this.connection.__runCommand__(['del', ...Scripts$1.allKeys(`${this.id}-${key}`)])); - } - if (instance != null) { - delete this.instances[key]; - await instance.disconnect(); - } - return (instance != null) || deleted > 0; - } - - limiters() { - var k, ref, results, v; - ref = this.instances; - results = []; - for (k in ref) { - v = ref[k]; - results.push({ - key: k, - limiter: v - }); - } - return results; - } - - keys() { - return Object.keys(this.instances); - } - - async clusterKeys() { - var cursor, end, found, i, k, keys, len, next, start; - if (this.connection == null) { - return this.Promise.resolve(this.keys()); - } - keys = []; - cursor = null; - start = `b_${this.id}-`.length; - end = "_settings".length; - while (cursor !== 0) { - [next, found] = (await this.connection.__runCommand__(["scan", cursor != null ? 
cursor : 0, "match", `b_${this.id}-*_settings`, "count", 10000])); - cursor = ~~next; - for (i = 0, len = found.length; i < len; i++) { - k = found[i]; - keys.push(k.slice(start, -end)); - } - } - return keys; - } - - _startAutoCleanup() { - var base; - clearInterval(this.interval); - return typeof (base = (this.interval = setInterval(async() => { - var e, k, ref, results, time, v; - time = Date.now(); - ref = this.instances; - results = []; - for (k in ref) { - v = ref[k]; - try { - if ((await v._store.__groupCheck__(time))) { - results.push(this.deleteKey(k)); - } else { - results.push(void 0); - } - } catch (error) { - e = error; - results.push(v.Events.trigger("error", e)); - } - } - return results; - }, this.timeout / 2))).unref === "function" ? base.unref() : void 0; - } - - updateSettings(options = {}) { - parser$3.overwrite(options, this.defaults, this); - parser$3.overwrite(options, options, this.limiterOptions); - if (options.timeout != null) { - return this._startAutoCleanup(); - } - } - - disconnect(flush = true) { - var ref; - if (!this.sharedConnection) { - return (ref = this.connection) != null ? 
ref.disconnect(flush) : void 0; - } - } - - } - Group.prototype.defaults = { - timeout: 1000 * 60 * 5, - connection: null, - Promise: Promise, - id: "group-key" - }; - - return Group; - - }).call(commonjsGlobal); - - var Group_1 = Group; - - var Batcher, Events$3, parser$4; - - parser$4 = parser; - - Events$3 = Events_1; - - Batcher = (function() { - class Batcher { - constructor(options = {}) { - this.options = options; - parser$4.load(this.options, this.defaults, this); - this.Events = new Events$3(this); - this._arr = []; - this._resetPromise(); - this._lastFlush = Date.now(); - } - - _resetPromise() { - return this._promise = new this.Promise((res, rej) => { - return this._resolve = res; - }); - } - - _flush() { - clearTimeout(this._timeout); - this._lastFlush = Date.now(); - this._resolve(); - this.Events.trigger("batch", this._arr); - this._arr = []; - return this._resetPromise(); - } - - add(data) { - var ret; - this._arr.push(data); - ret = this._promise; - if (this._arr.length === this.maxSize) { - this._flush(); - } else if ((this.maxTime != null) && this._arr.length === 1) { - this._timeout = setTimeout(() => { - return this._flush(); - }, this.maxTime); - } - return ret; - } - - } - Batcher.prototype.defaults = { - maxTime: null, - maxSize: null, - Promise: Promise - }; - - return Batcher; - - }).call(commonjsGlobal); - - var Batcher_1 = Batcher; - - var require$$4$1 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); - - var require$$8 = getCjsExportFromNamespace(version$2); - - var Bottleneck, DEFAULT_PRIORITY$1, Events$4, Job$1, LocalDatastore$1, NUM_PRIORITIES$1, Queues$1, RedisDatastore$1, States$1, Sync$1, parser$5, - splice = [].splice; - - NUM_PRIORITIES$1 = 10; - - DEFAULT_PRIORITY$1 = 5; - - parser$5 = parser; - - Queues$1 = Queues_1; - - Job$1 = Job_1; - - LocalDatastore$1 = LocalDatastore_1; - - RedisDatastore$1 = require$$4$1; - - Events$4 = Events_1; - - States$1 = States_1; - - Sync$1 = 
Sync_1; - - Bottleneck = (function() { - class Bottleneck { - constructor(options = {}, ...invalid) { - var storeInstanceOptions, storeOptions; - this._addToQueue = this._addToQueue.bind(this); - this._validateOptions(options, invalid); - parser$5.load(options, this.instanceDefaults, this); - this._queues = new Queues$1(NUM_PRIORITIES$1); - this._scheduled = {}; - this._states = new States$1(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : [])); - this._limiter = null; - this.Events = new Events$4(this); - this._submitLock = new Sync$1("submit", this.Promise); - this._registerLock = new Sync$1("register", this.Promise); - storeOptions = parser$5.load(options, this.storeDefaults, {}); - this._store = (function() { - if (this.datastore === "redis" || this.datastore === "ioredis" || (this.connection != null)) { - storeInstanceOptions = parser$5.load(options, this.redisStoreDefaults, {}); - return new RedisDatastore$1(this, storeOptions, storeInstanceOptions); - } else if (this.datastore === "local") { - storeInstanceOptions = parser$5.load(options, this.localStoreDefaults, {}); - return new LocalDatastore$1(this, storeOptions, storeInstanceOptions); - } else { - throw new Bottleneck.prototype.BottleneckError(`Invalid datastore type: ${this.datastore}`); - } - }).call(this); - this._queues.on("leftzero", () => { - var ref; - return (ref = this._store.heartbeat) != null ? typeof ref.ref === "function" ? ref.ref() : void 0 : void 0; - }); - this._queues.on("zero", () => { - var ref; - return (ref = this._store.heartbeat) != null ? typeof ref.unref === "function" ? ref.unref() : void 0 : void 0; - }); - } - - _validateOptions(options, invalid) { - if (!((options != null) && typeof options === "object" && invalid.length === 0)) { - throw new Bottleneck.prototype.BottleneckError("Bottleneck v2 takes a single object argument. 
Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1."); - } - } - - ready() { - return this._store.ready; - } - - clients() { - return this._store.clients; - } - - channel() { - return `b_${this.id}`; - } - - channel_client() { - return `b_${this.id}_${this._store.clientId}`; - } - - publish(message) { - return this._store.__publish__(message); - } - - disconnect(flush = true) { - return this._store.__disconnect__(flush); - } - - chain(_limiter) { - this._limiter = _limiter; - return this; - } - - queued(priority) { - return this._queues.queued(priority); - } - - clusterQueued() { - return this._store.__queued__(); - } - - empty() { - return this.queued() === 0 && this._submitLock.isEmpty(); - } - - running() { - return this._store.__running__(); - } - - done() { - return this._store.__done__(); - } - - jobStatus(id) { - return this._states.jobStatus(id); - } - - jobs(status) { - return this._states.statusJobs(status); - } - - counts() { - return this._states.statusCounts(); - } - - _randomIndex() { - return Math.random().toString(36).slice(2); - } - - check(weight = 1) { - return this._store.__check__(weight); - } - - _clearGlobalState(index) { - if (this._scheduled[index] != null) { - clearTimeout(this._scheduled[index].expiration); - delete this._scheduled[index]; - return true; - } else { - return false; - } - } - - async _free(index, job, options, eventInfo) { - var e, running; + function getAugmentedNamespace(n) { + if (Object.prototype.hasOwnProperty.call(n, '__esModule')) return n; + var f = n.default; + if (typeof f == "function") { + var a = function a () { + var isInstance = false; try { - ({running} = (await this._store.__free__(index, options.weight))); - this.Events.trigger("debug", `Freed ${options.id}`, eventInfo); - if (running === 0 && this.empty()) { - return this.Events.trigger("idle"); - } - } catch (error1) { - e = error1; - return this.Events.trigger("error", e); - } - } - - _run(index, job, 
wait) { - var clearGlobalState, free, run; - job.doRun(); - clearGlobalState = this._clearGlobalState.bind(this, index); - run = this._run.bind(this, index, job); - free = this._free.bind(this, index, job); - return this._scheduled[index] = { - timeout: setTimeout(() => { - return job.doExecute(this._limiter, clearGlobalState, run, free); - }, wait), - expiration: job.options.expiration != null ? setTimeout(function() { - return job.doExpire(clearGlobalState, run, free); - }, wait + job.options.expiration) : void 0, - job: job - }; - } - - _drainOne(capacity) { - return this._registerLock.schedule(() => { - var args, index, next, options, queue; - if (this.queued() === 0) { - return this.Promise.resolve(null); - } - queue = this._queues.getFirst(); - ({options, args} = next = queue.first()); - if ((capacity != null) && options.weight > capacity) { - return this.Promise.resolve(null); - } - this.Events.trigger("debug", `Draining ${options.id}`, {args, options}); - index = this._randomIndex(); - return this._store.__register__(index, options.weight, options.expiration).then(({success, wait, reservoir}) => { - var empty; - this.Events.trigger("debug", `Drained ${options.id}`, {success, args, options}); - if (success) { - queue.shift(); - empty = this.empty(); - if (empty) { - this.Events.trigger("empty"); - } - if (reservoir === 0) { - this.Events.trigger("depleted", empty); - } - this._run(index, next, wait); - return this.Promise.resolve(options.weight); - } else { - return this.Promise.resolve(null); - } - }); - }); - } - - _drainAll(capacity, total = 0) { - return this._drainOne(capacity).then((drained) => { - var newCapacity; - if (drained != null) { - newCapacity = capacity != null ? 
capacity - drained : capacity; - return this._drainAll(newCapacity, total + drained); - } else { - return this.Promise.resolve(total); - } - }).catch((e) => { - return this.Events.trigger("error", e); - }); - } - - _dropAllQueued(message) { - return this._queues.shiftAll(function(job) { - return job.doDrop({message}); - }); - } - - stop(options = {}) { - var done, waitForExecuting; - options = parser$5.load(options, this.stopDefaults); - waitForExecuting = (at) => { - var finished; - finished = () => { - var counts; - counts = this._states.counts; - return (counts[0] + counts[1] + counts[2] + counts[3]) === at; - }; - return new this.Promise((resolve, reject) => { - if (finished()) { - return resolve(); - } else { - return this.on("done", () => { - if (finished()) { - this.removeAllListeners("done"); - return resolve(); - } - }); - } - }); - }; - done = options.dropWaitingJobs ? (this._run = function(index, next) { - return next.doDrop({ - message: options.dropErrorMessage - }); - }, this._drainOne = () => { - return this.Promise.resolve(null); - }, this._registerLock.schedule(() => { - return this._submitLock.schedule(() => { - var k, ref, v; - ref = this._scheduled; - for (k in ref) { - v = ref[k]; - if (this.jobStatus(v.job.options.id) === "RUNNING") { - clearTimeout(v.timeout); - clearTimeout(v.expiration); - v.job.doDrop({ - message: options.dropErrorMessage - }); - } - } - this._dropAllQueued(options.dropErrorMessage); - return waitForExecuting(0); - }); - })) : this.schedule({ - priority: NUM_PRIORITIES$1 - 1, - weight: 0 - }, () => { - return waitForExecuting(1); - }); - this._receive = function(job) { - return job._reject(new Bottleneck.prototype.BottleneckError(options.enqueueErrorMessage)); - }; - this.stop = () => { - return this.Promise.reject(new Bottleneck.prototype.BottleneckError("stop() has already been called")); - }; - return done; - } - - async _addToQueue(job) { - var args, blocked, error, options, reachedHWM, shifted, strategy; - ({args, 
options} = job); - try { - ({reachedHWM, blocked, strategy} = (await this._store.__submit__(this.queued(), options.weight))); - } catch (error1) { - error = error1; - this.Events.trigger("debug", `Could not queue ${options.id}`, {args, options, error}); - job.doDrop({error}); - return false; - } - if (blocked) { - job.doDrop(); - return true; - } else if (reachedHWM) { - shifted = strategy === Bottleneck.prototype.strategy.LEAK ? this._queues.shiftLastFrom(options.priority) : strategy === Bottleneck.prototype.strategy.OVERFLOW_PRIORITY ? this._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck.prototype.strategy.OVERFLOW ? job : void 0; - if (shifted != null) { - shifted.doDrop(); - } - if ((shifted == null) || strategy === Bottleneck.prototype.strategy.OVERFLOW) { - if (shifted == null) { - job.doDrop(); - } - return reachedHWM; - } - } - job.doQueue(reachedHWM, blocked); - this._queues.push(job); - await this._drainAll(); - return reachedHWM; - } + isInstance = this instanceof a; + } catch {} + if (isInstance) { + return Reflect.construct(f, arguments, this.constructor); + } + return f.apply(this, arguments); + }; + a.prototype = f.prototype; + } else a = {}; + Object.defineProperty(a, '__esModule', {value: true}); + Object.keys(n).forEach(function (k) { + var d = Object.getOwnPropertyDescriptor(n, k); + Object.defineProperty(a, k, d.get ? 
d : { + enumerable: true, + get: function () { + return n[k]; + } + }); + }); + return a; + } - _receive(job) { - if (this._states.jobStatus(job.options.id) != null) { - job._reject(new Bottleneck.prototype.BottleneckError(`A job with the same id already exists (id=${job.options.id})`)); - return false; - } else { - job.doReceive(); - return this._submitLock.schedule(this._addToQueue, job); - } - } + var Bottleneck = {exports: {}}; + + var parser = {}; + + var hasRequiredParser; + + function requireParser () { + if (hasRequiredParser) return parser; + hasRequiredParser = 1; + parser.load = function (received, defaults, onto) { + var _a; + onto !== null && onto !== void 0 ? onto : (onto = {}); + for (const [k, v] of Object.entries(defaults)) { + onto[k] = (_a = received[k]) !== null && _a !== void 0 ? _a : v; + } + return onto; + }; + parser.overwrite = function (received, defaults, onto) { + onto !== null && onto !== void 0 ? onto : (onto = {}); + for (const [k, v] of Object.entries(received)) { + if (defaults[k] !== undefined) { + onto[k] = v; + } + } + return onto; + }; + return parser; + } - submit(...args) { - var cb, fn, job, options, ref, ref1, task; - if (typeof args[0] === "function") { - ref = args, [fn, ...args] = ref, [cb] = splice.call(args, -1); - options = parser$5.load({}, this.jobDefaults); - } else { - ref1 = args, [options, fn, ...args] = ref1, [cb] = splice.call(args, -1); - options = parser$5.load(options, this.jobDefaults); - } - task = (...args) => { - return new this.Promise(function(resolve, reject) { - return fn(...args, function(...args) { - return (args[0] != null ? reject : resolve)(args); - }); - }); - }; - job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); - job.promise.then(function(args) { - return typeof cb === "function" ? cb(...args) : void 0; - }).catch(function(args) { - if (Array.isArray(args)) { - return typeof cb === "function" ? 
cb(...args) : void 0; - } else { - return typeof cb === "function" ? cb(args) : void 0; - } - }); - return this._receive(job); - } + var DLList_1; + var hasRequiredDLList; + + function requireDLList () { + if (hasRequiredDLList) return DLList_1; + hasRequiredDLList = 1; + class DLList { + constructor(incr, decr) { + this.incr = incr; + this.decr = decr; + this._first = null; + this._last = null; + this.length = 0; + } + push(value) { + var _a; + this.length++; + (_a = this.incr) === null || _a === void 0 ? void 0 : _a.call(this); + const node = { value, prev: this._last, next: null }; + if (this._last != null) { + this._last.next = node; + this._last = node; + } + else { + this._first = this._last = node; + } + } + shift() { + var _a; + if (this._first == null) { + return; + } + else { + this.length--; + (_a = this.decr) === null || _a === void 0 ? void 0 : _a.call(this); + } + const { value } = this._first; + if ((this._first = this._first.next) != null) { + this._first.prev = null; + } + else { + this._last = null; + } + return value; + } + first() { + var _a; + return (_a = this._first) === null || _a === void 0 ? void 0 : _a.value; + } + getArray() { + let node = this._first; + const result = []; + while (node != null) { + var ref; + result.push(((ref = node), (node = node.next), ref.value)); + } + return result; + } + forEachShift(cb) { + let node = this.shift(); + while (node != null) { + cb(node); + node = this.shift(); + } + } + debug() { + var _a, _b; + let node = this._first; + const result = []; + while (node != null) { + var ref; + result.push(((ref = node), + (node = node.next), + { + value: ref.value, + prev: (_a = ref.prev) === null || _a === void 0 ? void 0 : _a.value, + next: (_b = ref.next) === null || _b === void 0 ? 
void 0 : _b.value, + })); + } + return result; + } + } + DLList_1 = DLList; + return DLList_1; + } - schedule(...args) { - var job, options, task; - if (typeof args[0] === "function") { - [task, ...args] = args; - options = {}; - } else { - [options, task, ...args] = args; - } - job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); - this._receive(job); - return job.promise; - } + var Events_1; + var hasRequiredEvents; + + function requireEvents () { + if (hasRequiredEvents) return Events_1; + hasRequiredEvents = 1; + var __awaiter = (Events_1 && Events_1.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + class Events { + constructor(instance) { + this.instance = instance; + this._events = {}; + if (this.instance.on != null || + this.instance.once != null || + this.instance.removeAllListeners != null) { + throw new Error("An Emitter already exists for this object"); + } + this.instance.on = (name, cb) => this._addListener(name, "many", cb); + this.instance.once = (name, cb) => this._addListener(name, "once", cb); + this.instance.removeAllListeners = (name = null) => { + if (name != null) { + delete this._events[name]; + } + else { + this._events = {}; + } + }; + } + _addListener(name, status, cb) { + var _a; + var _b; + (_a = (_b = this._events)[name]) !== null && _a !== void 0 ? 
_a : (_b[name] = []); + this._events[name].push({ cb, status }); + return this.instance; + } + listenerCount(name) { + var _a, _b; + return (_b = (_a = this._events[name]) === null || _a === void 0 ? void 0 : _a.length) !== null && _b !== void 0 ? _b : 0; + } + trigger(name, ...args) { + return __awaiter(this, void 0, void 0, function* () { + try { + if (name !== "debug") { + this.trigger("debug", `Event triggered: ${name}`, args); + } + if (this._events[name] == null) + return; + this._events[name] = this._events[name].filter((listener) => listener.status !== "none"); + const allEvents = yield Promise.all(this._events[name].map((listener) => __awaiter(this, void 0, void 0, function* () { + if (listener.status === "once") + listener.status = "none"; + try { + return typeof listener.cb === "function" ? listener.cb(...(args || [])) : undefined; + } + catch (e) { + if (name !== "error") + this.trigger("error", e); + return null; + } + }))); + return allEvents.find((x) => x != null); + } + catch (error) { + const e = error; + if (name !== "error") { + this.trigger("error", e); + } + return null; + } + }); + } + } + Events_1 = Events; + return Events_1; + } - wrap(fn) { - var schedule, wrapped; - schedule = this.schedule.bind(this); - wrapped = function(...args) { - return schedule(fn.bind(this), ...args); - }; - wrapped.withOptions = function(options, ...args) { - return schedule(options, fn, ...args); - }; - return wrapped; - } + var Queues_1; + var hasRequiredQueues; + + function requireQueues () { + if (hasRequiredQueues) return Queues_1; + hasRequiredQueues = 1; + const DLList = requireDLList(); + const Events = requireEvents(); + class Queues { + constructor(num_priorities) { + this.Events = new Events(this); + this._length = 0; + this._lists = []; + for (let i = 0; i < num_priorities; i++) { + const list = new DLList(() => this.incr(), () => this.decr()); + this._lists.push(list); + } + } + incr() { + if (this._length++ === 0) { + return 
this.Events.trigger("leftzero"); + } + } + decr() { + if (--this._length === 0) { + return this.Events.trigger("zero"); + } + } + push(job) { + return this._lists[job.options.priority].push(job); + } + queued(priority) { + if (priority != null) { + return this._lists[priority].length; + } + else { + return this._length; + } + } + shiftAll(fn) { + return this._lists.forEach((list) => list.forEachShift(fn)); + } + getFirst(arr) { + for (const list of arr !== null && arr !== void 0 ? arr : this._lists) { + if (list.length > 0) + return list; + } + return []; + } + shiftLastFrom(priority) { + return this.getFirst(this._lists.slice(priority).reverse()).shift(); + } + } + Queues_1 = Queues; + return Queues_1; + } - async updateSettings(options = {}) { - await this._store.__updateSettings__(parser$5.overwrite(options, this.storeDefaults)); - parser$5.overwrite(options, this.instanceDefaults, this); - return this; - } + var BottleneckError_1; + var hasRequiredBottleneckError; - currentReservoir() { - return this._store.__currentReservoir__(); - } + function requireBottleneckError () { + if (hasRequiredBottleneckError) return BottleneckError_1; + hasRequiredBottleneckError = 1; + class BottleneckError extends Error { + } + BottleneckError_1 = BottleneckError; + return BottleneckError_1; + } - incrementReservoir(incr = 0) { - return this._store.__incrementReservoir__(incr); - } + var Job_1; + var hasRequiredJob; + + function requireJob () { + if (hasRequiredJob) return Job_1; + hasRequiredJob = 1; + var __awaiter = (Job_1 && Job_1.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + const NUM_PRIORITIES = 10; + const DEFAULT_PRIORITY = 5; + const parser = requireParser(); + const BottleneckError = requireBottleneckError(); + class Job { + constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states) { + this.task = task; + this.args = args; + this.rejectOnDrop = rejectOnDrop; + this.Events = Events; + this._states = _states; + this.options = parser.load(options, jobDefaults); + this.options.priority = this._sanitizePriority(this.options.priority); + if (this.options.id === jobDefaults.id) { + this.options.id = `${this.options.id}-${this._randomIndex()}`; + } + this.promise = new Promise((_resolve, _reject) => { + this._resolve = _resolve; + this._reject = _reject; + }); + this.retryCount = 0; + } + _sanitizePriority(priority) { + const sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority; + if (sProperty < 0) { + return 0; + } + else if (sProperty > NUM_PRIORITIES - 1) { + return NUM_PRIORITIES - 1; + } + else { + return sProperty; + } + } + _randomIndex() { + return Math.random().toString(36).slice(2); + } + doDrop(params) { + const { error, message = "This job has been dropped by Bottleneck" } = params || {}; + if (this._states.remove(this.options.id)) { + if (this.rejectOnDrop) { + this._reject(error !== null && error !== void 0 ? 
error : new BottleneckError(message)); + } + this.Events.trigger("dropped", { + args: this.args, + options: this.options, + task: this.task, + promise: this.promise, + }); + return true; + } + else { + return false; + } + } + _assertStatus(expected) { + const status = this._states.jobStatus(this.options.id); + if (!(status === expected || (expected === "DONE" && status === null))) { + throw new BottleneckError(`Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`); + } + } + doReceive() { + this._states.start(this.options.id); + return this.Events.trigger("received", { args: this.args, options: this.options }); + } + doQueue(reachedHWM, blocked) { + this._assertStatus("RECEIVED"); + this._states.next(this.options.id); + return this.Events.trigger("queued", { + args: this.args, + options: this.options, + reachedHWM, + blocked, + }); + } + doRun() { + if (this.retryCount === 0) { + this._assertStatus("QUEUED"); + this._states.next(this.options.id); + } + else { + this._assertStatus("EXECUTING"); + } + return this.Events.trigger("scheduled", { args: this.args, options: this.options }); + } + doExecute(chained, clearGlobalState, run, free) { + return __awaiter(this, void 0, void 0, function* () { + if (this.retryCount === 0) { + this._assertStatus("RUNNING"); + this._states.next(this.options.id); + } + else { + this._assertStatus("EXECUTING"); + } + const eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; + this.Events.trigger("executing", eventInfo); + try { + const passed = yield (chained != null + ? 
chained.schedule(this.options, this.task, ...this.args) + : this.task(...(this.args || []))); + if (clearGlobalState()) { + this.doDone(eventInfo); + yield free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._resolve(passed); + } + } + catch (error) { + return this._onFailure(error, eventInfo, clearGlobalState, run, free); + } + }); + } + doExpire(clearGlobalState, run, free) { + if (this._states.jobStatus(this.options.id === "RUNNING")) { + this._states.next(this.options.id); + } + this._assertStatus("EXECUTING"); + const eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; + const error = new BottleneckError(`This job timed out after ${this.options.expiration} ms.`); + return this._onFailure(error, eventInfo, clearGlobalState, run, free); + } + _onFailure(error, eventInfo, clearGlobalState, run, free) { + return __awaiter(this, void 0, void 0, function* () { + if (clearGlobalState()) { + const retry = yield this.Events.trigger("failed", error, eventInfo); + if (retry != null) { + const retryAfter = ~~retry; + this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo); + this.retryCount++; + return run(retryAfter); + } + else { + this.doDone(eventInfo); + yield free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._reject(error); + } + } + }); + } + doDone(eventInfo) { + this._assertStatus("EXECUTING"); + this._states.next(this.options.id); + return this.Events.trigger("done", eventInfo); + } + } + Job_1 = Job; + return Job_1; + } - } - Bottleneck.default = Bottleneck; + var LocalDatastore_1; + var hasRequiredLocalDatastore; + + function requireLocalDatastore () { + if (hasRequiredLocalDatastore) return LocalDatastore_1; + hasRequiredLocalDatastore = 1; + var __awaiter = (LocalDatastore_1 && LocalDatastore_1.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + const parser = requireParser(); + const BottleneckError = requireBottleneckError(); + class LocalDatastore { + constructor(instance, storeOptions, storeInstanceOptions) { + this.instance = instance; + this.storeOptions = storeOptions; + this.clientId = this.instance._randomIndex(); + parser.load(storeInstanceOptions, storeInstanceOptions, this); + this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); + this._running = 0; + this._done = 0; + this._unblockTime = 0; + this.ready = Promise.resolve(); + this.clients = {}; + this._startHeartbeat(); + } + _startHeartbeat() { + var _a, _b; + if (this.heartbeat) { + clearInterval(this.heartbeat); + } + if ((this.storeOptions.reservoirRefreshInterval != null && + this.storeOptions.reservoirRefreshAmount != null) || + (this.storeOptions.reservoirIncreaseInterval != null && + this.storeOptions.reservoirIncreaseAmount != null)) { + this.heartbeat = (_b = (_a = setInterval(() => { + const now = Date.now(); + if (this.storeOptions.reservoirRefreshInterval != null && + now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) { + this._lastReservoirRefresh = now; + this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount; + this.instance._drainAll(this.computeCapacity()); + } + if (this.storeOptions.reservoirIncreaseInterval != null && + now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) { + const { reservoirIncreaseAmount: 
amount, reservoirIncreaseMaximum: maximum, reservoir, } = this.storeOptions; + this._lastReservoirIncrease = now; + const incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; + if (incr > 0) { + this.storeOptions.reservoir += incr; + return this.instance._drainAll(this.computeCapacity()); + } + } + }, this.heartbeatInterval)).unref) === null || _b === void 0 ? void 0 : _b.call(_a); + } + } + __publish__(message) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this.instance.Events.trigger("message", message.toString()); + }); + } + __disconnect__() { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + clearInterval(this.heartbeat); + }); + } + yieldLoop(t) { + return new Promise((resolve) => setTimeout(resolve, t !== null && t !== void 0 ? t : 0)); + } + computePenalty() { + return this.storeOptions.penalty != null + ? this.storeOptions.penalty + : 15 * this.storeOptions.minTime || 5000; + } + __updateSettings__(options) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + parser.overwrite(options, options, this.storeOptions); + this._startHeartbeat(); + this.instance._drainAll(this.computeCapacity()); + return true; + }); + } + __running__() { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this._running; + }); + } + __queued__() { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this.instance.queued(); + }); + } + __done__() { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this._done; + }); + } + __groupCheck__(time) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this._nextRequest + this.timeout < time; + }); + } + computeCapacity() { + const { maxConcurrent, reservoir } = this.storeOptions; + if (maxConcurrent != null && reservoir != null) { + return 
Math.min(maxConcurrent - this._running, reservoir); + } + else if (maxConcurrent != null) { + return maxConcurrent - this._running; + } + else if (reservoir != null) { + return reservoir; + } + else { + return null; + } + } + conditionsCheck(weight) { + const capacity = this.computeCapacity(); + return capacity == null || weight <= capacity; + } + __incrementReservoir__(incr) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + const reservoir = (this.storeOptions.reservoir += incr); + this.instance._drainAll(this.computeCapacity()); + return reservoir; + }); + } + __currentReservoir__() { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + return this.storeOptions.reservoir; + }); + } + isBlocked(now) { + return this._unblockTime >= now; + } + check(weight, now) { + return this.conditionsCheck(weight) && this._nextRequest - now <= 0; + } + __check__(weight) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + const now = Date.now(); + return this.check(weight, now); + }); + } + __register__(index, weight, _expiration) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + const now = Date.now(); + if (this.conditionsCheck(weight)) { + this._running += weight; + if (this.storeOptions.reservoir != null) { + this.storeOptions.reservoir -= weight; + } + const wait = Math.max(this._nextRequest - now, 0); + this._nextRequest = now + wait + this.storeOptions.minTime; + return { success: true, wait, reservoir: this.storeOptions.reservoir }; + } + else { + return { success: false }; + } + }); + } + strategyIsBlock() { + return this.storeOptions.strategy === 3; + } + __submit__(queueLength, weight) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + if (this.storeOptions.maxConcurrent != null && weight > this.storeOptions.maxConcurrent) { + throw new BottleneckError(`Impossible to add a job having a weight of 
${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`); + } + const now = Date.now(); + const reachedHWM = this.storeOptions.highWater != null && + queueLength === this.storeOptions.highWater && + !this.check(weight, now); + const blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now)); + if (blocked) { + this._unblockTime = now + this.computePenalty(); + this._nextRequest = this._unblockTime + this.storeOptions.minTime; + this.instance._dropAllQueued(); + } + return { reachedHWM, blocked, strategy: this.storeOptions.strategy }; + }); + } + __free__(index, weight) { + return __awaiter(this, void 0, void 0, function* () { + yield this.yieldLoop(); + this._running -= weight; + this._done += weight; + this.instance._drainAll(this.computeCapacity()); + return { running: this._running }; + }); + } + } + LocalDatastore_1 = LocalDatastore; + return LocalDatastore_1; + } - Bottleneck.Events = Events$4; + var RedisDatastore = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); - Bottleneck.version = Bottleneck.prototype.version = require$$8.version; + var RedisDatastore$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + default: RedisDatastore + }); - Bottleneck.strategy = Bottleneck.prototype.strategy = { - LEAK: 1, - OVERFLOW: 2, - OVERFLOW_PRIORITY: 4, - BLOCK: 3 - }; + var require$$4$1 = /*@__PURE__*/getAugmentedNamespace(RedisDatastore$1); + + var States_1; + var hasRequiredStates; + + function requireStates () { + if (hasRequiredStates) return States_1; + hasRequiredStates = 1; + const BottleneckError = requireBottleneckError(); + class States { + constructor(status) { + this.status = status; + this._jobs = {}; + this.counts = this.status.map(() => 0); + } + next(id) { + const current = this._jobs[id]; + const next = current + 1; + if (current != null && next < this.status.length) { + this.counts[current]--; + this.counts[next]++; + this._jobs[id]++; + } + else if 
(current != null) { + this.counts[current]--; + delete this._jobs[id]; + } + } + start(id) { + const initial = 0; + this._jobs[id] = initial; + return this.counts[initial]++; + } + remove(id) { + const current = this._jobs[id]; + if (current != null) { + this.counts[current]--; + delete this._jobs[id]; + } + return current != null; + } + jobStatus(id) { + var _a; + return (_a = this.status[this._jobs[id]]) !== null && _a !== void 0 ? _a : null; + } + statusJobs(status) { + if (status != null) { + const pos = this.status.indexOf(status); + if (pos < 0) { + throw new BottleneckError(`status must be one of ${this.status.join(", ")}`); + } + const result = []; + for (const [k, v] of Object.entries(this._jobs)) { + if (v === pos) { + result.push(k); + } + } + return result; + } + else { + return Object.keys(this._jobs); + } + } + statusCounts() { + return this.counts.reduce((acc, v, i) => { + acc[this.status[i]] = v; + return acc; + }, {}); + } + } + States_1 = States; + return States_1; + } - Bottleneck.BottleneckError = Bottleneck.prototype.BottleneckError = BottleneckError_1; + var Sync_1; + var hasRequiredSync; + + function requireSync () { + if (hasRequiredSync) return Sync_1; + hasRequiredSync = 1; + var __awaiter = (Sync_1 && Sync_1.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + const DLList = requireDLList(); + class Sync { + constructor(name) { + this.schedule = this.schedule.bind(this); + this.name = name; + this._running = 0; + this._queue = new DLList(); + } + isEmpty() { + return this._queue.length === 0; + } + _tryToRun() { + return __awaiter(this, void 0, void 0, function* () { + if (this._running < 1 && this._queue.length > 0) { + this._running++; + const { task, args, resolve, reject } = this._queue.shift(); + let cb; + try { + const returned = yield task(...(args || [])); + cb = () => resolve(returned); + } + catch (error) { + cb = () => reject(error); + } + this._running--; + this._tryToRun(); + cb(); + } + }); + } + schedule(task, ...args) { + let reject; + let resolve = (reject = null); + const promise = new Promise(function (_resolve, _reject) { + resolve = _resolve; + reject = _reject; + }); + this._queue.push({ task, args, resolve, reject }); + this._tryToRun(); + return promise; + } + } + Sync_1 = Sync; + return Sync_1; + } - Bottleneck.Group = Bottleneck.prototype.Group = Group_1; + var RedisConnection = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); - Bottleneck.RedisConnection = Bottleneck.prototype.RedisConnection = require$$2; + var RedisConnection$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + default: RedisConnection + }); - Bottleneck.IORedisConnection = Bottleneck.prototype.IORedisConnection = require$$3; + var require$$10 = /*@__PURE__*/getAugmentedNamespace(RedisConnection$1); - Bottleneck.Batcher = Bottleneck.prototype.Batcher = Batcher_1; + var IORedisConnection = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); - Bottleneck.prototype.jobDefaults = { - priority: DEFAULT_PRIORITY$1, - weight: 1, - expiration: null, - id: "" - }; + var 
IORedisConnection$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + default: IORedisConnection + }); - Bottleneck.prototype.storeDefaults = { - maxConcurrent: null, - minTime: 0, - highWater: null, - strategy: Bottleneck.prototype.strategy.LEAK, - penalty: null, - reservoir: null, - reservoirRefreshInterval: null, - reservoirRefreshAmount: null, - reservoirIncreaseInterval: null, - reservoirIncreaseAmount: null, - reservoirIncreaseMaximum: null - }; + var require$$11 = /*@__PURE__*/getAugmentedNamespace(IORedisConnection$1); - Bottleneck.prototype.localStoreDefaults = { - Promise: Promise, - timeout: null, - heartbeatInterval: 250 - }; + var Scripts = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); - Bottleneck.prototype.redisStoreDefaults = { - Promise: Promise, - timeout: null, - heartbeatInterval: 5000, - clientTimeout: 10000, - Redis: null, - clientOptions: {}, - clusterNodes: null, - clearDatastore: false, - connection: null - }; + var Scripts$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + default: Scripts + }); - Bottleneck.prototype.instanceDefaults = { - datastore: "local", - connection: null, - id: "", - rejectOnDrop: true, - trackDoneStatus: false, - Promise: Promise - }; + var require$$4 = /*@__PURE__*/getAugmentedNamespace(Scripts$1); + + var Group_1; + var hasRequiredGroup; + + function requireGroup () { + if (hasRequiredGroup) return Group_1; + hasRequiredGroup = 1; + var __awaiter = (Group_1 && Group_1.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + const parser = requireParser(); + const Events = requireEvents(); + const RedisConnection = require$$10; + const IORedisConnection = require$$11; + const Scripts = require$$4; + class Group { + constructor(limiterOptions) { + this.defaults = { + timeout: 1000 * 60 * 5, + connection: null, + id: "group-key", + }; + this.deleteKey = this.deleteKey.bind(this); + this.limiterOptions = limiterOptions !== null && limiterOptions !== void 0 ? limiterOptions : {}; + parser.load(this.limiterOptions, this.defaults, this); + this.Events = new Events(this); + this.instances = {}; + this._startAutoCleanup(); + this.sharedConnection = this.connection != null; + this.Bottleneck = requireBottleneck(); + if (this.connection == null) { + if (this.limiterOptions.datastore === "redis") { + this.connection = new RedisConnection(Object.assign({}, this.limiterOptions, { Events: this.Events })); + } + else if (this.limiterOptions.datastore === "ioredis") { + this.connection = new IORedisConnection(Object.assign({}, this.limiterOptions, { Events: this.Events })); + } + } + } + key(key = "") { + let limiter = this.instances[key]; + if (!limiter) { + limiter = new this.Bottleneck(Object.assign(this.limiterOptions, { + id: `${this.id}-${key}`, + timeout: this.timeout, + connection: this.connection, + })); + this.Events.trigger("created", limiter, key); + this.instances[key] = limiter; + } + return limiter; + } + deleteKey() { + return __awaiter(this, arguments, void 0, function* (key = "") { + let deleted; + const instance = this.instances[key]; + if (this.connection) { + deleted = yield this.connection.__runCommand__([ + "del", + ...Scripts.allKeys(`${this.id}-${key}`), + ]); + } + if (instance != null) { + delete this.instances[key]; + yield instance.disconnect(); + } + return instance != null || deleted > 0; + }); + } + limiters() { + 
return Object.entries(this.instances).map(([key, limiter]) => ({ key, limiter })); + } + keys() { + return Object.keys(this.instances); + } + clusterKeys() { + return __awaiter(this, void 0, void 0, function* () { + if (this.connection == null) { + return Promise.resolve(this.keys()); + } + const keys = []; + let cursor = null; + const start = `b_${this.id}-`.length; + const end = "_settings".length; + while (cursor !== 0) { + const [next, found] = yield this.connection.__runCommand__([ + "scan", + cursor !== null && cursor !== void 0 ? cursor : 0, + "match", + `b_${this.id}-*_settings`, + "count", + 10000, + ]); + cursor = ~~next; + for (const k of found) { + keys.push(k.slice(start, -end)); + } + } + return keys; + }); + } + _startAutoCleanup() { + var _a, _b; + clearInterval(this.interval); + this.interval = (_b = (_a = setInterval(() => __awaiter(this, void 0, void 0, function* () { + const time = Date.now(); + for (const [k, v] of Object.entries(this.instances)) { + try { + if (yield v._store.__groupCheck__(time)) { + this.deleteKey(k); + } + } + catch (e) { + v.Events.trigger("error", e); + } + } + }), this.timeout / 2)).unref) === null || _b === void 0 ? void 0 : _b.call(_a); + } + updateSettings(options) { + options !== null && options !== void 0 ? options : (options = {}); + parser.overwrite(options, this.defaults, this); + parser.overwrite(options, options, this.limiterOptions); + if (options.timeout != null) { + return this._startAutoCleanup(); + } + } + disconnect(flush = true) { + var _a; + if (!this.sharedConnection) { + return (_a = this.connection) === null || _a === void 0 ? void 0 : _a.disconnect(flush); + } + } + } + Group_1 = Group; + return Group_1; + } - Bottleneck.prototype.stopDefaults = { - enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.", - dropWaitingJobs: true, - dropErrorMessage: "This limiter has been stopped." 
- }; + var Batcher_1; + var hasRequiredBatcher; + + function requireBatcher () { + if (hasRequiredBatcher) return Batcher_1; + hasRequiredBatcher = 1; + const parser = requireParser(); + const Events = requireEvents(); + class Batcher { + constructor(options) { + this.defaults = { maxTime: null, maxSize: null }; + this.options = options !== null && options !== void 0 ? options : {}; + parser.load(this.options, this.defaults, this); + this.Events = new Events(this); + this._arr = []; + this._resetPromise(); + this._lastFlush = Date.now(); + } + _resetPromise() { + this._promise = new Promise((res) => { + this._resolve = res; + }); + } + _flush() { + clearTimeout(this._timeout); + this._lastFlush = Date.now(); + this._resolve(); + this.Events.trigger("batch", this._arr); + this._arr = []; + this._resetPromise(); + } + add(data) { + this._arr.push(data); + const existingPromise = this._promise; + if (this._arr.length === this.maxSize) { + this._flush(); + } + else if (this.maxTime != null && this._arr.length === 1) { + this._timeout = setTimeout(() => { + this._flush(); + }, this.maxTime); + } + return existingPromise; + } + } + Batcher_1 = Batcher; + return Batcher_1; + } - return Bottleneck; + var version = "4.0.0-rc.0"; + var require$$13 = { + version: version}; + + var Bottleneck_1 = Bottleneck.exports; + + var hasRequiredBottleneck; + + function requireBottleneck () { + if (hasRequiredBottleneck) return Bottleneck.exports; + hasRequiredBottleneck = 1; + var __awaiter = (Bottleneck_1 && Bottleneck_1.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + const NUM_PRIORITIES = 10; + const DEFAULT_PRIORITY = 5; + const parser = requireParser(); + const Queues = requireQueues(); + const Job = requireJob(); + const LocalDatastore = requireLocalDatastore(); + const RedisDatastore = require$$4$1; + const Events = requireEvents(); + const States = requireStates(); + const Sync = requireSync(); + const BottleneckError = requireBottleneckError(); + const Group = requireGroup(); + const RedisConnection = require$$10; + const IORedisConnection = require$$11; + const Batcher = requireBatcher(); + const version = require$$13.version; + let Bottleneck$1 = class Bottleneck { + constructor(options, ...invalid) { + this.version = version; + this.jobDefaults = { + priority: DEFAULT_PRIORITY, + weight: 1, + expiration: null, + id: "", + }; + this.storeDefaults = { + maxConcurrent: null, + minTime: 0, + highWater: null, + strategy: Bottleneck.strategy.LEAK, + penalty: null, + reservoir: null, + reservoirRefreshInterval: null, + reservoirRefreshAmount: null, + reservoirIncreaseInterval: null, + reservoirIncreaseAmount: null, + reservoirIncreaseMaximum: null, + }; + this.localStoreDefaults = { + Promise, + timeout: null, + heartbeatInterval: 250, + }; + this.redisStoreDefaults = { + Promise, + timeout: null, + heartbeatInterval: 5000, + clientTimeout: 10000, + Redis: null, + clientOptions: {}, + clusterNodes: null, + clearDatastore: false, + connection: null, + }; + this.instanceDefaults = { + datastore: "local", + connection: null, + id: "", + rejectOnDrop: true, + trackDoneStatus: false, + Promise, + }; + this.stopDefaults = { + enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.", + dropWaitingJobs: true, + dropErrorMessage: "This limiter has been stopped.", + }; + this._addToQueue = this._addToQueue.bind(this); + options !== null && options !== 
void 0 ? options : (options = {}); + this._validateOptions(options, invalid); + parser.load(options, this.instanceDefaults, this); + this._queues = new Queues(NUM_PRIORITIES); + this._scheduled = {}; + this._states = new States(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : [])); + this._limiter = null; + this.Events = new Events(this); + this._submitLock = new Sync("submit"); + this._registerLock = new Sync("register"); + const storeOptions = parser.load(options, this.storeDefaults, {}); + if (this.datastore === "redis" || this.datastore === "ioredis" || this.connection != null) { + const opts = parser.load(options, this.redisStoreDefaults, {}); + this._store = new RedisDatastore(this, storeOptions, opts); + } + else if (this.datastore === "local") { + const opts = parser.load(options, this.localStoreDefaults, {}); + this._store = new LocalDatastore(this, storeOptions, opts); + } + else { + throw new BottleneckError(`Invalid datastore type: ${this.datastore}`); + } + this._queues.on("leftzero", () => { var _a, _b; return (_b = (_a = this._store.heartbeat) === null || _a === void 0 ? void 0 : _a.ref) === null || _b === void 0 ? void 0 : _b.call(_a); }); + this._queues.on("zero", () => { var _a, _b; return (_b = (_a = this._store.heartbeat) === null || _a === void 0 ? void 0 : _a.unref) === null || _b === void 0 ? void 0 : _b.call(_a); }); + } + _validateOptions(options, invalid) { + if (options == null || typeof options !== "object" || invalid.length !== 0) { + throw new BottleneckError("Bottleneck v2 takes a single object argument. 
Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1."); + } + } + ready() { + return this._store.ready; + } + clients() { + return this._store.clients; + } + channel() { + return `b_${this.id}`; + } + channel_client() { + return `b_${this.id}_${this._store.clientId}`; + } + publish(message) { + return this._store.__publish__(message); + } + disconnect() { + return __awaiter(this, arguments, void 0, function* (flush = true) { + yield this._store.__disconnect__(flush); + }); + } + chain(_limiter) { + this._limiter = _limiter; + return this; + } + queued(priority) { + return this._queues.queued(priority); + } + clusterQueued() { + return this._store.__queued__(); + } + empty() { + return this.queued() === 0 && this._submitLock.isEmpty(); + } + running() { + return this._store.__running__(); + } + done() { + return this._store.__done__(); + } + jobStatus(id) { + return this._states.jobStatus(id); + } + jobs(status) { + return this._states.statusJobs(status); + } + counts() { + return this._states.statusCounts(); + } + _randomIndex() { + return Math.random().toString(36).slice(2); + } + check(weight = 1) { + return this._store.__check__(weight); + } + _clearGlobalState(index) { + if (this._scheduled[index] != null) { + clearTimeout(this._scheduled[index].expiration); + delete this._scheduled[index]; + return true; + } + else { + return false; + } + } + _free(index, job, options, eventInfo) { + return __awaiter(this, void 0, void 0, function* () { + try { + const { running } = yield this._store.__free__(index, options.weight); + this.Events.trigger("debug", `Freed ${options.id}`, eventInfo); + if (running === 0 && this.empty()) { + return this.Events.trigger("idle"); + } + } + catch (e) { + return this.Events.trigger("error", e); + } + }); + } + _run(index, job, wait) { + job.doRun(); + const clearGlobalState = this._clearGlobalState.bind(this, index); + const run = this._run.bind(this, index, job); + const free = 
this._free.bind(this, index, job); + return (this._scheduled[index] = { + timeout: setTimeout(() => { + return job.doExecute(this._limiter, clearGlobalState, run, free); + }, wait), + expiration: job.options.expiration != null + ? setTimeout(() => job.doExpire(clearGlobalState, run, free), wait + job.options.expiration) + : undefined, + job, + }); + } + _drainOne(capacity) { + return __awaiter(this, void 0, void 0, function* () { + return this._registerLock.schedule(() => __awaiter(this, void 0, void 0, function* () { + let next; + if (this.queued() === 0) { + return null; + } + const queue = this._queues.getFirst(); + const { options, args } = (next = queue.first()); + if (capacity != null && options.weight > capacity) { + return null; + } + this.Events.trigger("debug", `Draining ${options.id}`, { args, options }); + const index = this._randomIndex(); + const { success, wait, reservoir } = yield this._store.__register__(index, options.weight, options.expiration); + this.Events.trigger("debug", `Drained ${options.id}`, { success, args, options }); + if (success) { + queue.shift(); + const empty = this.empty(); + if (empty) { + this.Events.trigger("empty"); + } + if (reservoir === 0) { + this.Events.trigger("depleted", empty); + } + this._run(index, next, wait); + return options.weight; + } + else { + return null; + } + })); + }); + } + _drainAll(capacity_1) { + return __awaiter(this, arguments, void 0, function* (capacity, total = 0) { + try { + const drained = yield this._drainOne(capacity); + if (drained != null) { + const newCapacity = capacity != null ? capacity - drained : capacity; + return this._drainAll(newCapacity, total + drained); + } + else { + return total; + } + } + catch (e) { + this.Events.trigger("error", e); + } + }); + } + _dropAllQueued(message) { + return this._queues.shiftAll((job) => job.doDrop({ message })); + } + stop(options) { + options !== null && options !== void 0 ? 
options : (options = {}); + options = parser.load(options, this.stopDefaults); + const waitForExecuting = (at) => { + const finished = () => { + const { counts } = this._states; + return counts[0] + counts[1] + counts[2] + counts[3] === at; + }; + return new Promise((resolve) => { + if (finished()) { + resolve(); + } + else { + this.on("done", () => { + if (finished()) { + this.removeAllListeners("done"); + resolve(); + } + }); + } + }); + }; + let done; + if (options.dropWaitingJobs) { + this._run = (index, next) => next.doDrop({ message: options.dropErrorMessage }); + this._drainOne = () => this.Promise.resolve(null); + done = this._registerLock.schedule(() => this._submitLock.schedule(() => { + for (const v of Object.values(this._scheduled)) { + if (this.jobStatus(v.job.options.id) === "RUNNING") { + clearTimeout(v.timeout); + clearTimeout(v.expiration); + v.job.doDrop({ message: options.dropErrorMessage }); + } + } + this._dropAllQueued(options.dropErrorMessage); + return waitForExecuting(0); + })); + } + else { + done = this.schedule({ priority: NUM_PRIORITIES - 1, weight: 0 }, () => waitForExecuting(1)); + } + this._receive = (job) => job._reject(new BottleneckError(options.enqueueErrorMessage)); + this.stop = () => this.Promise.reject(new BottleneckError("stop() has already been called")); + return done; + } + _addToQueue(job) { + return __awaiter(this, void 0, void 0, function* () { + let blocked, reachedHWM, strategy; + const { args, options } = job; + try { + ({ reachedHWM, blocked, strategy } = yield this._store.__submit__(this.queued(), options.weight)); + } + catch (error) { + this.Events.trigger("debug", `Could not queue ${options.id}`, { args, options, error }); + job.doDrop({ error }); + return false; + } + if (blocked) { + job.doDrop(); + return true; + } + else if (reachedHWM) { + let shifted; + if (strategy === Bottleneck.strategy.LEAK) { + shifted = this._queues.shiftLastFrom(options.priority); + } + else if (strategy === 
Bottleneck.strategy.OVERFLOW_PRIORITY) { + shifted = this._queues.shiftLastFrom(options.priority + 1); + } + else if (strategy === Bottleneck.strategy.OVERFLOW) { + shifted = job; + } + if (shifted != null) { + shifted.doDrop(); + } + if (shifted == null || strategy === Bottleneck.strategy.OVERFLOW) { + if (shifted == null) { + job.doDrop(); + } + return reachedHWM; + } + } + job.doQueue(reachedHWM, blocked); + this._queues.push(job); + yield this._drainAll(); + return reachedHWM; + }); + } + _receive(job) { + if (this._states.jobStatus(job.options.id) != null) { + job._reject(new BottleneckError(`A job with the same id already exists (id=${job.options.id})`)); + return false; + } + else { + job.doReceive(); + return this._submitLock.schedule(this._addToQueue, job); + } + } + submit(...args) { + let cb, fn, options; + if (typeof args[0] === "function") { + cb = args.pop(); + [fn, ...args] = args; + options = parser.load({}, this.jobDefaults); + } + else { + cb = args.pop(); + [options, fn, ...args] = args; + options = parser.load(options, this.jobDefaults); + } + const task = (...args) => { + return new Promise((resolve, reject) => fn(...args, (...args) => (args[0] != null ? reject : resolve)(args))); + }; + const job = new Job(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states); + job.promise + .then((args) => (typeof cb === "function" ? cb(...(args || [])) : undefined)) + .catch(function (args) { + if (Array.isArray(args)) { + return typeof cb === "function" ? cb(...args) : undefined; + } + else { + return typeof cb === "function" ? 
cb(args) : undefined; + } + }); + return this._receive(job); + } + schedule(...args) { + let options, task; + if (typeof args[0] === "function") { + [task, ...args] = args; + options = {}; + } + else { + [options, task, ...args] = args; + } + const job = new Job(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states); + this._receive(job); + return job.promise; + } + wrap(fn) { + const schedule = this.schedule.bind(this); + const wrapped = function (...args) { + return schedule(fn.bind(this), ...args); + }; + wrapped.withOptions = (options, ...args) => schedule(options, fn, ...args); + return wrapped; + } + updateSettings(options) { + return __awaiter(this, void 0, void 0, function* () { + options !== null && options !== void 0 ? options : (options = {}); + yield this._store.__updateSettings__(parser.overwrite(options, this.storeDefaults)); + parser.overwrite(options, this.instanceDefaults, this); + return this; + }); + } + currentReservoir() { + return this._store.__currentReservoir__(); + } + incrementReservoir(incr = 0) { + return this._store.__incrementReservoir__(incr); + } + }; + Bottleneck$1.BottleneckError = BottleneckError; + Bottleneck$1.Group = Group; + Bottleneck$1.RedisConnection = RedisConnection; + Bottleneck$1.IORedisConnection = IORedisConnection; + Bottleneck$1.Batcher = Batcher; + Bottleneck$1.Events = Events; + Bottleneck$1.strategy = { + LEAK: 1, + OVERFLOW: 2, + OVERFLOW_PRIORITY: 4, + BLOCK: 3, + }; + Bottleneck.exports = Bottleneck$1; + Bottleneck.exports.default = Bottleneck$1; + return Bottleneck.exports; + } - }).call(commonjsGlobal); + var lib; + var hasRequiredLib; - var Bottleneck_1 = Bottleneck; + function requireLib () { + if (hasRequiredLib) return lib; + hasRequiredLib = 1; + lib = requireBottleneck(); + return lib; + } - var lib = Bottleneck_1; + var libExports = requireLib(); + var index = /*@__PURE__*/getDefaultExportFromCjs(libExports); - return lib; + return index; -}))); +})); diff --git 
a/package-lock.json b/package-lock.json deleted file mode 100644 index 60dd048..0000000 --- a/package-lock.json +++ /dev/null @@ -1,4619 +0,0 @@ -{ - "name": "@sderrow/bottleneck", - "version": "3.0.7", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "@sderrow/bottleneck", - "version": "3.0.7", - "license": "MIT", - "devDependencies": { - "@babel/core": "^7.5.0", - "@babel/preset-env": "^7.5.0", - "@token-cjg/leakage": "^0.6.0", - "@types/es6-promise": "0.0.33", - "assert": "^1.5.0", - "coffeescript": "2.4.x", - "ioredis": "^4.11.1", - "mocha": "^6.1.4", - "redis": "^2.8.0", - "regenerator-runtime": "^0.12.1", - "rollup": "^0.66.6", - "rollup-plugin-babel": "^4.3.3", - "rollup-plugin-commonjs": "^9.3.4", - "rollup-plugin-json": "^3.1.0", - "rollup-plugin-node-resolve": "^3.4.0", - "typescript": "^4.0.0" - } - }, - "node_modules/@airbnb/node-memwatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@airbnb/node-memwatch/-/node-memwatch-2.0.0.tgz", - "integrity": "sha512-4DMP5GQz9ZYklB/FXiE1+yNffzjdiSerpr10QGxBQF56xcZsKLE0PnL/Pq6yC1sLGT0IHgG4UXgz/a5Yd463gw==", - "dev": true, - "hasInstallScript": true, - "dependencies": { - "bindings": "^1.5.0", - "nan": "^2.14.1" - }, - "engines": { - "node": ">= 10.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.0.0.tgz", - "integrity": "sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA==", - "dev": true, - "dependencies": { - "@babel/highlight": "^7.0.0" - } - }, - "node_modules/@babel/core": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.5.0.tgz", - "integrity": "sha512-6Isr4X98pwXqHvtigw71CKgmhL1etZjPs5A67jL/w0TkLM9eqmFR40YrnJvEc1WnMZFsskjsmid8bHZyxKEAnw==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.0.0", - "@babel/generator": "^7.5.0", - "@babel/helpers": "^7.5.0", - 
"@babel/parser": "^7.5.0", - "@babel/template": "^7.4.4", - "@babel/traverse": "^7.5.0", - "@babel/types": "^7.5.0", - "convert-source-map": "^1.1.0", - "debug": "^4.1.0", - "json5": "^2.1.0", - "lodash": "^4.17.11", - "resolve": "^1.3.2", - "semver": "^5.4.1", - "source-map": "^0.5.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/generator": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.5.0.tgz", - "integrity": "sha512-1TTVrt7J9rcG5PMjvO7VEG3FrEoEJNHxumRq66GemPmzboLWtIjjcJgk8rokuAS7IiRSpgVSu5Vb9lc99iJkOA==", - "dev": true, - "dependencies": { - "@babel/types": "^7.5.0", - "jsesc": "^2.5.1", - "lodash": "^4.17.11", - "source-map": "^0.5.0", - "trim-right": "^1.0.1" - } - }, - "node_modules/@babel/generator/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.0.0.tgz", - "integrity": "sha512-3UYcJUj9kvSLbLbUIfQTqzcy5VX7GRZ/CCDrnOaZorFFM01aXp1+GJwuFGV4NDDoAS+mOUyHcO6UD/RfqOks3Q==", - "dev": true, - "dependencies": { - "@babel/types": "^7.0.0" - } - }, - "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { - 
"version": "7.1.0", - "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.1.0.tgz", - "integrity": "sha512-qNSR4jrmJ8M1VMM9tibvyRAHXQs2PmaksQF7c1CGJNipfe3D8p+wgNwgso/P2A2r2mdgBWAXljNWR0QRZAMW8w==", - "dev": true, - "dependencies": { - "@babel/helper-explode-assignable-expression": "^7.1.0", - "@babel/types": "^7.0.0" - } - }, - "node_modules/@babel/helper-call-delegate": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-call-delegate/-/helper-call-delegate-7.4.4.tgz", - "integrity": "sha512-l79boDFJ8S1c5hvQvG+rc+wHw6IuH7YldmRKsYtpbawsxURu/paVy57FZMomGK22/JckepaikOkY0MoAmdyOlQ==", - "dev": true, - "dependencies": { - "@babel/helper-hoist-variables": "^7.4.4", - "@babel/traverse": "^7.4.4", - "@babel/types": "^7.4.4" - } - }, - "node_modules/@babel/helper-call-delegate/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/helper-define-map": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-define-map/-/helper-define-map-7.4.4.tgz", - "integrity": "sha512-IX3Ln8gLhZpSuqHJSnTNBWGDE9kdkTEWl21A/K7PQ00tseBwbqCHTvNLHSBd9M0R5rER4h5Rsvj9vw0R5SieBg==", - "dev": true, - "dependencies": { - "@babel/helper-function-name": "^7.1.0", - "@babel/types": "^7.4.4", - "lodash": "^4.17.11" - } - }, - "node_modules/@babel/helper-define-map/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - 
"dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/helper-explode-assignable-expression": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.1.0.tgz", - "integrity": "sha512-NRQpfHrJ1msCHtKjbzs9YcMmJZOg6mQMmGRB+hbamEdG5PNpaSm95275VD92DvJKuyl0s2sFiDmMZ+EnnvufqA==", - "dev": true, - "dependencies": { - "@babel/traverse": "^7.1.0", - "@babel/types": "^7.0.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.1.0.tgz", - "integrity": "sha512-A95XEoCpb3TO+KZzJ4S/5uW5fNe26DjBGqf1o9ucyLyCmi1dXq/B3c8iaWTfBk3VvetUxl16e8tIrd5teOCfGw==", - "dev": true, - "dependencies": { - "@babel/helper-get-function-arity": "^7.0.0", - "@babel/template": "^7.1.0", - "@babel/types": "^7.0.0" - } - }, - "node_modules/@babel/helper-get-function-arity": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0.tgz", - "integrity": "sha512-r2DbJeg4svYvt3HOS74U4eWKsUAMRH01Z1ds1zx8KNTPtpTL5JAsdFv8BNyOpVqdFhHkkRDIg5B4AsxmkjAlmQ==", - "dev": true, - "dependencies": { - "@babel/types": "^7.0.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.4.4.tgz", - "integrity": "sha512-VYk2/H/BnYbZDDg39hr3t2kKyifAm1W6zHRfhx8jGjIHpQEBv9dry7oQ2f3+J703TLu69nYdxsovl0XYfcnK4w==", - "dev": true, - "dependencies": { - "@babel/types": "^7.4.4" - } - }, - "node_modules/@babel/helper-hoist-variables/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": 
"sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.0.0.tgz", - "integrity": "sha512-avo+lm/QmZlv27Zsi0xEor2fKcqWG56D5ae9dzklpIaY7cQMK5N8VSpaNVPPagiqmy7LrEjK1IWdGMOqPu5csg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.0.0" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.0.0.tgz", - "integrity": "sha512-aP/hlLq01DWNEiDg4Jn23i+CXxW/owM4WpDLFUbpjxe4NS3BhLVZQ5i7E0ZrxuQ/vwekIeciyamgB1UIYxxM6A==", - "dev": true, - "dependencies": { - "@babel/types": "^7.0.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.4.4.tgz", - "integrity": "sha512-3Z1yp8TVQf+B4ynN7WoHPKS8EkdTbgAEy0nU0rs/1Kw4pDgmvYH3rz3aI11KgxKCba2cn7N+tqzV1mY2HMN96w==", - "dev": true, - "dependencies": { - "@babel/helper-module-imports": "^7.0.0", - "@babel/helper-simple-access": "^7.1.0", - "@babel/helper-split-export-declaration": "^7.4.4", - "@babel/template": "^7.4.4", - "@babel/types": "^7.4.4", - "lodash": "^4.17.11" - } - }, - "node_modules/@babel/helper-module-transforms/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - 
"node_modules/@babel/helper-optimise-call-expression": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.0.0.tgz", - "integrity": "sha512-u8nd9NQePYNQV8iPWu/pLLYBqZBa4ZaY1YWRFMuxrid94wKI1QNt67NEZ7GAe5Kc/0LLScbim05xZFWkAdrj9g==", - "dev": true, - "dependencies": { - "@babel/types": "^7.0.0" - } - }, - "node_modules/@babel/helper-plugin-utils": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz", - "integrity": "sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA==", - "dev": true - }, - "node_modules/@babel/helper-regex": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-regex/-/helper-regex-7.4.4.tgz", - "integrity": "sha512-Y5nuB/kESmR3tKjU8Nkn1wMGEx1tjJX076HBMeL3XLQCu6vA/YRzuTW0bbb+qRnXvQGn+d6Rx953yffl8vEy7Q==", - "dev": true, - "dependencies": { - "lodash": "^4.17.11" - } - }, - "node_modules/@babel/helper-remap-async-to-generator": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.1.0.tgz", - "integrity": "sha512-3fOK0L+Fdlg8S5al8u/hWE6vhufGSn0bN09xm2LXMy//REAF8kDCrYoOBKYmA8m5Nom+sV9LyLCwrFynA8/slg==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.0.0", - "@babel/helper-wrap-function": "^7.1.0", - "@babel/template": "^7.1.0", - "@babel/traverse": "^7.1.0", - "@babel/types": "^7.0.0" - } - }, - "node_modules/@babel/helper-replace-supers": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.4.4.tgz", - "integrity": "sha512-04xGEnd+s01nY1l15EuMS1rfKktNF+1CkKmHoErDppjAAZL+IUBZpzT748x262HF7fibaQPhbvWUl5HeSt1EXg==", - "dev": true, - "dependencies": { - "@babel/helper-member-expression-to-functions": "^7.0.0", - 
"@babel/helper-optimise-call-expression": "^7.0.0", - "@babel/traverse": "^7.4.4", - "@babel/types": "^7.4.4" - } - }, - "node_modules/@babel/helper-replace-supers/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/helper-simple-access": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.1.0.tgz", - "integrity": "sha512-Vk+78hNjRbsiu49zAPALxTb+JUQCz1aolpd8osOF16BGnLtseD21nbHgLPGUwrXEurZgiCOUmvs3ExTu4F5x6w==", - "dev": true, - "dependencies": { - "@babel/template": "^7.1.0", - "@babel/types": "^7.0.0" - } - }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.4.4.tgz", - "integrity": "sha512-Ro/XkzLf3JFITkW6b+hNxzZ1n5OQ80NvIUdmHspih1XAhtN3vPTuUFT4eQnela+2MaZ5ulH+iyP513KJrxbN7Q==", - "dev": true, - "dependencies": { - "@babel/types": "^7.4.4" - } - }, - "node_modules/@babel/helper-split-export-declaration/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/helper-wrap-function": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.2.0.tgz", - "integrity": 
"sha512-o9fP1BZLLSrYlxYEYyl2aS+Flun5gtjTIG8iln+XuEzQTs0PLagAGSXUcqruJwD5fM48jzIEggCKpIfWTcR7pQ==", - "dev": true, - "dependencies": { - "@babel/helper-function-name": "^7.1.0", - "@babel/template": "^7.1.0", - "@babel/traverse": "^7.1.0", - "@babel/types": "^7.2.0" - } - }, - "node_modules/@babel/helper-wrap-function/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/helpers": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.5.1.tgz", - "integrity": "sha512-rVOTDv8sH8kNI72Unenusxw6u+1vEepZgLxeV+jHkhsQlYhzVhzL1EpfoWT7Ub3zpWSv2WV03V853dqsnyoQzA==", - "dev": true, - "dependencies": { - "@babel/template": "^7.4.4", - "@babel/traverse": "^7.5.0", - "@babel/types": "^7.5.0" - } - }, - "node_modules/@babel/helpers/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/highlight": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.5.0.tgz", - "integrity": "sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ==", - "dev": true, - "dependencies": { - "chalk": "^2.0.0", - "esutils": "^2.0.2", - "js-tokens": "^4.0.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.5.0.tgz", - "integrity": 
"sha512-I5nW8AhGpOXGCCNYGc+p7ExQIBxRFnS2fd/d862bNOKvmoEPjYPcfIjsfdy0ujagYOIYPczKgD9l3FsgTkAzKA==", - "dev": true, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-proposal-async-generator-functions": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.2.0.tgz", - "integrity": "sha512-+Dfo/SCQqrwx48ptLVGLdE39YtWRuKc/Y9I5Fy0P1DDBB9lsAHpjcEJQt+4IifuSOSTLBKJObJqMvaO1pIE8LQ==", - "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-async-generator-functions instead.", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-remap-async-to-generator": "^7.1.0", - "@babel/plugin-syntax-async-generators": "^7.2.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-proposal-dynamic-import": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.5.0.tgz", - "integrity": "sha512-x/iMjggsKTFHYC6g11PL7Qy58IK8H5zqfm9e6hu4z1iH2IRyAp9u9dL80zA6R76yFovETFLKz2VJIC2iIPBuFw==", - "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. 
Please use @babel/plugin-transform-dynamic-import instead.", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/plugin-syntax-dynamic-import": "^7.2.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-proposal-json-strings": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.2.0.tgz", - "integrity": "sha512-MAFV1CA/YVmYwZG0fBQyXhmj0BHCB5egZHCKWIFVv/XCxAeVGIHfos3SwDck4LvCllENIAg7xMKOG5kH0dzyUg==", - "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-json-strings instead.", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/plugin-syntax-json-strings": "^7.2.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-proposal-object-rest-spread": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.1.tgz", - "integrity": "sha512-PVGXx5LYHcT7L4MdoE+rM5uq68IKlvU9lljVQ4OXY6aUEnGvezcGbM4VNY57Ug+3R2Zg/nYHlEdiWoIBoRA0mw==", - "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. 
Please use @babel/plugin-transform-object-rest-spread instead.", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/plugin-syntax-object-rest-spread": "^7.2.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-proposal-optional-catch-binding": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.2.0.tgz", - "integrity": "sha512-mgYj3jCcxug6KUcX4OBoOJz3CMrwRfQELPQ5560F70YQUBZB7uac9fqaWamKR1iWUzGiK2t0ygzjTScZnVz75g==", - "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. Please use @babel/plugin-transform-optional-catch-binding instead.", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/plugin-syntax-optional-catch-binding": "^7.2.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-proposal-unicode-property-regex": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.4.4.tgz", - "integrity": "sha512-j1NwnOqMG9mFUOH58JTFsA/+ZYzQLUZ/drqWUqxCYLGeu2JFZL8YrNC9hBxKmWtAuOCHPcRpgv7fhap09Fb4kA==", - "deprecated": "This proposal has been merged to the ECMAScript standard and thus this plugin is no longer maintained. 
Please use @babel/plugin-transform-unicode-property-regex instead.", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-regex": "^7.4.4", - "regexpu-core": "^4.5.4" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-async-generators": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.2.0.tgz", - "integrity": "sha512-1ZrIRBv2t0GSlcwVoQ6VgSLpLgiN/FVQUzt9znxo7v2Ov4jJrs8RY8tv0wvDmFN3qIdMKWrmMMW6yZ0G19MfGg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-dynamic-import": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.2.0.tgz", - "integrity": "sha512-mVxuJ0YroI/h/tbFTPGZR8cv6ai+STMKNBq0f8hFxsxWjl94qqhsb+wXbpNMDPU3cfR1TIsVFzU3nXyZMqyK4w==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-json-strings": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.2.0.tgz", - "integrity": "sha512-5UGYnMSLRE1dqqZwug+1LISpA403HzlSfsg6P9VXU6TBjcSHeNlw4DxDx7LgpF+iKZoOG/+uzqoRHTdcUpiZNg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-object-rest-spread": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.2.0.tgz", - "integrity": "sha512-t0JKGgqk2We+9may3t0xDdmneaXmyxq0xieYcKHxIsrJO64n1OiMWNUtc5gQK1PA0NpdCRrtZp4z+IUaKugrSA==", 
- "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-optional-catch-binding": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.2.0.tgz", - "integrity": "sha512-bDe4xKNhb0LI7IvZHiA13kff0KEfaGX/Hv4lMA9+7TEc63hMNvfKo6ZFpXhKuEp+II/q35Gc4NoMeDZyaUbj9w==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.2.0.tgz", - "integrity": "sha512-ER77Cax1+8/8jCB9fo4Ud161OZzWN5qawi4GusDuRLcDbDG+bIGYY20zb2dfAFdTRGzrfq2xZPvF0R64EHnimg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-async-to-generator": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.5.0.tgz", - "integrity": "sha512-mqvkzwIGkq0bEF1zLRRiTdjfomZJDV33AH3oQzHVGkI2VzEmXLpKKOBvEVaFZBJdN0XTyH38s9j/Kiqr68dggg==", - "dev": true, - "dependencies": { - "@babel/helper-module-imports": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-remap-async-to-generator": "^7.1.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-block-scoped-functions": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.2.0.tgz", - "integrity": 
"sha512-ntQPR6q1/NKuphly49+QiQiTN0O63uOwjdD6dhIjSWBI5xlrbUFh720TIpzBhpnrLfv2tNH/BXvLIab1+BAI0w==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-block-scoping": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.4.4.tgz", - "integrity": "sha512-jkTUyWZcTrwxu5DD4rWz6rDB5Cjdmgz6z7M7RLXOJyCUkFBawssDGcGh8M/0FTSB87avyJI1HsTwUXp9nKA1PA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "lodash": "^4.17.11" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-classes": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.4.4.tgz", - "integrity": "sha512-/e44eFLImEGIpL9qPxSRat13I5QNRgBLu2hOQJCF7VLy/otSM/sypV1+XaIw5+502RX/+6YaSAPmldk+nhHDPw==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.0.0", - "@babel/helper-define-map": "^7.4.4", - "@babel/helper-function-name": "^7.1.0", - "@babel/helper-optimise-call-expression": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-replace-supers": "^7.4.4", - "@babel/helper-split-export-declaration": "^7.4.4", - "globals": "^11.1.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.2.0.tgz", - "integrity": "sha512-kP/drqTxY6Xt3NNpKiMomfgkNn4o7+vKxK2DDKcBG9sHj51vHqMBGy8wbDS/J4lMxnqs153/T3+DmCEAkC5cpA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@babel/plugin-transform-destructuring": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.5.0.tgz", - "integrity": "sha512-YbYgbd3TryYYLGyC7ZR+Tq8H/+bCmwoaxHfJHupom5ECstzbRLTch6gOQbhEY9Z4hiCNHEURgq06ykFv9JZ/QQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-dotall-regex": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.4.4.tgz", - "integrity": "sha512-P05YEhRc2h53lZDjRPk/OektxCVevFzZs2Gfjd545Wde3k+yFDbXORgl2e0xpbq8mLcKJ7Idss4fAg0zORN/zg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-regex": "^7.4.4", - "regexpu-core": "^4.5.4" - }, - "engines": { - "node": ">=4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-duplicate-keys": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.5.0.tgz", - "integrity": "sha512-igcziksHizyQPlX9gfSjHkE2wmoCH3evvD2qR5w29/Dk0SMKE/eOI7f1HhBdNhR/zxJDqrgpoDTq5YSLH/XMsQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-exponentiation-operator": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.2.0.tgz", - "integrity": "sha512-umh4hR6N7mu4Elq9GG8TOu9M0bakvlsREEC+ialrQN6ABS4oDQ69qJv1VtR3uxlKMCQMCvzk7vr17RHKcjx68A==", - "dev": true, - "dependencies": { - "@babel/helper-builder-binary-assignment-operator-visitor": "^7.1.0", - "@babel/helper-plugin-utils": "^7.0.0" - }, - 
"peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-for-of": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.4.4.tgz", - "integrity": "sha512-9T/5Dlr14Z9TIEXLXkt8T1DU7F24cbhwhMNUziN3hB1AXoZcdzPcTiKGRn/6iOymDqtTKWnr/BtRKN9JwbKtdQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-function-name": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.4.4.tgz", - "integrity": "sha512-iU9pv7U+2jC9ANQkKeNF6DrPy4GBa4NWQtl6dHB4Pb3izX2JOEvDTFarlNsBj/63ZEzNNIAMs3Qw4fNCcSOXJA==", - "dev": true, - "dependencies": { - "@babel/helper-function-name": "^7.1.0", - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-literals": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.2.0.tgz", - "integrity": "sha512-2ThDhm4lI4oV7fVQ6pNNK+sx+c/GM5/SaML0w/r4ZB7sAneD/piDJtwdKlNckXeyGK7wlwg2E2w33C/Hh+VFCg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-member-expression-literals": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.2.0.tgz", - "integrity": "sha512-HiU3zKkSU6scTidmnFJ0bMX8hz5ixC93b4MHMiYebmk2lUVNGOboPsqQvx5LzooihijUoLR/v7Nc1rbBtnc7FA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@babel/plugin-transform-modules-amd": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.5.0.tgz", - "integrity": "sha512-n20UsQMKnWrltocZZm24cRURxQnWIvsABPJlw/fvoy9c6AgHZzoelAIzajDHAQrDpuKFFPPcFGd7ChsYuIUMpg==", - "dev": true, - "dependencies": { - "@babel/helper-module-transforms": "^7.1.0", - "@babel/helper-plugin-utils": "^7.0.0", - "babel-plugin-dynamic-import-node": "^2.3.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.5.0.tgz", - "integrity": "sha512-xmHq0B+ytyrWJvQTc5OWAC4ii6Dhr0s22STOoydokG51JjWhyYo5mRPXoi+ZmtHQhZZwuXNN+GG5jy5UZZJxIQ==", - "dev": true, - "dependencies": { - "@babel/helper-module-transforms": "^7.4.4", - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-simple-access": "^7.1.0", - "babel-plugin-dynamic-import-node": "^2.3.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-systemjs": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.5.0.tgz", - "integrity": "sha512-Q2m56tyoQWmuNGxEtUyeEkm6qJYFqs4c+XyXH5RAuYxObRNz9Zgj/1g2GMnjYp2EUyEy7YTrxliGCXzecl/vJg==", - "dev": true, - "dependencies": { - "@babel/helper-hoist-variables": "^7.4.4", - "@babel/helper-plugin-utils": "^7.0.0", - "babel-plugin-dynamic-import-node": "^2.3.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-umd": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.2.0.tgz", - "integrity": 
"sha512-BV3bw6MyUH1iIsGhXlOK6sXhmSarZjtJ/vMiD9dNmpY8QXFFQTj+6v92pcfy1iqa8DeAfJFwoxcrS/TUZda6sw==", - "dev": true, - "dependencies": { - "@babel/helper-module-transforms": "^7.1.0", - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.4.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.4.5.tgz", - "integrity": "sha512-z7+2IsWafTBbjNsOxU/Iv5CvTJlr5w4+HGu1HovKYTtgJ362f7kBcQglkfmlspKKZ3bgrbSGvLfNx++ZJgCWsg==", - "dev": true, - "dependencies": { - "regexp-tree": "^0.1.6" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-new-target": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.4.4.tgz", - "integrity": "sha512-r1z3T2DNGQwwe2vPGZMBNjioT2scgWzK9BCnDEh+46z8EEwXBq24uRzd65I7pjtugzPSj921aM15RpESgzsSuA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-object-super": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.2.0.tgz", - "integrity": "sha512-VMyhPYZISFZAqAPVkiYb7dUe2AsVi2/wCT5+wZdsNO31FojQJa9ns40hzZ6U9f50Jlq4w6qwzdBB2uwqZ00ebg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-replace-supers": "^7.1.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-parameters": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.4.4.tgz", - "integrity": 
"sha512-oMh5DUO1V63nZcu/ZVLQFqiihBGo4OpxJxR1otF50GMeCLiRx5nUdtokd+u9SuVJrvvuIh9OosRFPP4pIPnwmw==", - "dev": true, - "dependencies": { - "@babel/helper-call-delegate": "^7.4.4", - "@babel/helper-get-function-arity": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-property-literals": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.2.0.tgz", - "integrity": "sha512-9q7Dbk4RhgcLp8ebduOpCbtjh7C0itoLYHXd9ueASKAG/is5PQtMR5VJGka9NKqGhYEGn5ITahd4h9QeBMylWQ==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-regenerator": { - "version": "7.4.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.4.5.tgz", - "integrity": "sha512-gBKRh5qAaCWntnd09S8QC7r3auLCqq5DI6O0DlfoyDjslSBVqBibrMdsqO+Uhmx3+BlOmE/Kw1HFxmGbv0N9dA==", - "dev": true, - "dependencies": { - "regenerator-transform": "^0.14.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-reserved-words": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.2.0.tgz", - "integrity": "sha512-fz43fqW8E1tAB3DKF19/vxbpib1fuyCwSPE418ge5ZxILnBhWyhtPgz8eh1RCGGJlwvksHkyxMxh0eenFi+kFw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-shorthand-properties": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.2.0.tgz", - "integrity": 
"sha512-QP4eUM83ha9zmYtpbnyjTLAGKQritA5XW/iG9cjtuOI8s1RuL/3V6a3DeSHfKutJQ+ayUfeZJPcnCYEQzaPQqg==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-spread": { - "version": "7.2.2", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.2.2.tgz", - "integrity": "sha512-KWfky/58vubwtS0hLqEnrWJjsMGaOeSBn90Ezn5Jeg9Z8KKHmELbP1yGylMlm5N6TPKeY9A2+UaSYLdxahg01w==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-sticky-regex": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.2.0.tgz", - "integrity": "sha512-KKYCoGaRAf+ckH8gEL3JHUaFVyNHKe3ASNsZ+AlktgHevvxGigoIttrEJb8iKN03Q7Eazlv1s6cx2B2cQ3Jabw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-regex": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-template-literals": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.4.4.tgz", - "integrity": "sha512-mQrEC4TWkhLN0z8ygIvEL9ZEToPhG5K7KDW3pzGqOfIGZ28Jb0POUkeWcoz8HnHvhFy6dwAT1j8OzqN8s804+g==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-typeof-symbol": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.2.0.tgz", - "integrity": 
"sha512-2LNhETWYxiYysBtrBTqL8+La0jIoQQnIScUJc74OYvUGRmkskNY4EzLCnjHBzdmb38wqtTaixpo1NctEcvMDZw==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-regex": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.4.4.tgz", - "integrity": "sha512-il+/XdNw01i93+M9J9u4T7/e/Ue/vWfNZE4IRUQjplu2Mqb/AFTDimkw2tdEdSH50wuQXZAbXSql0UphQke+vA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-regex": "^7.4.4", - "regexpu-core": "^4.5.4" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/preset-env": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.5.0.tgz", - "integrity": "sha512-/5oQ7cYg+6sH9Dt9yx5IiylnLPiUdyMHl5y+K0mKVNiW2wJ7FpU5bg8jKcT8PcCbxdYzfv6OuC63jLEtMuRSmQ==", - "dev": true, - "dependencies": { - "@babel/helper-module-imports": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/plugin-proposal-async-generator-functions": "^7.2.0", - "@babel/plugin-proposal-dynamic-import": "^7.5.0", - "@babel/plugin-proposal-json-strings": "^7.2.0", - "@babel/plugin-proposal-object-rest-spread": "^7.5.0", - "@babel/plugin-proposal-optional-catch-binding": "^7.2.0", - "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", - "@babel/plugin-syntax-async-generators": "^7.2.0", - "@babel/plugin-syntax-dynamic-import": "^7.2.0", - "@babel/plugin-syntax-json-strings": "^7.2.0", - "@babel/plugin-syntax-object-rest-spread": "^7.2.0", - "@babel/plugin-syntax-optional-catch-binding": "^7.2.0", - "@babel/plugin-transform-arrow-functions": "^7.2.0", - "@babel/plugin-transform-async-to-generator": "^7.5.0", - "@babel/plugin-transform-block-scoped-functions": "^7.2.0", - "@babel/plugin-transform-block-scoping": "^7.4.4", - 
"@babel/plugin-transform-classes": "^7.4.4", - "@babel/plugin-transform-computed-properties": "^7.2.0", - "@babel/plugin-transform-destructuring": "^7.5.0", - "@babel/plugin-transform-dotall-regex": "^7.4.4", - "@babel/plugin-transform-duplicate-keys": "^7.5.0", - "@babel/plugin-transform-exponentiation-operator": "^7.2.0", - "@babel/plugin-transform-for-of": "^7.4.4", - "@babel/plugin-transform-function-name": "^7.4.4", - "@babel/plugin-transform-literals": "^7.2.0", - "@babel/plugin-transform-member-expression-literals": "^7.2.0", - "@babel/plugin-transform-modules-amd": "^7.5.0", - "@babel/plugin-transform-modules-commonjs": "^7.5.0", - "@babel/plugin-transform-modules-systemjs": "^7.5.0", - "@babel/plugin-transform-modules-umd": "^7.2.0", - "@babel/plugin-transform-named-capturing-groups-regex": "^7.4.5", - "@babel/plugin-transform-new-target": "^7.4.4", - "@babel/plugin-transform-object-super": "^7.2.0", - "@babel/plugin-transform-parameters": "^7.4.4", - "@babel/plugin-transform-property-literals": "^7.2.0", - "@babel/plugin-transform-regenerator": "^7.4.5", - "@babel/plugin-transform-reserved-words": "^7.2.0", - "@babel/plugin-transform-shorthand-properties": "^7.2.0", - "@babel/plugin-transform-spread": "^7.2.0", - "@babel/plugin-transform-sticky-regex": "^7.2.0", - "@babel/plugin-transform-template-literals": "^7.4.4", - "@babel/plugin-transform-typeof-symbol": "^7.2.0", - "@babel/plugin-transform-unicode-regex": "^7.4.4", - "@babel/types": "^7.5.0", - "browserslist": "^4.6.0", - "core-js-compat": "^3.1.1", - "invariant": "^2.2.2", - "js-levenshtein": "^1.1.3", - "semver": "^5.5.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/preset-env/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - 
"esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/template": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.4.4.tgz", - "integrity": "sha512-CiGzLN9KgAvgZsnivND7rkA+AeJ9JB0ciPOD4U59GKbQP2iQl+olF1l76kJOupqidozfZ32ghwBEJDhnk9MEcw==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.0.0", - "@babel/parser": "^7.4.4", - "@babel/types": "^7.4.4" - } - }, - "node_modules/@babel/template/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/traverse": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.5.0.tgz", - "integrity": "sha512-SnA9aLbyOCcnnbQEGwdfBggnc142h/rbqqsXcaATj2hZcegCl903pUD/lfpsNBlBSuWow/YDfRyJuWi2EPR5cg==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.0.0", - "@babel/generator": "^7.5.0", - "@babel/helper-function-name": "^7.1.0", - "@babel/helper-split-export-declaration": "^7.4.4", - "@babel/parser": "^7.5.0", - "@babel/types": "^7.5.0", - "debug": "^4.1.0", - "globals": "^11.1.0", - "lodash": "^4.17.11" - } - }, - "node_modules/@babel/traverse/node_modules/@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@babel/types": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.1.3.tgz", - "integrity": 
"sha512-RpPOVfK+yatXyn8n4PB1NW6k9qjinrXrRR8ugBN8fD6hCy5RXI6PSbVqpOJBO9oSaY7Nom4ohj35feb0UR9hSA==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2", - "lodash": "^4.17.10", - "to-fast-properties": "^2.0.0" - } - }, - "node_modules/@token-cjg/leakage": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/@token-cjg/leakage/-/leakage-0.6.0.tgz", - "integrity": "sha512-cFs9tNOtEQZ4HTHxCj0SVXwaZhKZFYg+E7mT/riA+FAw55SZ1BicxyBJpsqHpllhSvRYLB+XJaLjMFWYqZk5Hw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@airbnb/node-memwatch": "^2.0.0", - "es6-error": "^4.0.2", - "minimist": "^1.2.0", - "pretty-bytes": "^4.0.2" - }, - "engines": { - "node": ">= 8.0" - } - }, - "node_modules/@token-cjg/leakage/node_modules/minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/@types/es6-promise": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/@types/es6-promise/-/es6-promise-0.0.33.tgz", - "integrity": "sha512-HKJFVLCGrWQ/1unEw8JdaTxu6n3EUxmwTxJ6D0O1x0gD8joCsgoTWxEgevb7fp2XIogNjof3KEd+3bJoGne/nw==", - "dev": true - }, - "node_modules/@types/estree": { - "version": "0.0.39", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.39.tgz", - "integrity": "sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==", - "dev": true - }, - "node_modules/@types/node": { - "version": "10.12.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.0.tgz", - "integrity": "sha512-3TUHC3jsBAB7qVRGxT6lWyYo2v96BMmD2PTcl47H25Lu7UXtFH/2qqmKiVrnel6Ne//0TFYf6uvNX+HW2FRkLQ==", - "dev": true - }, - "node_modules/ansi-colors": { - "version": "3.2.3", - "resolved": 
"https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.3.tgz", - "integrity": "sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "dependencies": { - "sprintf-js": "~1.0.2" - } - }, - "node_modules/arr-flatten": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", - "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/arr-union": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", - "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/assert": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/assert/-/assert-1.5.0.tgz", - "integrity": "sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA==", - "dev": true, - "dependencies": { - "object-assign": "^4.1.1", - "util": 
"0.10.3" - } - }, - "node_modules/assign-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", - "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/atob": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", - "dev": true, - "bin": { - "atob": "bin/atob.js" - }, - "engines": { - "node": ">= 4.5.0" - } - }, - "node_modules/babel-plugin-dynamic-import-node": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz", - "integrity": "sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ==", - "dev": true, - "dependencies": { - "object.assign": "^4.1.0" - } - }, - "node_modules/balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "node_modules/base": { - "version": "0.11.2", - "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", - "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", - "dev": true, - "dependencies": { - "cache-base": "^1.0.1", - "class-utils": "^0.3.5", - "component-emitter": "^1.2.1", - "define-property": "^1.0.0", - "isobject": "^3.0.1", - "mixin-deep": "^1.2.0", - "pascalcase": "^0.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dev": true, - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": 
{ - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "deprecated": "Please upgrade to v1.0.1", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "deprecated": "Please upgrade to v1.0.1", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/base/node_modules/kind-of": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz", - "integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/bindings": { - "version": "1.5.0", - 
"resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "file-uri-to-path": "1.0.0" - } - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/browser-stdout": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", - "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", - "dev": true - }, - "node_modules/browserslist": { - "version": "4.6.3", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.6.3.tgz", - "integrity": "sha512-CNBqTCq22RKM8wKJNowcqihHJ4SkI8CGeK7KOR9tPboXUuS5Zk5lQgzzTbs4oxD8x+6HUshZUa2OyNI9lR93bQ==", - "dev": true, - "dependencies": { - "caniuse-lite": "^1.0.30000975", - "electron-to-chromium": "^1.3.164", - "node-releases": "^1.1.23" - }, - "bin": { - "browserslist": "cli.js" - } - }, - "node_modules/builtin-modules": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-2.0.0.tgz", - "integrity": "sha512-3U5kUA5VPsRUA3nofm/BXX7GVHKfxz0hOBAPxXrIvHzlDRkQVqEn6yi8QJegxl4LzOHLdvb7XF5dVawa/VVYBg==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/cache-base": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", - "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", - "dev": true, - "dependencies": { - "collection-visit": "^1.0.0", - 
"component-emitter": "^1.2.1", - "get-value": "^2.0.6", - "has-value": "^1.0.0", - "isobject": "^3.0.1", - "set-value": "^2.0.0", - "to-object-path": "^0.3.0", - "union-value": "^1.0.0", - "unset-value": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/cache-base/node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30000980", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30000980.tgz", - "integrity": "sha512-as0PRtWHaX3gl2gpC7qA7bX88lr+qLacMMXm1QKLLQtBCwT/Ljbgrv5EXKMNBoeEX6yFZ4vIsBb4Nh+PEwW2Rw==", - "dev": true - }, - "node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/class-utils": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", - "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", - "dev": true, - "dependencies": { - "arr-union": "^3.1.0", - "define-property": "^0.2.5", - "isobject": "^3.0.0", - "static-extend": "^0.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - 
"node_modules/class-utils/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/class-utils/node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/cliui": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", - "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", - "dev": true, - "dependencies": { - "string-width": "^2.1.1", - "strip-ansi": "^4.0.0", - "wrap-ansi": "^2.0.0" - } - }, - "node_modules/cluster-key-slot": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.0.12.tgz", - "integrity": "sha512-21O0kGmvED5OJ7ZTdqQ5lQQ+sjuez33R+d35jZKLwqUb5mqcPHUsxOSzj61+LHVtxGZd1kShbQM3MjB/gBJkVg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/code-point-at": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/coffeescript": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/coffeescript/-/coffeescript-2.4.1.tgz", - "integrity": "sha512-34GV1aHrsMpTaO3KfMJL40ZNuvKDR/g98THHnE9bQj8HjMaZvSrLik99WWqyMhRtbe8V5hpx5iLgdcSvM/S2wg==", - "dev": true, - "bin": { - "cake": "bin/cake", - "coffee": "bin/coffee" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/collection-visit": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", - "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", - "dev": true, - "dependencies": { - "map-visit": "^1.0.0", - "object-visit": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "node_modules/component-emitter": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", - "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==", - "dev": true - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "node_modules/convert-source-map": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.6.0.tgz", - "integrity": "sha512-eFu7XigvxdZ1ETfbgPBohgyQ/Z++C0eEhTor0qRwBw9unw+L0/6V8wkSuGgzdThkiS5lSpdptOQPD8Ak40a+7A==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.1" - } - }, - "node_modules/copy-descriptor": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", - "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/core-js-compat": { - "version": "3.1.4", - "resolved": 
"https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.1.4.tgz", - "integrity": "sha512-Z5zbO9f1d0YrJdoaQhphVAnKPimX92D6z8lCGphH89MNRxlL1prI9ExJPqVwP0/kgkQCv8c4GJGT8X16yUncOg==", - "dev": true, - "dependencies": { - "browserslist": "^4.6.2", - "core-js-pure": "3.1.4", - "semver": "^6.1.1" - } - }, - "node_modules/core-js-compat/node_modules/semver": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.2.0.tgz", - "integrity": "sha512-jdFC1VdUGT/2Scgbimf7FSx9iJLXoqfglSF+gJeuNWVpiE37OIbc1jywR/GJyFdz3mnkz2/id0L0J/cr0izR5A==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/core-js-pure": { - "version": "3.1.4", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.1.4.tgz", - "integrity": "sha512-uJ4Z7iPNwiu1foygbcZYJsJs1jiXrTTCvxfLDXNhI/I+NHbSIEyr548y4fcsCEyWY0XgfAG/qqaunJ1SThHenA==", - "deprecated": "core-js-pure@<3.23.3 is no longer maintained and not recommended for usage due to the number of issues. Because of the V8 engine whims, feature detection in old core-js versions could cause a slowdown up to 100x even if nothing is polyfilled. Some versions have web compatibility issues. 
Please, upgrade your dependencies to the actual version of core-js-pure.", - "dev": true, - "hasInstallScript": true - }, - "node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", - "dev": true, - "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - }, - "engines": { - "node": ">=4.8" - } - }, - "node_modules/debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. (https://github.com/visionmedia/debug/issues/797)", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/decamelize": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/decode-uri-component": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz", - "integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=", - "dev": true, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/define-properties": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", - "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", - "dev": true, - "dependencies": { - "object-keys": "^1.0.12" - }, - "engines": { - "node": ">= 0.4" - } - }, - 
"node_modules/define-property": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", - "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", - "dev": true, - "dependencies": { - "is-descriptor": "^1.0.2", - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "deprecated": "Please upgrade to v1.0.1", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "deprecated": "Please upgrade to v1.0.1", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - 
"engines": { - "node": ">=0.10.0" - } - }, - "node_modules/define-property/node_modules/kind-of": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz", - "integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/denque": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/denque/-/denque-1.4.1.tgz", - "integrity": "sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ==", - "dev": true, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/diff": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", - "dev": true, - "engines": { - "node": ">=0.3.1" - } - }, - "node_modules/double-ended-queue": { - "version": "2.1.0-0", - "resolved": "https://registry.npmjs.org/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz", - "integrity": "sha1-ED01J/0xUo9AGIEwyEHv3XgmTlw=", - "dev": true - }, - "node_modules/electron-to-chromium": { - "version": "1.3.188", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.188.tgz", - "integrity": "sha512-tEQcughYIMj8WDMc59EGEtNxdGgwal/oLLTDw+NEqJRJwGflQvH3aiyiexrWeZOETP4/ko78PVr6gwNhdozvuQ==", - "dev": true - }, - "node_modules/emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true - }, - "node_modules/end-of-stream": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", - "integrity": "sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q==", - 
"dev": true, - "dependencies": { - "once": "^1.4.0" - } - }, - "node_modules/es-abstract": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.13.0.tgz", - "integrity": "sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg==", - "dev": true, - "dependencies": { - "es-to-primitive": "^1.2.0", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "is-callable": "^1.1.4", - "is-regex": "^1.0.4", - "object-keys": "^1.0.12" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-to-primitive": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", - "integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==", - "dev": true, - "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es6-error": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", - "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", - "dev": true, - "license": "MIT" - }, - "node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true, - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/estree-walker": { - "version": "0.6.1", - "resolved": 
"https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.1.tgz", - "integrity": "sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==", - "dev": true - }, - "node_modules/esutils": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", - "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/execa": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", - "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", - "dev": true, - "dependencies": { - "cross-spawn": "^6.0.0", - "get-stream": "^4.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/extend-shallow": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", - "integrity": "sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=", - "dev": true, - "dependencies": { - "assign-symbols": "^1.0.0", - "is-extendable": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/extend-shallow/node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dev": true, - "dependencies": { - "is-plain-object": "^2.0.4" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/file-uri-to-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", - "dev": true, - "license": "MIT" - }, - "node_modules/find-up": { - 
"version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "dependencies": { - "locate-path": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/flat": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.0.tgz", - "integrity": "sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==", - "deprecated": "Fixed a prototype pollution security issue in 4.1.0, please upgrade to ^4.1.1 or ^5.0.1.", - "dev": true, - "dependencies": { - "is-buffer": "~2.0.3" - }, - "bin": { - "flat": "cli.js" - } - }, - "node_modules/flat/node_modules/is-buffer": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz", - "integrity": "sha512-U15Q7MXTuZlrbymiz95PJpZxu8IlipAp4dtS3wOdgPXx3mqBnslrWU14kxfHB+Py/+2PVKSr37dMAgM2A4uArw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/for-in": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fragment-cache": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", - "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", - "dev": true, - "dependencies": { - "map-cache": "^0.2.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true - }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": 
"sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "dev": true, - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-stream": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", - "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", - "dev": true, - "dependencies": { - "pump": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/get-value": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", - "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/growl": { - "version": "1.10.5", - "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", - "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", - "dev": true, - "engines": { - "node": ">=4.x" - } - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/has-flag": { - "version": 
"3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/has-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz", - "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/has-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", - "integrity": "sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=", - "dev": true, - "dependencies": { - "get-value": "^2.0.6", - "has-values": "^1.0.0", - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-value/node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", - "integrity": "sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=", - "dev": true, - "dependencies": { - "is-number": "^3.0.0", - "kind-of": "^4.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values/node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/has-values/node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - 
"node_modules/has-values/node_modules/kind-of": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", - "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/he": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", - "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", - "dev": true, - "bin": { - "he": "bin/he" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dev": true, - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", - "integrity": "sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE=", - "dev": true - }, - "node_modules/invariant": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", - "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", - "dev": true, - "dependencies": { - "loose-envify": "^1.0.0" - } - }, - "node_modules/invert-kv": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-2.0.0.tgz", - "integrity": "sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/ioredis": { - "version": "4.11.1", - "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-4.11.1.tgz", - 
"integrity": "sha512-Qnp7ecb3WeaL7ojeSlb0UBRXmsRMMFcjM+PaAcap8FLLf1NznRD6x96/PS2DEqoRfdM9WVffAjIIYuUp+q3zEw==", - "dev": true, - "dependencies": { - "cluster-key-slot": "^1.0.6", - "debug": "^4.1.1", - "denque": "^1.1.0", - "lodash.defaults": "^4.2.0", - "lodash.flatten": "^4.4.0", - "redis-commands": "1.5.0", - "redis-errors": "^1.2.0", - "redis-parser": "^3.0.0", - "standard-as-callback": "^2.0.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/ioredis/node_modules/redis-commands": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.5.0.tgz", - "integrity": "sha512-6KxamqpZ468MeQC3bkWmCB1fp56XL64D4Kf0zJSwDZbVLLm7KFkoIcHrgRvQ+sk8dnhySs7+yBg94yIkAK7aJg==", - "dev": true - }, - "node_modules/is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", - "deprecated": "Please upgrade to v0.1.7", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", - "dev": true - }, - "node_modules/is-callable": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", - "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", - "deprecated": "Please upgrade to v0.1.5", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - 
"engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-date-object": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", - "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-descriptor/node_modules/kind-of": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", - "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-extendable": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/is-module": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-module/-/is-module-1.0.0.tgz", - "integrity": "sha1-Mlj7afeMFNW4FdZkM2tM/7ZEFZE=", - "dev": true - }, - "node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": 
"sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dev": true, - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-plain-object/node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-regex": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", - "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=", - "dev": true, - "dependencies": { - "has": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-symbol": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz", - "integrity": "sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw==", - "dev": true, - "dependencies": { - "has-symbols": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/is-windows": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", - "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", - "dev": true - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": 
"sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "node_modules/js-levenshtein": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/js-levenshtein/-/js-levenshtein-1.1.6.tgz", - "integrity": "sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "dev": true - }, - "node_modules/js-yaml": { - "version": "3.13.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", - "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", - "dev": true, - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", - "dev": true, - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/json5": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.1.0.tgz", - "integrity": "sha512-8Mh9h6xViijj36g7Dxi+Y4S6hNGV96vcJZr/SrlHh1LR/pEn/8j/+qIBbs44YKl69Lrfctp4QD+AdWLTMqEZAQ==", - "dev": true, - "dependencies": { - "minimist": "^1.2.0" - }, - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/json5/node_modules/minimist": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", - "dev": true - }, - "node_modules/kind-of": { - "version": "3.2.2", - "resolved": 
"https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/lcid": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/lcid/-/lcid-2.0.0.tgz", - "integrity": "sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==", - "dev": true, - "dependencies": { - "invert-kv": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "dependencies": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/lodash": { - "version": "4.17.11", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", - "integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==", - "dev": true - }, - "node_modules/lodash.defaults": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", - "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=", - "dev": true - }, - "node_modules/lodash.flatten": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", - "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=", - "dev": true - }, - "node_modules/log-symbols": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-2.2.0.tgz", - "integrity": "sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==", - "dev": true, - "dependencies": { - "chalk": "^2.0.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/loose-envify": { - 
"version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "dev": true, - "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" - } - }, - "node_modules/magic-string": { - "version": "0.25.2", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.25.2.tgz", - "integrity": "sha512-iLs9mPjh9IuTtRsqqhNGYcZXGei0Nh/A4xirrsqW7c+QhKVFL2vm7U09ru6cHRD22azaP/wMDgI+HCqbETMTtg==", - "dev": true, - "dependencies": { - "sourcemap-codec": "^1.4.4" - } - }, - "node_modules/map-age-cleaner": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz", - "integrity": "sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==", - "dev": true, - "dependencies": { - "p-defer": "^1.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/map-cache": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", - "integrity": "sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/map-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", - "integrity": "sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=", - "dev": true, - "dependencies": { - "object-visit": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/mem": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/mem/-/mem-4.3.0.tgz", - "integrity": "sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==", - "dev": true, - "dependencies": { - "map-age-cleaner": "^0.1.1", - "mimic-fn": "^2.0.0", - "p-is-promise": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - 
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "0.0.8", - "resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", - "dev": true - }, - "node_modules/mixin-deep": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.1.tgz", - "integrity": "sha512-8ZItLHeEgaqEvd5lYBXfm4EZSFCX29Jb9K+lAHhDKzReKBQKj3R+7NOF6tjqYi9t4oI8VUfaWITJQm86wnXGNQ==", - "deprecated": "Critical bug fixed in v2.0.1, please upgrade to the latest version.", - "dev": true, - "dependencies": { - "for-in": "^1.0.2", - "is-extendable": "^1.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/mixin-deep/node_modules/is-extendable": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", - "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", - "dev": true, - "dependencies": { - "is-plain-object": "^2.0.4" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/mkdirp": { - "version": "0.5.1", - "resolved": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "deprecated": "Legacy versions of mkdirp are no longer supported. Please update to mkdirp 1.x. 
(Note that the API surface has changed to use Promises in 1.x.)", - "dev": true, - "dependencies": { - "minimist": "0.0.8" - }, - "bin": { - "mkdirp": "bin/cmd.js" - } - }, - "node_modules/mocha": { - "version": "6.1.4", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-6.1.4.tgz", - "integrity": "sha512-PN8CIy4RXsIoxoFJzS4QNnCH4psUCPWc4/rPrst/ecSJJbLBkubMiyGCP2Kj/9YnWbotFqAoeXyXMucj7gwCFg==", - "dev": true, - "dependencies": { - "ansi-colors": "3.2.3", - "browser-stdout": "1.3.1", - "debug": "3.2.6", - "diff": "3.5.0", - "escape-string-regexp": "1.0.5", - "find-up": "3.0.0", - "glob": "7.1.3", - "growl": "1.10.5", - "he": "1.2.0", - "js-yaml": "3.13.1", - "log-symbols": "2.2.0", - "minimatch": "3.0.4", - "mkdirp": "0.5.1", - "ms": "2.1.1", - "node-environment-flags": "1.0.5", - "object.assign": "4.1.0", - "strip-json-comments": "2.0.1", - "supports-color": "6.0.0", - "which": "1.3.1", - "wide-align": "1.1.3", - "yargs": "13.2.2", - "yargs-parser": "13.0.0", - "yargs-unparser": "1.5.0" - }, - "bin": { - "_mocha": "bin/_mocha", - "mocha": "bin/mocha" - }, - "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/mocha/node_modules/debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. 
(https://github.com/visionmedia/debug/issues/797)", - "dev": true, - "dependencies": { - "ms": "^2.1.1" - } - }, - "node_modules/mocha/node_modules/glob": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", - "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - } - }, - "node_modules/mocha/node_modules/ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", - "dev": true - }, - "node_modules/mocha/node_modules/supports-color": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.0.0.tgz", - "integrity": "sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==", - "dev": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "node_modules/nan": { - "version": "2.22.2", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.22.2.tgz", - "integrity": "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/nanomatch": { - "version": "1.2.13", - "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", - "integrity": 
"sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", - "dev": true, - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "fragment-cache": "^0.2.1", - "is-windows": "^1.0.2", - "kind-of": "^6.0.2", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/nanomatch/node_modules/arr-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", - "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/nanomatch/node_modules/array-unique": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", - "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/nanomatch/node_modules/kind-of": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz", - "integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", - "dev": true - }, - "node_modules/node-environment-flags": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/node-environment-flags/-/node-environment-flags-1.0.5.tgz", - "integrity": "sha512-VNYPRfGfmZLx0Ye20jWzHUjyTW/c+6Wq+iLhDzUI4XmhrDd9l/FozXV3F2xOaXjvp0co0+v1YSR3CMP6g+VvLQ==", - "dev": true, - "dependencies": { - "object.getownpropertydescriptors": "^2.0.3", - "semver": "^5.7.0" - } - }, - 
"node_modules/node-releases": { - "version": "1.1.25", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.25.tgz", - "integrity": "sha512-fI5BXuk83lKEoZDdH3gRhtsNgh05/wZacuXkgbiYkceE7+QIMXOg98n9ZV7mz27B+kFHnqHcUpscZZlGRSmTpQ==", - "dev": true, - "dependencies": { - "semver": "^5.3.0" - } - }, - "node_modules/npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", - "dev": true, - "dependencies": { - "path-key": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", - "integrity": "sha1-fn2Fi3gb18mRpBupde04EnVOmYw=", - "dev": true, - "dependencies": { - "copy-descriptor": "^0.1.0", - "define-property": "^0.2.5", - "kind-of": "^3.0.3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-copy/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": 
"sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object-visit": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", - "integrity": "sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=", - "dev": true, - "dependencies": { - "isobject": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-visit/node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object.assign": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", - "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", - "dev": true, - "dependencies": { - "define-properties": "^1.1.2", - "function-bind": "^1.1.1", - "has-symbols": "^1.0.0", - "object-keys": "^1.0.11" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/object.getownpropertydescriptors": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz", - "integrity": "sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY=", - "dev": true, - "dependencies": { - "define-properties": "^1.1.2", - "es-abstract": "^1.5.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/object.pick": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", - "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", - "dev": true, - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object.pick/node_modules/isobject": { - "version": "3.0.1", - "resolved": 
"https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/os-locale": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-3.1.0.tgz", - "integrity": "sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q==", - "dev": true, - "dependencies": { - "execa": "^1.0.0", - "lcid": "^2.0.0", - "mem": "^4.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/p-defer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-1.0.0.tgz", - "integrity": "sha1-n26xgvbJqozXQwBKfU+WsZaw+ww=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/p-is-promise": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-2.1.0.tgz", - "integrity": "sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/p-limit": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", - "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", - "dev": true, - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/p-locate": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "dependencies": { - "p-limit": "^2.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/pascalcase": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", - "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", - "dev": true - }, - "node_modules/posix-character-classes": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", - "integrity": 
"sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/pretty-bytes": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-4.0.2.tgz", - "integrity": "sha512-yJAF+AjbHKlxQ8eezMd/34Mnj/YTQ3i6kLzvVsH4l/BfIFtp444n0wVbnsn66JimZ9uBofv815aRp1zCppxlWw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/private": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/private/-/private-0.1.8.tgz", - "integrity": "sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg==", - "dev": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "node_modules/redis": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/redis/-/redis-2.8.0.tgz", - "integrity": "sha512-M1OkonEQwtRmZv4tEWF2VgpG0JWJ8Fv1PhlgT5+B+uNq2cA3Rt1Yt/ryoR+vQNOQcIEgdCdfH0jr3bDpihAw1A==", - "dev": true, - "dependencies": { - "double-ended-queue": "^2.1.0-0", - "redis-commands": "^1.2.0", - "redis-parser": "^2.6.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/redis-commands": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.4.0.tgz", - "integrity": "sha512-cu8EF+MtkwI4DLIT0x9P8qNTLFhQD4jLfxLR0cCNkeGzs87FN6879JOJwNQR/1zD7aSYNbU0hgsV9zGY71Itvw==", - "dev": true - }, - "node_modules/redis-errors": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", - "integrity": "sha1-62LSrbFeTq9GEMBK/hUpOEJQq60=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/redis-parser": { - 
"version": "3.0.0", - "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", - "integrity": "sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=", - "dev": true, - "dependencies": { - "redis-errors": "^1.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/redis/node_modules/redis-parser": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-2.6.0.tgz", - "integrity": "sha1-Uu0J2srBCPGmMcB+m2mUHnoZUEs=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/regenerate": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.0.tgz", - "integrity": "sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg==", - "dev": true - }, - "node_modules/regenerate-unicode-properties": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz", - "integrity": "sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA==", - "dev": true, - "dependencies": { - "regenerate": "^1.4.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/regenerator-runtime": { - "version": "0.12.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.12.1.tgz", - "integrity": "sha512-odxIc1/vDlo4iZcfXqRYFj0vpXFNoGdKMAUieAlFYO6m/nl5e9KR/beGf41z4a1FI+aQgtjhuaSlDxQ0hmkrHg==", - "dev": true - }, - "node_modules/regenerator-transform": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.14.0.tgz", - "integrity": "sha512-rtOelq4Cawlbmq9xuMR5gdFmv7ku/sFoB7sRiywx7aq53bc52b4j6zvH7Te1Vt/X2YveDKnCGUbioieU7FEL3w==", - "dev": true, - "dependencies": { - "private": "^0.1.6" - } - }, - "node_modules/regex-not": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", - "integrity": 
"sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", - "dev": true, - "dependencies": { - "extend-shallow": "^3.0.2", - "safe-regex": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/regexp-tree": { - "version": "0.1.11", - "resolved": "https://registry.npmjs.org/regexp-tree/-/regexp-tree-0.1.11.tgz", - "integrity": "sha512-7/l/DgapVVDzZobwMCCgMlqiqyLFJ0cduo/j+3BcDJIB+yJdsYCfKuI3l/04NV+H/rfNRdPIDbXNZHM9XvQatg==", - "dev": true, - "bin": { - "regexp-tree": "bin/regexp-tree" - } - }, - "node_modules/regexpu-core": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-4.5.4.tgz", - "integrity": "sha512-BtizvGtFQKGPUcTy56o3nk1bGRp4SZOTYrDtGNlqCQufptV5IkkLN6Emw+yunAJjzf+C9FQFtvq7IoA3+oMYHQ==", - "dev": true, - "dependencies": { - "regenerate": "^1.4.0", - "regenerate-unicode-properties": "^8.0.2", - "regjsgen": "^0.5.0", - "regjsparser": "^0.6.0", - "unicode-match-property-ecmascript": "^1.0.4", - "unicode-match-property-value-ecmascript": "^1.1.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/regjsgen": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.5.0.tgz", - "integrity": "sha512-RnIrLhrXCX5ow/E5/Mh2O4e/oa1/jW0eaBKTSy3LaCj+M3Bqvm97GWDp2yUtzIs4LEn65zR2yiYGFqb2ApnzDA==", - "dev": true - }, - "node_modules/regjsparser": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.6.0.tgz", - "integrity": "sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ==", - "dev": true, - "dependencies": { - "jsesc": "~0.5.0" - }, - "bin": { - "regjsparser": "bin/parser" - } - }, - "node_modules/regjsparser/node_modules/jsesc": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", - "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=", - "dev": true, - "bin": { - "jsesc": "bin/jsesc" - } - }, - 
"node_modules/repeat-element": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", - "integrity": "sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", - "dev": true, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/require-main-filename": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", - "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", - "dev": true - }, - "node_modules/resolve": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.8.1.tgz", - "integrity": "sha512-AicPrAC7Qu1JxPCZ9ZgCZlY35QgFnNqc+0LtbRNxnVw4TXvjQ72wnuL9JQcEBgXkI9JM8MsT9kaQoHcpCRJOYA==", - "dev": true, - "dependencies": { - "path-parse": "^1.0.5" - } - }, - "node_modules/resolve-url": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", - "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=", - "deprecated": "https://github.com/lydell/resolve-url#deprecated", - "dev": true - }, - "node_modules/ret": { - "version": "0.1.15", - "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", - "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", - "dev": true, - "engines": { - "node": ">=0.12" - } - }, - 
"node_modules/rollup": { - "version": "0.66.6", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-0.66.6.tgz", - "integrity": "sha512-J7/SWanrcb83vfIHqa8+aVVGzy457GcjA6GVZEnD0x2u4OnOd0Q1pCrEoNe8yLwM6z6LZP02zBT2uW0yh5TqOw==", - "dev": true, - "dependencies": { - "@types/estree": "0.0.39", - "@types/node": "*" - }, - "bin": { - "rollup": "bin/rollup" - } - }, - "node_modules/rollup-plugin-babel": { - "version": "4.3.3", - "resolved": "https://registry.npmjs.org/rollup-plugin-babel/-/rollup-plugin-babel-4.3.3.tgz", - "integrity": "sha512-tKzWOCmIJD/6aKNz0H1GMM+lW1q9KyFubbWzGiOG540zxPPifnEAHTZwjo0g991Y+DyOZcLqBgqOdqazYE5fkw==", - "deprecated": "This package has been deprecated and is no longer maintained. Please use @rollup/plugin-babel.", - "dev": true, - "dependencies": { - "@babel/helper-module-imports": "^7.0.0", - "rollup-pluginutils": "^2.8.1" - }, - "peerDependencies": { - "@babel/core": "7 || ^7.0.0-rc.2", - "rollup": ">=0.60.0 <2" - } - }, - "node_modules/rollup-plugin-commonjs": { - "version": "9.3.4", - "resolved": "https://registry.npmjs.org/rollup-plugin-commonjs/-/rollup-plugin-commonjs-9.3.4.tgz", - "integrity": "sha512-DTZOvRoiVIHHLFBCL4pFxOaJt8pagxsVldEXBOn6wl3/V21wVaj17HFfyzTsQUuou3sZL3lEJZVWKPFblJfI6w==", - "deprecated": "This package has been deprecated and is no longer maintained. 
Please use @rollup/plugin-commonjs.", - "dev": true, - "dependencies": { - "estree-walker": "^0.6.0", - "magic-string": "^0.25.2", - "resolve": "^1.10.0", - "rollup-pluginutils": "^2.6.0" - }, - "peerDependencies": { - "rollup": ">=0.56.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/arr-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", - "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/array-unique": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", - "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", - "dev": true, - "dependencies": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/braces/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": 
"sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/estree-walker": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-0.6.0.tgz", - "integrity": "sha512-peq1RfVAVzr3PU/jL31RaOjUKLoZJpObQWJJ+LgfcxDUifyLZ1RjPQZTl0pzj2uJ45b7A7XpyppXvxdEqzo4rw==", - "dev": true - }, - "node_modules/rollup-plugin-commonjs/node_modules/expand-brackets": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", - "integrity": "sha1-t3c14xXOMPa27/D4OwQVGiJEliI=", - "dev": true, - "dependencies": { - "debug": "^2.3.3", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "posix-character-classes": "^0.1.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/expand-brackets/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/expand-brackets/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/expand-brackets/node_modules/is-accessor-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", - "integrity": 
"sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", - "deprecated": "Please upgrade to v0.1.7", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/expand-brackets/node_modules/is-accessor-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/expand-brackets/node_modules/is-data-descriptor": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", - "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", - "deprecated": "Please upgrade to v0.1.5", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/expand-brackets/node_modules/is-data-descriptor/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/expand-brackets/node_modules/is-descriptor": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", - "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^0.1.6", - "is-data-descriptor": "^0.1.4", - "kind-of": "^5.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/expand-brackets/node_modules/kind-of": { - "version": "5.1.0", - 
"resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", - "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/extglob": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", - "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", - "dev": true, - "dependencies": { - "array-unique": "^0.3.2", - "define-property": "^1.0.0", - "expand-brackets": "^2.1.4", - "extend-shallow": "^2.0.1", - "fragment-cache": "^0.2.1", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/extglob/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dev": true, - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/extglob/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/fill-range": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", - "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", - "dev": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-number": "^3.0.0", - "repeat-string": "^1.6.1", - "to-regex-range": "^2.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - 
"node_modules/rollup-plugin-commonjs/node_modules/fill-range/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "deprecated": "Please upgrade to v1.0.1", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "deprecated": "Please upgrade to v1.0.1", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/is-number": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dev": true, - 
"dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/is-number/node_modules/kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "dependencies": { - "is-buffer": "^1.1.5" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/kind-of": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz", - "integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/micromatch": { - "version": "3.1.10", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", - "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", - "dev": true, - "dependencies": { - "arr-diff": "^4.0.0", - "array-unique": "^0.3.2", - "braces": "^2.3.1", - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "extglob": "^2.0.4", - "fragment-cache": "^0.2.1", - "kind-of": "^6.0.2", - "nanomatch": "^1.2.9", - "object.pick": "^1.3.0", - "regex-not": "^1.0.0", - "snapdragon": "^0.8.1", - "to-regex": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - }, - 
"node_modules/rollup-plugin-commonjs/node_modules/resolve": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.10.1.tgz", - "integrity": "sha512-KuIe4mf++td/eFb6wkaPbMDnP6kObCaEtIDuHOUED6MNUo4K670KZUHuuvYPZDxNF0WVLw49n06M2m2dXphEzA==", - "dev": true, - "dependencies": { - "path-parse": "^1.0.6" - } - }, - "node_modules/rollup-plugin-commonjs/node_modules/rollup-pluginutils": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.6.0.tgz", - "integrity": "sha512-aGQwspEF8oPKvg37u3p7h0cYNwmJR1sCBMZGZ5b9qy8HGtETknqjzcxrDRrcAnJNXN18lBH4Q9vZYth/p4n8jQ==", - "dev": true, - "dependencies": { - "estree-walker": "^0.6.0", - "micromatch": "^3.1.10" - } - }, - "node_modules/rollup-plugin-json": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/rollup-plugin-json/-/rollup-plugin-json-3.1.0.tgz", - "integrity": "sha512-BlYk5VspvGpjz7lAwArVzBXR60JK+4EKtPkCHouAWg39obk9S61hZYJDBfMK+oitPdoe11i69TlxKlMQNFC/Uw==", - "deprecated": "This module has been deprecated and is no longer maintained. Please use @rollup/plugin-json.", - "dev": true, - "dependencies": { - "rollup-pluginutils": "^2.3.1" - } - }, - "node_modules/rollup-plugin-node-resolve": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/rollup-plugin-node-resolve/-/rollup-plugin-node-resolve-3.4.0.tgz", - "integrity": "sha512-PJcd85dxfSBWih84ozRtBkB731OjXk0KnzN0oGp7WOWcarAFkVa71cV5hTJg2qpVsV2U8EUwrzHP3tvy9vS3qg==", - "deprecated": "This package has been deprecated and is no longer maintained. 
Please use @rollup/plugin-node-resolve.", - "dev": true, - "dependencies": { - "builtin-modules": "^2.0.0", - "is-module": "^1.0.0", - "resolve": "^1.1.6" - } - }, - "node_modules/rollup-pluginutils": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/rollup-pluginutils/-/rollup-pluginutils-2.8.1.tgz", - "integrity": "sha512-J5oAoysWar6GuZo0s+3bZ6sVZAC0pfqKz68De7ZgDi5z63jOVZn1uJL/+z1jeKHNbGII8kAyHF5q8LnxSX5lQg==", - "dev": true, - "dependencies": { - "estree-walker": "^0.6.1" - } - }, - "node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/safe-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", - "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", - "dev": true, - "dependencies": { - "ret": "~0.1.10" - } - }, - "node_modules/semver": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", - "integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", - "dev": true - }, - "node_modules/set-value": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.0.tgz", - "integrity": "sha512-hw0yxk9GT/Hr5yJEYnHNKYXkIA8mVJgd9ditYZCe16ZczcaELYYcfvaXesNACk2O8O0nTiPQcQhGUQj8JLzeeg==", - "deprecated": "Critical bug fixed in v3.0.1, please upgrade to the latest version.", - "dev": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.3", - "split-string": "^3.0.1" 
- }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/set-value/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "dependencies": { - "shebang-regex": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/signal-exit": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", - "dev": true - }, - "node_modules/snapdragon": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", - "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", - "dev": true, - "dependencies": { - "base": "^0.11.1", - "debug": "^2.2.0", - "define-property": "^0.2.5", - "extend-shallow": "^2.0.1", - "map-cache": "^0.2.2", - "source-map": "^0.5.6", - "source-map-resolve": "^0.5.0", - "use": "^3.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", - "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", - "dev": true, - "dependencies": { - 
"define-property": "^1.0.0", - "isobject": "^3.0.0", - "snapdragon-util": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "dev": true, - "dependencies": { - "is-descriptor": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "deprecated": "Please upgrade to v1.0.1", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "deprecated": "Please upgrade to v1.0.1", - "dev": true, - "dependencies": { - "kind-of": "^6.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "dev": true, - "dependencies": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", 
- "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-node/node_modules/kind-of": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz", - "integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon-util": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", - "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", - "dev": true, - "dependencies": { - "kind-of": "^3.2.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/snapdragon/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/snapdragon/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - }, - 
"node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-resolve": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.2.tgz", - "integrity": "sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA==", - "deprecated": "See https://github.com/lydell/source-map-resolve#deprecated", - "dev": true, - "dependencies": { - "atob": "^2.1.1", - "decode-uri-component": "^0.2.0", - "resolve-url": "^0.2.1", - "source-map-url": "^0.4.0", - "urix": "^0.1.0" - } - }, - "node_modules/source-map-url": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz", - "integrity": "sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=", - "deprecated": "See https://github.com/lydell/source-map-url#deprecated", - "dev": true - }, - "node_modules/sourcemap-codec": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.4.tgz", - "integrity": "sha512-CYAPYdBu34781kLHkaW3m6b/uUSyMOC2R61gcYMWooeuaGtjof86ZA/8T+qVPPt7np1085CR9hmMGrySwEc8Xg==", - "deprecated": "Please use @jridgewell/sourcemap-codec instead", - "dev": true - }, - "node_modules/split-string": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", - "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", - "dev": true, - "dependencies": { - "extend-shallow": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", - "dev": true - }, - 
"node_modules/standard-as-callback": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.0.1.tgz", - "integrity": "sha512-NQOxSeB8gOI5WjSaxjBgog2QFw55FV8TkS6Y07BiB3VJ8xNTvUYm0wl0s8ObgQ5NhdpnNfigMIKjgPESzgr4tg==", - "dev": true - }, - "node_modules/static-extend": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", - "integrity": "sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=", - "dev": true, - "dependencies": { - "define-property": "^0.2.5", - "object-copy": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/static-extend/node_modules/define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", - "dev": true, - "dependencies": { - "is-descriptor": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", - "dev": true, - "dependencies": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "dependencies": { - "ansi-regex": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/strip-json-comments": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/to-object-path": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", - "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-regex": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", - "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", - "dev": true, - "dependencies": { - "define-property": "^2.0.2", - "extend-shallow": "^3.0.2", - "regex-not": "^1.0.2", - "safe-regex": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-regex-range": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", - "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", - "dev": true, - "dependencies": { - "is-number": "^3.0.0", - "repeat-string": "^1.6.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/to-regex-range/node_modules/is-number": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", - "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", - "dev": true, - "dependencies": { - "kind-of": "^3.0.2" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/trim-right": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/trim-right/-/trim-right-1.0.1.tgz", - "integrity": "sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/typescript": { - "version": "4.9.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", - "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=4.2.0" - } - }, - "node_modules/unicode-canonical-property-names-ecmascript": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz", - "integrity": "sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-ecmascript": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz", - "integrity": "sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg==", - "dev": true, - "dependencies": { - "unicode-canonical-property-names-ecmascript": "^1.0.4", - "unicode-property-aliases-ecmascript": "^1.0.4" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-value-ecmascript": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz", - "integrity": 
"sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-property-aliases-ecmascript": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz", - "integrity": "sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/union-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.0.tgz", - "integrity": "sha1-XHHDTLW61dzr4+oM0IIHulqhrqQ=", - "dev": true, - "dependencies": { - "arr-union": "^3.1.0", - "get-value": "^2.0.6", - "is-extendable": "^0.1.1", - "set-value": "^0.4.3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/union-value/node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "dependencies": { - "is-extendable": "^0.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/union-value/node_modules/set-value": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-0.4.3.tgz", - "integrity": "sha1-fbCPnT0i3H945Trzw79GZuzfzPE=", - "deprecated": "Critical bug fixed in v3.0.1, please upgrade to the latest version.", - "dev": true, - "dependencies": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.1", - "to-object-path": "^0.3.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", - "integrity": "sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=", - "dev": true, - "dependencies": { - "has-value": "^0.3.1", - "isobject": 
"^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-value": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", - "integrity": "sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=", - "dev": true, - "dependencies": { - "get-value": "^2.0.3", - "has-values": "^0.1.4", - "isobject": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-value/node_modules/isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "dev": true, - "dependencies": { - "isarray": "1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/has-values": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", - "integrity": "sha1-bWHeldkd/Km5oCCJrThL/49it3E=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/unset-value/node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/urix": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", - "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=", - "deprecated": "Please see https://github.com/lydell/urix#deprecated", - "dev": true - }, - "node_modules/use": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", - "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/util": { - "version": "0.10.3", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", - "integrity": "sha1-evsa/lCAUkZInj23/g7TeTNqwPk=", - "dev": 
true, - "dependencies": { - "inherits": "2.0.1" - } - }, - "node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } - }, - "node_modules/which-module": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", - "dev": true - }, - "node_modules/wide-align": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", - "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", - "dev": true, - "dependencies": { - "string-width": "^1.0.2 || 2" - } - }, - "node_modules/wrap-ansi": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", - "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", - "dev": true, - "dependencies": { - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "dev": true, - "dependencies": { - "number-is-nan": "^1.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wrap-ansi/node_modules/string-width": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "dev": true, - "dependencies": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "dependencies": { - "ansi-regex": "^2.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, - "node_modules/y18n": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", - "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", - "dev": true - }, - "node_modules/yargs": { - "version": "13.2.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.2.2.tgz", - "integrity": "sha512-WyEoxgyTD3w5XRpAQNYUB9ycVH/PQrToaTXdYXRdOXvEy1l19br+VJsc0vcO8PTGg5ro/l/GY7F/JMEBmI0BxA==", - "dev": true, - "dependencies": { - "cliui": "^4.0.0", - "find-up": "^3.0.0", - "get-caller-file": "^2.0.1", - "os-locale": "^3.1.0", - "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^3.0.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^13.0.0" - } - }, - "node_modules/yargs-parser": { - "version": "13.0.0", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.0.0.tgz", - "integrity": "sha512-w2LXjoL8oRdRQN+hOyppuXs+V/fVAYtpcrRxZuF7Kt/Oc+Jr2uAcVntaUTNT6w5ihoWfFDpNY8CPx1QskxZ/pw==", - "dev": true, - "dependencies": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } - }, - 
"node_modules/yargs-unparser": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-1.5.0.tgz", - "integrity": "sha512-HK25qidFTCVuj/D1VfNiEndpLIeJN78aqgR23nL3y4N0U/91cOAzqfHlF8n2BvoNDcZmJKin3ddNSvOxSr8flw==", - "dev": true, - "dependencies": { - "flat": "^4.1.0", - "lodash": "^4.17.11", - "yargs": "^12.0.5" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/yargs-unparser/node_modules/get-caller-file": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", - "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", - "dev": true - }, - "node_modules/yargs-unparser/node_modules/require-main-filename": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", - "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=", - "dev": true - }, - "node_modules/yargs-unparser/node_modules/yargs": { - "version": "12.0.5", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-12.0.5.tgz", - "integrity": "sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw==", - "dev": true, - "dependencies": { - "cliui": "^4.0.0", - "decamelize": "^1.2.0", - "find-up": "^3.0.0", - "get-caller-file": "^1.0.1", - "os-locale": "^3.0.0", - "require-directory": "^2.1.1", - "require-main-filename": "^1.0.1", - "set-blocking": "^2.0.0", - "string-width": "^2.0.0", - "which-module": "^2.0.0", - "y18n": "^3.2.1 || ^4.0.0", - "yargs-parser": "^11.1.1" - } - }, - "node_modules/yargs-unparser/node_modules/yargs-parser": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-11.1.1.tgz", - "integrity": "sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ==", - "dev": true, - "dependencies": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } - }, - 
"node_modules/yargs/node_modules/ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/yargs/node_modules/string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "dependencies": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/yargs/node_modules/strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "dependencies": { - "ansi-regex": "^4.1.0" - }, - "engines": { - "node": ">=6" - } - } - } -} diff --git a/package.json b/package.json index 3741d1e..62161bc 100644 --- a/package.json +++ b/package.json @@ -1,27 +1,35 @@ { "name": "@sderrow/bottleneck", - "version": "3.0.7", + "version": "4.0.0-rc.0", "description": "Distributed task scheduler and rate limiter", + "repository": { + "type": "git", + "url": "git+https://github.com/sderrow/bottleneck.git" + }, + "bugs": { + "url": "https://github.com/sderrow/bottleneck/issues" + }, + "author": { + "name": "Simon Grondin" + }, + "contributors": [ + { + "name": "Sean Derrow" + } + ], + "license": "MIT", + "packageManager": "yarn@4.12.0", "main": "lib/index.js", - "typings": "bottleneck.d.ts", + "types": "bottleneck.d.ts", "files": [ - "lib/", + "bottleneck.d.ts", "es5.js", - "light.js", + "lib/", "light.d.ts", - "bottleneck.d.ts", + "light.js", "README.md", "LICENSE" ], - "scripts": { - "build": 
"./scripts/build.sh", - "test": "mocha test", - "test-all": "./scripts/test_all.sh" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/sderrow/bottleneck.git" - }, "keywords": [ "async rate limiter", "rate limiter", @@ -36,35 +44,34 @@ "load", "clustering" ], - "author": { - "name": "Simon Grondin" - }, - "contributors": [ - { - "name": "Sean Derrow" - } - ], - "license": "MIT", - "bugs": { - "url": "https://github.com/sderrow/bottleneck/issues" + "scripts": { + "build": "./scripts/build.sh", + "check-types": "tsc --noEmit --strict test.ts", + "lint": "eslint . --max-warnings 0", + "postinstall": "husky", + "test": "mocha --timeout=10000 test", + "test-all": "./scripts/test_all.sh" }, "devDependencies": { - "@babel/core": "^7.5.0", - "@babel/preset-env": "^7.5.0", + "@eslint/js": "^9.34.0", + "@rollup/plugin-commonjs": "^28.0.6", + "@rollup/plugin-json": "^6.1.0", + "@rollup/plugin-node-resolve": "^16.0.1", "@token-cjg/leakage": "^0.6.0", - "@types/es6-promise": "0.0.33", - "assert": "^1.5.0", - "coffeescript": "2.4.x", - "ioredis": "^4.11.1", - "mocha": "^6.1.4", + "@types/node": "^22.18.0", + "eslint": "^9.34.0", + "eslint-config-prettier": "^10.1.8", + "globals": "^16.3.0", + "husky": "^9.1.7", + "ioredis": "^5.7.0", + "lint-staged": "^16.1.5", + "mocha": "^11.7.1", + "prettier": "^3.6.2", + "prettier-plugin-organize-imports": "^4.2.0", + "prettier-plugin-pkg": "^0.21.2", + "prettier-plugin-sh": "^0.18.0", "redis": "^2.8.0", - "regenerator-runtime": "^0.12.1", - "rollup": "^0.66.6", - "rollup-plugin-babel": "^4.3.3", - "rollup-plugin-commonjs": "^9.3.4", - "rollup-plugin-json": "^3.1.0", - "rollup-plugin-node-resolve": "^3.4.0", - "typescript": "^4.0.0" - }, - "dependencies": {} + "rollup": "^4.48.1", + "typescript": "^5.9.2" + } } diff --git a/lib/lua.json b/ref/lua.json similarity index 100% rename from lib/lua.json rename to ref/lua.json diff --git a/rollup.config.es5.js b/rollup.config.es5.js deleted file mode 100644 index 
8b0483e..0000000 --- a/rollup.config.es5.js +++ /dev/null @@ -1,34 +0,0 @@ -import json from 'rollup-plugin-json'; -import resolve from 'rollup-plugin-node-resolve'; -import commonjs from 'rollup-plugin-commonjs'; -import babel from 'rollup-plugin-babel'; - -const bannerLines = [ - 'This file contains the full Bottleneck library (MIT) compiled to ES5.', - 'https://github.com/SGrondin/bottleneck', - 'It also contains the regenerator-runtime (MIT), necessary for Babel-generated ES5 code to execute promise and async/await code.', - 'See the following link for Copyright and License information:', - 'https://github.com/facebook/regenerator/blob/master/packages/regenerator-runtime/runtime.js', -].map(x => ` * ${x}`).join('\n'); -const banner = `/**\n${bannerLines}\n */`; - -export default { - input: 'lib/es5.js', - output: { - name: 'Bottleneck', - file: 'es5.js', - sourcemap: false, - globals: {}, - format: 'umd', - banner - }, - external: [], - plugins: [ - json(), - resolve(), - commonjs(), - babel({ - exclude: 'node_modules/**' - }) - ] -}; diff --git a/rollup.config.light.js b/rollup.config.light.js deleted file mode 100644 index 6a72c70..0000000 --- a/rollup.config.light.js +++ /dev/null @@ -1,44 +0,0 @@ -import commonjs from 'rollup-plugin-commonjs'; -import json from 'rollup-plugin-json'; -import resolve from 'rollup-plugin-node-resolve'; - -const bannerLines = [ - 'This file contains the Bottleneck library (MIT), compiled to ES2017, and without Clustering support.', - 'https://github.com/SGrondin/bottleneck', -].map(x => ` * ${x}`).join('\n'); -const banner = `/**\n${bannerLines}\n */`; - -const missing = `export default () => console.log('You must import the full version of Bottleneck in order to use this feature.');`; -const exclude = [ - 'RedisDatastore.js', - 'RedisConnection.js', - 'IORedisConnection.js', - 'Scripts.js' -]; - -export default { - input: 'lib/index.js', - output: { - name: 'Bottleneck', - file: 'light.js', - sourcemap: false, - globals: {}, - 
format: 'umd', - banner - }, - external: [], - plugins: [ - json(), - { - load: id => { - const chunks = id.split('/'); - const file = chunks[chunks.length - 1]; - if (exclude.indexOf(file) >= 0) { - return missing - } - } - }, - resolve(), - commonjs() - ] -}; diff --git a/rollup.config.light.mjs b/rollup.config.light.mjs new file mode 100644 index 0000000..f6000f5 --- /dev/null +++ b/rollup.config.light.mjs @@ -0,0 +1,44 @@ +import commonjs from "@rollup/plugin-commonjs"; +import json from "@rollup/plugin-json"; +import resolve from "@rollup/plugin-node-resolve"; +import { defineConfig } from "rollup"; + +const bannerLines = [ + "This file contains the Bottleneck library (MIT) without Clustering support.", + "https://github.com/sderrow/bottleneck", +] + .map((x) => ` * ${x}`) + .join("\n"); +const banner = `/* eslint-disable no-undef */\n/**\n${bannerLines}\n */`; + +const missing = `export default () => console.log('You must import the full version of Bottleneck in order to use this feature.');`; +const exclude = ["RedisDatastore.js", "RedisConnection.js", "IORedisConnection.js", "Scripts.js"]; + +export default defineConfig({ + input: "lib/index.js", + output: { + name: "Bottleneck", + file: "light.js", + sourcemap: false, + globals: {}, + format: "umd", + banner, + }, + plugins: [ + json(), + { + name: "exclude-clustering", + load: (id) => { + const chunks = id.split("/"); + const file = chunks[chunks.length - 1]; + if (exclude.includes(file)) { + return missing; + } + }, + }, + resolve({ + preferBuiltins: true, + }), + commonjs(), + ], +}); diff --git a/scripts/assemble_lua.js b/scripts/assemble_lua.js index eb7a93b..bce3931 100644 --- a/scripts/assemble_lua.js +++ b/scripts/assemble_lua.js @@ -1,25 +1,25 @@ -var fs = require('fs') +var fs = require("fs"); -var input = __dirname + '/../src/redis' -var loaded = {} +var input = __dirname + "/../src/redis"; +var loaded = {}; var promises = fs.readdirSync(input).map(function (file) { return new Promise(function 
(resolve, reject) { - fs.readFile(input + '/' + file, function (err, data) { + fs.readFile(input + "/" + file, function (err, data) { if (err != null) { - return reject(err) + return reject(err); } - loaded[file] = data.toString('utf8') - return resolve() - }) - }) -}) + loaded[file] = data.toString("utf8"); + return resolve(); + }); + }); +}); Promise.all(promises) -.then(function () { - console.log(JSON.stringify(loaded, Object.keys(loaded).sort(), 2)) -}) -.catch(function (err) { - console.error(err) - process.exit(1) -}) + .then(function () { + console.log(JSON.stringify(loaded, Object.keys(loaded).sort(), 2)); + }) + .catch(function (err) { + console.error(err); + process.exit(1); + }); diff --git a/scripts/build.sh b/scripts/build.sh index ed36410..d1150a3 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -1,83 +1,51 @@ #!/usr/bin/env bash - set -e if [ ! -d node_modules ]; then - echo "[B] Run 'npm install' first" - exit 1 + echo "[B] Run 'yarn' first" + exit 1 fi - clean() { - rm -f .babelrc + mkdir -p ref + mkdir -p lib + rm -rf ref/* rm -rf lib/* - node scripts/version.js > lib/version.json - node scripts/assemble_lua.js > lib/lua.json + node scripts/assemble_lua.js > ref/lua.json } -makeLib10() { - echo '[B] Compiling Bottleneck to Node 10+...' - npx coffee --compile --bare --no-header src/*.coffee - mv src/*.js lib/ -} - -makeLib6() { - echo '[B] Compiling Bottleneck to Node 6+...' - ln -s .babelrc.lib .babelrc - npx coffee --compile --bare --no-header --transpile src/*.coffee - mv src/*.js lib/ -} - -makeES5() { - echo '[B] Compiling Bottleneck to ES5...' - ln -s .babelrc.es5 .babelrc - npx coffee --compile --bare --no-header src/*.coffee - mv src/*.js lib/ - - echo '[B] Assembling ES5 bundle...' - npx rollup -c rollup.config.es5.js +makeLib() { + echo '[B] Transpiling source code...' + yarn tsc } makeLight() { - makeLib10 - echo '[B] Assembling light bundle...' 
- npx rollup -c rollup.config.light.js + yarn rollup -c rollup.config.light.mjs } makeTypings() { echo '[B] Compiling and testing TS typings...' cp bottleneck.d.ts light.d.ts sed -i '' '1s/"bottleneck"/"bottleneck\/light"/' light.d.ts - npx tsc --noEmit --strict test.ts + yarn check-types } -if [ "$1" = 'dev' ]; then - clean - makeLib10 -elif [ "$1" = 'bench' ]; then - clean - makeLib6 -elif [ "$1" = 'es5' ]; then - clean - makeES5 -elif [ "$1" = 'light' ]; then +if [ "$1" = 'light' ]; then clean makeLight +elif [ "$1" = 'lib' ]; then + clean + makeLib elif [ "$1" = 'typings' ]; then makeTypings else clean - makeES5 - - clean + makeLib makeLight - - clean - makeLib6 makeTypings + echo "[B] Checking code formatting..." + yarn prettier --log-level=warn --write . fi -rm -f .babelrc - -echo '[B] Done!' +echo "[B] Build complete" diff --git a/scripts/test_all.sh b/scripts/test_all.sh index afc6892..dd9afb8 100755 --- a/scripts/test_all.sh +++ b/scripts/test_all.sh @@ -5,16 +5,13 @@ set -e source .env echo 'ioredis tests' -DATASTORE=ioredis npm test +DATASTORE=ioredis yarn test echo 'NodeRedis tests' -DATASTORE=redis npm test - -echo 'ES5 bundle tests' -BUILD=es5 npm test +DATASTORE=redis yarn test echo 'Light bundle tests' -BUILD=light npm test +BUILD=light yarn test echo 'Local tests' -npm test +yarn test diff --git a/scripts/version.js b/scripts/version.js deleted file mode 100644 index 75671da..0000000 --- a/scripts/version.js +++ /dev/null @@ -1,3 +0,0 @@ -const packagejson = require('../package.json') - -console.log(JSON.stringify({version: packagejson.version})) diff --git a/src/Batcher.coffee b/src/Batcher.coffee deleted file mode 100644 index 5ddd66d..0000000 --- a/src/Batcher.coffee +++ /dev/null @@ -1,39 +0,0 @@ -parser = require "./parser" -Events = require "./Events" - -class Batcher - defaults: - maxTime: null - maxSize: null - Promise: Promise - - constructor: (@options={}) -> - parser.load @options, @defaults, @ - @Events = new Events @ - @_arr = [] - 
@_resetPromise() - @_lastFlush = Date.now() - - _resetPromise: -> - @_promise = new @Promise (res, rej) => @_resolve = res - - _flush: -> - clearTimeout @_timeout - @_lastFlush = Date.now() - @_resolve() - @Events.trigger "batch", @_arr - @_arr = [] - @_resetPromise() - - add: (data) -> - @_arr.push data - ret = @_promise - if @_arr.length == @maxSize - @_flush() - else if @maxTime? and @_arr.length == 1 - @_timeout = setTimeout => - @_flush() - , @maxTime - ret - -module.exports = Batcher diff --git a/src/Batcher.js b/src/Batcher.js new file mode 100644 index 0000000..5b050a3 --- /dev/null +++ b/src/Batcher.js @@ -0,0 +1,45 @@ +const parser = require("./parser"); +const Events = require("./Events"); + +class Batcher { + defaults = { maxTime: null, maxSize: null }; + + constructor(options) { + this.options = options ?? {}; + parser.load(this.options, this.defaults, this); + this.Events = new Events(this); + this._arr = []; + this._resetPromise(); + this._lastFlush = Date.now(); + } + + _resetPromise() { + this._promise = new Promise((res) => { + this._resolve = res; + }); + } + + _flush() { + clearTimeout(this._timeout); + this._lastFlush = Date.now(); + this._resolve(); + this.Events.trigger("batch", this._arr); + this._arr = []; + this._resetPromise(); + } + + add(data) { + this._arr.push(data); + const existingPromise = this._promise; + if (this._arr.length === this.maxSize) { + this._flush(); + } else if (this.maxTime != null && this._arr.length === 1) { + this._timeout = setTimeout(() => { + this._flush(); + }, this.maxTime); + } + return existingPromise; + } +} + +module.exports = Batcher; diff --git a/src/Bottleneck.coffee b/src/Bottleneck.coffee deleted file mode 100644 index 37db2be..0000000 --- a/src/Bottleneck.coffee +++ /dev/null @@ -1,298 +0,0 @@ -NUM_PRIORITIES = 10 -DEFAULT_PRIORITY = 5 - -parser = require "./parser" -Queues = require "./Queues" -Job = require "./Job" -LocalDatastore = require "./LocalDatastore" -RedisDatastore = require 
"./RedisDatastore" -Events = require "./Events" -States = require "./States" -Sync = require "./Sync" - -class Bottleneck - Bottleneck.default = Bottleneck - Bottleneck.Events = Events - Bottleneck.version = Bottleneck::version = require("./version.json").version - Bottleneck.strategy = Bottleneck::strategy = { LEAK:1, OVERFLOW:2, OVERFLOW_PRIORITY:4, BLOCK:3 } - Bottleneck.BottleneckError = Bottleneck::BottleneckError = require "./BottleneckError" - Bottleneck.Group = Bottleneck::Group = require "./Group" - Bottleneck.RedisConnection = Bottleneck::RedisConnection = require "./RedisConnection" - Bottleneck.IORedisConnection = Bottleneck::IORedisConnection = require "./IORedisConnection" - Bottleneck.Batcher = Bottleneck::Batcher = require "./Batcher" - jobDefaults: - priority: DEFAULT_PRIORITY - weight: 1 - expiration: null - id: "" - storeDefaults: - maxConcurrent: null - minTime: 0 - highWater: null - strategy: Bottleneck::strategy.LEAK - penalty: null - reservoir: null - reservoirRefreshInterval: null - reservoirRefreshAmount: null - reservoirIncreaseInterval: null - reservoirIncreaseAmount: null - reservoirIncreaseMaximum: null - localStoreDefaults: - Promise: Promise - timeout: null - heartbeatInterval: 250 - redisStoreDefaults: - Promise: Promise - timeout: null - heartbeatInterval: 5000 - clientTimeout: 10000 - Redis: null - clientOptions: {} - clusterNodes: null - clearDatastore: false - connection: null - instanceDefaults: - datastore: "local" - connection: null - id: "" - rejectOnDrop: true - trackDoneStatus: false - Promise: Promise - stopDefaults: - enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs." - dropWaitingJobs: true - dropErrorMessage: "This limiter has been stopped." - - constructor: (options={}, invalid...) 
-> - @_validateOptions options, invalid - parser.load options, @instanceDefaults, @ - @_queues = new Queues NUM_PRIORITIES - @_scheduled = {} - @_states = new States ["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(if @trackDoneStatus then ["DONE"] else []) - @_limiter = null - @Events = new Events @ - @_submitLock = new Sync "submit", @Promise - @_registerLock = new Sync "register", @Promise - storeOptions = parser.load options, @storeDefaults, {} - - @_store = if @datastore == "redis" or @datastore == "ioredis" or @connection? - storeInstanceOptions = parser.load options, @redisStoreDefaults, {} - new RedisDatastore @, storeOptions, storeInstanceOptions - else if @datastore == "local" - storeInstanceOptions = parser.load options, @localStoreDefaults, {} - new LocalDatastore @, storeOptions, storeInstanceOptions - else - throw new Bottleneck::BottleneckError "Invalid datastore type: #{@datastore}" - - @_queues.on "leftzero", => @_store.heartbeat?.ref?() - @_queues.on "zero", => @_store.heartbeat?.unref?() - - _validateOptions: (options, invalid) -> - unless options? and typeof options == "object" and invalid.length == 0 - throw new Bottleneck::BottleneckError "Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1." 
- - ready: -> @_store.ready - - clients: -> @_store.clients - - channel: -> "b_#{@id}" - - channel_client: -> "b_#{@id}_#{@_store.clientId}" - - publish: (message) -> @_store.__publish__ message - - disconnect: (flush=true) -> @_store.__disconnect__ flush - - chain: (@_limiter) -> @ - - queued: (priority) -> @_queues.queued priority - - clusterQueued: -> @_store.__queued__() - - empty: -> @queued() == 0 and @_submitLock.isEmpty() - - running: -> @_store.__running__() - - done: -> @_store.__done__() - - jobStatus: (id) -> @_states.jobStatus id - - jobs: (status) -> @_states.statusJobs status - - counts: -> @_states.statusCounts() - - _randomIndex: -> Math.random().toString(36).slice(2) - - check: (weight=1) -> @_store.__check__ weight - - _clearGlobalState: (index) -> - if @_scheduled[index]? - clearTimeout @_scheduled[index].expiration - delete @_scheduled[index] - true - else false - - _free: (index, job, options, eventInfo) -> - try - { running } = await @_store.__free__ index, options.weight - @Events.trigger "debug", "Freed #{options.id}", eventInfo - if running == 0 and @empty() then @Events.trigger "idle" - catch e - @Events.trigger "error", e - - _run: (index, job, wait) -> - job.doRun() - clearGlobalState = @_clearGlobalState.bind @, index - run = @_run.bind @, index, job - free = @_free.bind @, index, job - - @_scheduled[index] = - timeout: setTimeout => - job.doExecute @_limiter, clearGlobalState, run, free - , wait - expiration: if job.options.expiration? then setTimeout -> - job.doExpire clearGlobalState, run, free - , wait + job.options.expiration - job: job - - _drainOne: (capacity) -> - @_registerLock.schedule => - if @queued() == 0 then return @Promise.resolve null - queue = @_queues.getFirst() - { options, args } = next = queue.first() - if capacity? 
and options.weight > capacity then return @Promise.resolve null - @Events.trigger "debug", "Draining #{options.id}", { args, options } - index = @_randomIndex() - @_store.__register__ index, options.weight, options.expiration - .then ({ success, wait, reservoir }) => - @Events.trigger "debug", "Drained #{options.id}", { success, args, options } - if success - queue.shift() - empty = @empty() - if empty then @Events.trigger "empty" - if reservoir == 0 then @Events.trigger "depleted", empty - @_run index, next, wait - @Promise.resolve options.weight - else - @Promise.resolve null - - _drainAll: (capacity, total=0) -> - @_drainOne(capacity) - .then (drained) => - if drained? - newCapacity = if capacity? then capacity - drained else capacity - @_drainAll(newCapacity, total + drained) - else @Promise.resolve total - .catch (e) => @Events.trigger "error", e - - _dropAllQueued: (message) -> @_queues.shiftAll (job) -> job.doDrop { message } - - stop: (options={}) -> - options = parser.load options, @stopDefaults - waitForExecuting = (at) => - finished = => - counts = @_states.counts - (counts[0] + counts[1] + counts[2] + counts[3]) == at - new @Promise (resolve, reject) => - if finished() then resolve() - else - @on "done", => - if finished() - @removeAllListeners "done" - resolve() - done = if options.dropWaitingJobs - @_run = (index, next) -> next.doDrop { message: options.dropErrorMessage } - @_drainOne = => @Promise.resolve null - @_registerLock.schedule => @_submitLock.schedule => - for k, v of @_scheduled - if @jobStatus(v.job.options.id) == "RUNNING" - clearTimeout v.timeout - clearTimeout v.expiration - v.job.doDrop { message: options.dropErrorMessage } - @_dropAllQueued options.dropErrorMessage - waitForExecuting(0) - else - @schedule { priority: NUM_PRIORITIES - 1, weight: 0 }, => waitForExecuting(1) - @_receive = (job) -> job._reject new Bottleneck::BottleneckError options.enqueueErrorMessage - @stop = => @Promise.reject new Bottleneck::BottleneckError "stop() 
has already been called" - done - - _addToQueue: (job) => - { args, options } = job - try - { reachedHWM, blocked, strategy } = await @_store.__submit__ @queued(), options.weight - catch error - @Events.trigger "debug", "Could not queue #{options.id}", { args, options, error } - job.doDrop { error } - return false - - if blocked - job.doDrop() - return true - else if reachedHWM - shifted = if strategy == Bottleneck::strategy.LEAK then @_queues.shiftLastFrom(options.priority) - else if strategy == Bottleneck::strategy.OVERFLOW_PRIORITY then @_queues.shiftLastFrom(options.priority + 1) - else if strategy == Bottleneck::strategy.OVERFLOW then job - if shifted? then shifted.doDrop() - if not shifted? or strategy == Bottleneck::strategy.OVERFLOW - if not shifted? then job.doDrop() - return reachedHWM - - job.doQueue reachedHWM, blocked - @_queues.push job - await @_drainAll() - reachedHWM - - _receive: (job) -> - if @_states.jobStatus(job.options.id)? - job._reject new Bottleneck::BottleneckError "A job with the same id already exists (id=#{job.options.id})" - false - else - job.doReceive() - @_submitLock.schedule @_addToQueue, job - - submit: (args...) -> - if typeof args[0] == "function" - [fn, args..., cb] = args - options = parser.load {}, @jobDefaults - else - [options, fn, args..., cb] = args - options = parser.load options, @jobDefaults - - task = (args...) => - new @Promise (resolve, reject) -> - fn args..., (args...) -> - (if args[0]? then reject else resolve) args - - job = new Job task, args, options, @jobDefaults, @rejectOnDrop, @Events, @_states, @Promise - job.promise - .then (args) -> cb? args... - .catch (args) -> if Array.isArray args then cb? args... else cb? args - @_receive job - - schedule: (args...) -> - if typeof args[0] == "function" - [task, args...] = args - options = {} - else - [options, task, args...] 
= args - job = new Job task, args, options, @jobDefaults, @rejectOnDrop, @Events, @_states, @Promise - @_receive job - job.promise - - wrap: (fn) -> - schedule = @schedule.bind @ - wrapped = (args...) -> schedule fn.bind(@), args... - wrapped.withOptions = (options, args...) -> schedule options, fn, args... - wrapped - - updateSettings: (options={}) -> - await @_store.__updateSettings__ parser.overwrite options, @storeDefaults - parser.overwrite options, @instanceDefaults, @ - @ - - currentReservoir: -> @_store.__currentReservoir__() - - incrementReservoir: (incr=0) -> @_store.__incrementReservoir__ incr - -module.exports = Bottleneck diff --git a/src/Bottleneck.js b/src/Bottleneck.js new file mode 100644 index 0000000..e23dd88 --- /dev/null +++ b/src/Bottleneck.js @@ -0,0 +1,480 @@ +const NUM_PRIORITIES = 10; +const DEFAULT_PRIORITY = 5; + +const parser = require("./parser"); +const Queues = require("./Queues"); +const Job = require("./Job"); +const LocalDatastore = require("./LocalDatastore"); +const RedisDatastore = require("./RedisDatastore"); +const Events = require("./Events"); +const States = require("./States"); +const Sync = require("./Sync"); +const BottleneckError = require("./BottleneckError"); +const Group = require("./Group"); +const RedisConnection = require("./RedisConnection"); +const IORedisConnection = require("./IORedisConnection"); +const Batcher = require("./Batcher"); +const version = require("../package.json").version; +class Bottleneck { + static BottleneckError = BottleneckError; + static Group = Group; + static RedisConnection = RedisConnection; + static IORedisConnection = IORedisConnection; + static Batcher = Batcher; + static Events = Events; + static strategy = { + LEAK: 1, + OVERFLOW: 2, + OVERFLOW_PRIORITY: 4, + BLOCK: 3, + }; + + version = version; + jobDefaults = { + priority: DEFAULT_PRIORITY, + weight: 1, + expiration: null, + id: "", + }; + storeDefaults = { + maxConcurrent: null, + minTime: 0, + highWater: null, + strategy: 
Bottleneck.strategy.LEAK, + penalty: null, + reservoir: null, + reservoirRefreshInterval: null, + reservoirRefreshAmount: null, + reservoirIncreaseInterval: null, + reservoirIncreaseAmount: null, + reservoirIncreaseMaximum: null, + }; + localStoreDefaults = { + Promise, + timeout: null, + heartbeatInterval: 250, + }; + redisStoreDefaults = { + Promise, + timeout: null, + heartbeatInterval: 5000, + clientTimeout: 10000, + Redis: null, + clientOptions: {}, + clusterNodes: null, + clearDatastore: false, + connection: null, + }; + instanceDefaults = { + datastore: "local", + connection: null, + id: "", + rejectOnDrop: true, + trackDoneStatus: false, + Promise, + }; + stopDefaults = { + enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.", + dropWaitingJobs: true, + dropErrorMessage: "This limiter has been stopped.", + }; + + constructor(options, ...invalid) { + this._addToQueue = this._addToQueue.bind(this); + options ??= {}; + this._validateOptions(options, invalid); + parser.load(options, this.instanceDefaults, this); + this._queues = new Queues(NUM_PRIORITIES); + this._scheduled = {}; + this._states = new States( + ["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? 
["DONE"] : []), + ); + this._limiter = null; + this.Events = new Events(this); + this._submitLock = new Sync("submit"); + this._registerLock = new Sync("register"); + const storeOptions = parser.load(options, this.storeDefaults, {}); + + if (this.datastore === "redis" || this.datastore === "ioredis" || this.connection != null) { + const opts = parser.load(options, this.redisStoreDefaults, {}); + this._store = new RedisDatastore(this, storeOptions, opts); + } else if (this.datastore === "local") { + const opts = parser.load(options, this.localStoreDefaults, {}); + this._store = new LocalDatastore(this, storeOptions, opts); + } else { + throw new BottleneckError(`Invalid datastore type: ${this.datastore}`); + } + + this._queues.on("leftzero", () => this._store.heartbeat?.ref?.()); + this._queues.on("zero", () => this._store.heartbeat?.unref?.()); + } + + _validateOptions(options, invalid) { + if (options == null || typeof options !== "object" || invalid.length !== 0) { + throw new BottleneckError( + "Bottleneck v2 takes a single object argument. 
Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1.", + ); + } + } + + ready() { + return this._store.ready; + } + + clients() { + return this._store.clients; + } + + channel() { + return `b_${this.id}`; + } + + channel_client() { + return `b_${this.id}_${this._store.clientId}`; + } + + publish(message) { + return this._store.__publish__(message); + } + + async disconnect(flush = true) { + await this._store.__disconnect__(flush); + } + + chain(_limiter) { + this._limiter = _limiter; + return this; + } + + queued(priority) { + return this._queues.queued(priority); + } + + clusterQueued() { + return this._store.__queued__(); + } + + empty() { + return this.queued() === 0 && this._submitLock.isEmpty(); + } + + running() { + return this._store.__running__(); + } + + done() { + return this._store.__done__(); + } + + jobStatus(id) { + return this._states.jobStatus(id); + } + + jobs(status) { + return this._states.statusJobs(status); + } + + counts() { + return this._states.statusCounts(); + } + + _randomIndex() { + return Math.random().toString(36).slice(2); + } + + check(weight = 1) { + return this._store.__check__(weight); + } + + _clearGlobalState(index) { + if (this._scheduled[index] != null) { + clearTimeout(this._scheduled[index].expiration); + delete this._scheduled[index]; + return true; + } else { + return false; + } + } + + async _free(index, job, options, eventInfo) { + try { + const { running } = await this._store.__free__(index, options.weight); + this.Events.trigger("debug", `Freed ${options.id}`, eventInfo); + if (running === 0 && this.empty()) { + return this.Events.trigger("idle"); + } + } catch (e) { + return this.Events.trigger("error", e); + } + } + + _run(index, job, wait) { + job.doRun(); + const clearGlobalState = this._clearGlobalState.bind(this, index); + const run = this._run.bind(this, index, job); + const free = this._free.bind(this, index, job); + + return (this._scheduled[index] = { + 
timeout: setTimeout(() => { + return job.doExecute(this._limiter, clearGlobalState, run, free); + }, wait), + expiration: + job.options.expiration != null + ? setTimeout( + () => job.doExpire(clearGlobalState, run, free), + wait + job.options.expiration, + ) + : undefined, + job, + }); + } + + async _drainOne(capacity) { + return this._registerLock.schedule(async () => { + let next; + if (this.queued() === 0) { + return null; + } + const queue = this._queues.getFirst(); + const { options, args } = (next = queue.first()); + if (capacity != null && options.weight > capacity) { + return null; + } + this.Events.trigger("debug", `Draining ${options.id}`, { args, options }); + const index = this._randomIndex(); + + const { success, wait, reservoir } = await this._store.__register__( + index, + options.weight, + options.expiration, + ); + + this.Events.trigger("debug", `Drained ${options.id}`, { success, args, options }); + + if (success) { + queue.shift(); + const empty = this.empty(); + if (empty) { + this.Events.trigger("empty"); + } + if (reservoir === 0) { + this.Events.trigger("depleted", empty); + } + this._run(index, next, wait); + return options.weight; + } else { + return null; + } + }); + } + + async _drainAll(capacity, total = 0) { + try { + const drained = await this._drainOne(capacity); + if (drained != null) { + const newCapacity = capacity != null ? 
capacity - drained : capacity; + return this._drainAll(newCapacity, total + drained); + } else { + return total; + } + } catch (e) { + this.Events.trigger("error", e); + } + } + + _dropAllQueued(message) { + return this._queues.shiftAll((job) => job.doDrop({ message })); + } + + stop(options) { + options ??= {}; + options = parser.load(options, this.stopDefaults); + + const waitForExecuting = (at) => { + const finished = () => { + const { counts } = this._states; + return counts[0] + counts[1] + counts[2] + counts[3] === at; + }; + return new Promise((resolve) => { + if (finished()) { + resolve(); + } else { + this.on("done", () => { + if (finished()) { + this.removeAllListeners("done"); + resolve(); + } + }); + } + }); + }; + + let done; + if (options.dropWaitingJobs) { + this._run = (index, next) => next.doDrop({ message: options.dropErrorMessage }); + this._drainOne = () => this.Promise.resolve(null); + done = this._registerLock.schedule(() => + this._submitLock.schedule(() => { + for (const v of Object.values(this._scheduled)) { + if (this.jobStatus(v.job.options.id) === "RUNNING") { + clearTimeout(v.timeout); + clearTimeout(v.expiration); + v.job.doDrop({ message: options.dropErrorMessage }); + } + } + this._dropAllQueued(options.dropErrorMessage); + return waitForExecuting(0); + }), + ); + } else { + done = this.schedule({ priority: NUM_PRIORITIES - 1, weight: 0 }, () => waitForExecuting(1)); + } + + this._receive = (job) => job._reject(new BottleneckError(options.enqueueErrorMessage)); + this.stop = () => this.Promise.reject(new BottleneckError("stop() has already been called")); + + return done; + } + + async _addToQueue(job) { + let blocked, reachedHWM, strategy; + const { args, options } = job; + try { + ({ reachedHWM, blocked, strategy } = await this._store.__submit__( + this.queued(), + options.weight, + )); + } catch (error) { + this.Events.trigger("debug", `Could not queue ${options.id}`, { args, options, error }); + job.doDrop({ error }); + return 
false; + } + + if (blocked) { + job.doDrop(); + return true; + } else if (reachedHWM) { + let shifted; + if (strategy === Bottleneck.strategy.LEAK) { + shifted = this._queues.shiftLastFrom(options.priority); + } else if (strategy === Bottleneck.strategy.OVERFLOW_PRIORITY) { + shifted = this._queues.shiftLastFrom(options.priority + 1); + } else if (strategy === Bottleneck.strategy.OVERFLOW) { + shifted = job; + } + if (shifted != null) { + shifted.doDrop(); + } + if (shifted == null || strategy === Bottleneck.strategy.OVERFLOW) { + if (shifted == null) { + job.doDrop(); + } + return reachedHWM; + } + } + + job.doQueue(reachedHWM, blocked); + this._queues.push(job); + await this._drainAll(); + return reachedHWM; + } + + _receive(job) { + if (this._states.jobStatus(job.options.id) != null) { + job._reject( + new BottleneckError(`A job with the same id already exists (id=${job.options.id})`), + ); + return false; + } else { + job.doReceive(); + return this._submitLock.schedule(this._addToQueue, job); + } + } + + submit(...args) { + let cb, fn, options; + if (typeof args[0] === "function") { + cb = args.pop(); + [fn, ...args] = args; + options = parser.load({}, this.jobDefaults); + } else { + cb = args.pop(); + [options, fn, ...args] = args; + options = parser.load(options, this.jobDefaults); + } + + const task = (...args) => { + return new Promise((resolve, reject) => + fn(...args, (...args) => (args[0] != null ? reject : resolve)(args)), + ); + }; + + const job = new Job( + task, + args, + options, + this.jobDefaults, + this.rejectOnDrop, + this.Events, + this._states, + ); + job.promise + .then((args) => (typeof cb === "function" ? cb(...(args || [])) : undefined)) + .catch(function (args) { + if (Array.isArray(args)) { + return typeof cb === "function" ? cb(...args) : undefined; + } else { + return typeof cb === "function" ? 
cb(args) : undefined; + } + }); + return this._receive(job); + } + + schedule(...args) { + let options, task; + if (typeof args[0] === "function") { + [task, ...args] = args; + options = {}; + } else { + [options, task, ...args] = args; + } + const job = new Job( + task, + args, + options, + this.jobDefaults, + this.rejectOnDrop, + this.Events, + this._states, + ); + this._receive(job); + return job.promise; + } + + wrap(fn) { + const schedule = this.schedule.bind(this); + const wrapped = function (...args) { + return schedule(fn.bind(this), ...args); + }; + wrapped.withOptions = (options, ...args) => schedule(options, fn, ...args); + return wrapped; + } + + async updateSettings(options) { + options ??= {}; + await this._store.__updateSettings__(parser.overwrite(options, this.storeDefaults)); + parser.overwrite(options, this.instanceDefaults, this); + return this; + } + + currentReservoir() { + return this._store.__currentReservoir__(); + } + + incrementReservoir(incr = 0) { + return this._store.__incrementReservoir__(incr); + } +} + +module.exports = Bottleneck; +module.exports.default = Bottleneck; diff --git a/src/BottleneckError.coffee b/src/BottleneckError.coffee deleted file mode 100644 index 157b8ac..0000000 --- a/src/BottleneckError.coffee +++ /dev/null @@ -1,3 +0,0 @@ -class BottleneckError extends Error - -module.exports = BottleneckError diff --git a/src/BottleneckError.js b/src/BottleneckError.js new file mode 100644 index 0000000..640d2dd --- /dev/null +++ b/src/BottleneckError.js @@ -0,0 +1,3 @@ +class BottleneckError extends Error {} + +module.exports = BottleneckError; diff --git a/src/DLList.coffee b/src/DLList.coffee deleted file mode 100644 index 9dded30..0000000 --- a/src/DLList.coffee +++ /dev/null @@ -1,38 +0,0 @@ -class DLList - constructor: (@incr, @decr) -> - @_first = null - @_last = null - @length = 0 - push: (value) -> - @length++ - @incr?() - node = { value, prev: @_last, next: null } - if @_last? 
- @_last.next = node - @_last = node - else @_first = @_last = node - undefined - shift: () -> - if not @_first? then return - else - @length-- - @decr?() - value = @_first.value - if (@_first = @_first.next)? - @_first.prev = null - else - @_last = null - value - first: () -> if @_first? then @_first.value - getArray: () -> - node = @_first - while node? then (ref = node; node = node.next; ref.value) - forEachShift: (cb) -> - node = @shift() - while node? then (cb node; node = @shift()) - undefined - debug: () -> - node = @_first - while node? then (ref = node; node = node.next; { value: ref.value, prev: ref.prev?.value, next: ref.next?.value }) - -module.exports = DLList diff --git a/src/DLList.js b/src/DLList.js new file mode 100644 index 0000000..913e9f6 --- /dev/null +++ b/src/DLList.js @@ -0,0 +1,73 @@ +class DLList { + constructor(incr, decr) { + this.incr = incr; + this.decr = decr; + this._first = null; + this._last = null; + this.length = 0; + } + push(value) { + this.length++; + this.incr?.(); + const node = { value, prev: this._last, next: null }; + if (this._last != null) { + this._last.next = node; + this._last = node; + } else { + this._first = this._last = node; + } + } + shift() { + if (this._first == null) { + return; + } else { + this.length--; + this.decr?.(); + } + const { value } = this._first; + if ((this._first = this._first.next) != null) { + this._first.prev = null; + } else { + this._last = null; + } + return value; + } + first() { + return this._first?.value; + } + getArray() { + let node = this._first; + const result = []; + while (node != null) { + var ref; + result.push(((ref = node), (node = node.next), ref.value)); + } + return result; + } + forEachShift(cb) { + let node = this.shift(); + while (node != null) { + cb(node); + node = this.shift(); + } + } + debug() { + let node = this._first; + const result = []; + while (node != null) { + var ref; + result.push( + ((ref = node), + (node = node.next), + { + value: ref.value, + prev: 
ref.prev?.value, + next: ref.next?.value, + }), + ); + } + return result; + } +} + +module.exports = DLList; diff --git a/src/Events.coffee b/src/Events.coffee deleted file mode 100644 index c96b31a..0000000 --- a/src/Events.coffee +++ /dev/null @@ -1,38 +0,0 @@ -class Events - constructor: (@instance) -> - @_events = {} - if @instance.on? or @instance.once? or @instance.removeAllListeners? - throw new Error "An Emitter already exists for this object" - @instance.on = (name, cb) => @_addListener name, "many", cb - @instance.once = (name, cb) => @_addListener name, "once", cb - @instance.removeAllListeners = (name=null) => - if name? then delete @_events[name] else @_events = {} - _addListener: (name, status, cb) -> - @_events[name] ?= [] - @_events[name].push {cb, status} - @instance - listenerCount: (name) -> - if @_events[name]? then @_events[name].length else 0 - trigger: (name, args...) -> - try - if name != "debug" then @trigger "debug", "Event triggered: #{name}", args - return unless @_events[name]? - @_events[name] = @_events[name].filter (listener) -> listener.status != "none" - promises = @_events[name].map (listener) => - return if listener.status == "none" - if listener.status == "once" then listener.status = "none" - try - returned = listener.cb?(args...) - if typeof returned?.then == "function" - await returned - else - returned - catch e - if "name" != "error" then @trigger "error", e - null - (await Promise.all promises).find (x) -> x? 
- catch e - if "name" != "error" then @trigger "error", e - null - -module.exports = Events diff --git a/src/Events.js b/src/Events.js new file mode 100644 index 0000000..79844fa --- /dev/null +++ b/src/Events.js @@ -0,0 +1,62 @@ +class Events { + constructor(instance) { + this.instance = instance; + this._events = {}; + if ( + this.instance.on != null || + this.instance.once != null || + this.instance.removeAllListeners != null + ) { + throw new Error("An Emitter already exists for this object"); + } + this.instance.on = (name, cb) => this._addListener(name, "many", cb); + this.instance.once = (name, cb) => this._addListener(name, "once", cb); + this.instance.removeAllListeners = (name = null) => { + if (name != null) { + delete this._events[name]; + } else { + this._events = {}; + } + }; + } + _addListener(name, status, cb) { + this._events[name] ??= []; + this._events[name].push({ cb, status }); + return this.instance; + } + listenerCount(name) { + return this._events[name]?.length ?? 0; + } + async trigger(name, ...args) { + try { + if (name !== "debug") { + this.trigger("debug", `Event triggered: ${name}`, args); + } + + if (this._events[name] == null) return; + + this._events[name] = this._events[name].filter((listener) => listener.status !== "none"); + const allEvents = await Promise.all( + this._events[name].map(async (listener) => { + if (listener.status === "once") listener.status = "none"; + try { + return typeof listener.cb === "function" ? 
listener.cb(...(args || [])) : undefined; + } catch (e) { + if (name !== "error") this.trigger("error", e); + return null; + } + }), + ); + + return allEvents.find((x) => x != null); + } catch (error) { + const e = error; + if (name !== "error") { + this.trigger("error", e); + } + return null; + } + } +} + +module.exports = Events; diff --git a/src/Group.coffee b/src/Group.coffee deleted file mode 100644 index 210b502..0000000 --- a/src/Group.coffee +++ /dev/null @@ -1,80 +0,0 @@ -parser = require "./parser" -Events = require "./Events" -RedisConnection = require "./RedisConnection" -IORedisConnection = require "./IORedisConnection" -Scripts = require "./Scripts" - -class Group - defaults: - timeout: 1000 * 60 * 5 - connection: null - Promise: Promise - id: "group-key" - - constructor: (@limiterOptions={}) -> - parser.load @limiterOptions, @defaults, @ - @Events = new Events @ - @instances = {} - @Bottleneck = require "./Bottleneck" - @_startAutoCleanup() - @sharedConnection = @connection? - - if !@connection? - if @limiterOptions.datastore == "redis" - @connection = new RedisConnection Object.assign {}, @limiterOptions, { @Events } - else if @limiterOptions.datastore == "ioredis" - @connection = new IORedisConnection Object.assign {}, @limiterOptions, { @Events } - - key: (key="") -> @instances[key] ? do => - limiter = @instances[key] = new @Bottleneck Object.assign @limiterOptions, { - id: "#{@id}-#{key}", - @timeout, - @connection - } - @Events.trigger "created", limiter, key - limiter - - deleteKey: (key="") => - instance = @instances[key] - if @connection - deleted = await @connection.__runCommand__ ['del', Scripts.allKeys("#{@id}-#{key}")...] - if instance? - delete @instances[key] - await instance.disconnect() - instance? or deleted > 0 - - limiters: -> { key: k, limiter: v } for k, v of @instances - - keys: -> Object.keys @instances - - clusterKeys: -> - if !@connection? 
then return @Promise.resolve @keys() - keys = [] - cursor = null - start = "b_#{@id}-".length - end = "_settings".length - until cursor == 0 - [next, found] = await @connection.__runCommand__ ["scan", (cursor ? 0), "match", "b_#{@id}-*_settings", "count", 10000] - cursor = ~~next - keys.push(k.slice(start, -end)) for k in found - keys - - _startAutoCleanup: -> - clearInterval @interval - (@interval = setInterval => - time = Date.now() - for k, v of @instances - try if await v._store.__groupCheck__(time) then @deleteKey k - catch e then v.Events.trigger "error", e - , (@timeout / 2)).unref?() - - updateSettings: (options={}) -> - parser.overwrite options, @defaults, @ - parser.overwrite options, options, @limiterOptions - @_startAutoCleanup() if options.timeout? - - disconnect: (flush=true) -> - if !@sharedConnection - @connection?.disconnect flush - -module.exports = Group diff --git a/src/Group.js b/src/Group.js new file mode 100644 index 0000000..5764bab --- /dev/null +++ b/src/Group.js @@ -0,0 +1,135 @@ +const parser = require("./parser"); +const Events = require("./Events"); +const RedisConnection = require("./RedisConnection"); +const IORedisConnection = require("./IORedisConnection"); +const Scripts = require("./Scripts"); + +class Group { + defaults = { + timeout: 1000 * 60 * 5, + connection: null, + id: "group-key", + }; + + constructor(limiterOptions) { + this.deleteKey = this.deleteKey.bind(this); + this.limiterOptions = limiterOptions ?? 
{}; + parser.load(this.limiterOptions, this.defaults, this); + this.Events = new Events(this); + this.instances = {}; + this._startAutoCleanup(); + this.sharedConnection = this.connection != null; + this.Bottleneck = require("./Bottleneck"); + + if (this.connection == null) { + if (this.limiterOptions.datastore === "redis") { + this.connection = new RedisConnection( + Object.assign({}, this.limiterOptions, { Events: this.Events }), + ); + } else if (this.limiterOptions.datastore === "ioredis") { + this.connection = new IORedisConnection( + Object.assign({}, this.limiterOptions, { Events: this.Events }), + ); + } + } + } + + key(key = "") { + let limiter = this.instances[key]; + if (!limiter) { + limiter = new this.Bottleneck( + Object.assign(this.limiterOptions, { + id: `${this.id}-${key}`, + timeout: this.timeout, + connection: this.connection, + }), + ); + this.Events.trigger("created", limiter, key); + this.instances[key] = limiter; + } + return limiter; + } + + async deleteKey(key = "") { + let deleted; + const instance = this.instances[key]; + if (this.connection) { + deleted = await this.connection.__runCommand__([ + "del", + ...Scripts.allKeys(`${this.id}-${key}`), + ]); + } + if (instance != null) { + delete this.instances[key]; + await instance.disconnect(); + } + return instance != null || deleted > 0; + } + + limiters() { + return Object.entries(this.instances).map(([key, limiter]) => ({ key, limiter })); + } + + keys() { + return Object.keys(this.instances); + } + + async clusterKeys() { + if (this.connection == null) { + return Promise.resolve(this.keys()); + } + const keys = []; + let cursor = null; + const start = `b_${this.id}-`.length; + const end = "_settings".length; + while (cursor !== 0) { + const [next, found] = await this.connection.__runCommand__([ + "scan", + cursor ?? 
0, + "match", + `b_${this.id}-*_settings`, + "count", + 10000, + ]); + cursor = ~~next; + for (const k of found) { + keys.push(k.slice(start, -end)); + } + } + return keys; + } + + _startAutoCleanup() { + clearInterval(this.interval); + + this.interval = setInterval(async () => { + const time = Date.now(); + for (const [k, v] of Object.entries(this.instances)) { + try { + if (await v._store.__groupCheck__(time)) { + this.deleteKey(k); + } + } catch (e) { + v.Events.trigger("error", e); + } + } + }, this.timeout / 2).unref?.(); + } + + updateSettings(options) { + options ??= {}; + parser.overwrite(options, this.defaults, this); + parser.overwrite(options, options, this.limiterOptions); + if (options.timeout != null) { + return this._startAutoCleanup(); + } + } + + disconnect(flush = true) { + if (!this.sharedConnection) { + return this.connection?.disconnect(flush); + } + } +} + +module.exports = Group; diff --git a/src/IORedisConnection.coffee b/src/IORedisConnection.coffee deleted file mode 100644 index 211b124..0000000 --- a/src/IORedisConnection.coffee +++ /dev/null @@ -1,84 +0,0 @@ -parser = require "./parser" -Events = require "./Events" -Scripts = require "./Scripts" - -class IORedisConnection - datastore: "ioredis" - defaults: - Redis: null - clientOptions: {} - clusterNodes: null - client: null - Promise: Promise - Events: null - - constructor: (options={}) -> - parser.load options, @defaults, @ - @Redis ?= eval("require")("ioredis") # Obfuscated or else Webpack/Angular will try to inline the optional ioredis module. To override this behavior: pass the ioredis module to Bottleneck as the 'Redis' option. - @Events ?= new Events @ - @terminated = false - - if @clusterNodes? - @client = new @Redis.Cluster @clusterNodes, @clientOptions - @subscriber = new @Redis.Cluster @clusterNodes, @clientOptions - else if @client? and !@client.duplicate? 
- @subscriber = new @Redis.Cluster @client.startupNodes, @client.options - else - @client ?= new @Redis @clientOptions - @subscriber = @client.duplicate() - @limiters = {} - - @ready = @Promise.all [@_setup(@client, false), @_setup(@subscriber, true)] - .then => - @_loadScripts() - { @client, @subscriber } - - _setup: (client, sub) -> - client.setMaxListeners 0 - new @Promise (resolve, reject) => - client.on "error", (e) => @Events.trigger "error", e - if sub - client.on "message", (channel, message) => - @limiters[channel]?._store.onMessage channel, message - if client.status == "ready" then resolve() - else client.once "ready", resolve - - _loadScripts: -> Scripts.names.forEach (name) => @client.defineCommand name, { lua: Scripts.payload(name) } - - __runCommand__: (cmd) -> - await @ready - [[_, deleted]] = await @client.pipeline([cmd]).exec() - deleted - - __addLimiter__: (instance) -> - @Promise.all [instance.channel(), instance.channel_client()].map (channel) => - new @Promise (resolve, reject) => - @subscriber.subscribe channel, => - @limiters[channel] = instance - resolve() - - __removeLimiter__: (instance) -> - [instance.channel(), instance.channel_client()].forEach (channel) => - await @subscriber.unsubscribe channel unless @terminated - delete @limiters[channel] - - __scriptArgs__: (name, id, args, cb) -> - keys = Scripts.keys name, id - [keys.length].concat keys, args, cb - - __scriptFn__: (name) -> - @client[name].bind(@client) - - disconnect: (flush=true) -> - clearInterval(@limiters[k]._store.heartbeat) for k in Object.keys @limiters - @limiters = {} - @terminated = true - - if flush - @Promise.all [@client.quit(), @subscriber.quit()] - else - @client.disconnect() - @subscriber.disconnect() - @Promise.resolve() - -module.exports = IORedisConnection diff --git a/src/IORedisConnection.js b/src/IORedisConnection.js new file mode 100644 index 0000000..2e71113 --- /dev/null +++ b/src/IORedisConnection.js @@ -0,0 +1,122 @@ +const parser = 
require("./parser"); +const Events = require("./Events"); +const Scripts = require("./Scripts"); + +class IORedisConnection { + datastore = "ioredis"; + defaults = { + Redis: null, + clientOptions: {}, + clusterNodes: null, + client: null, + Events: null, + }; + + constructor(options) { + options ??= {}; + parser.load(options, this.defaults, this); + + // Obfuscated or else Webpack/Angular will try to inline the optional ioredis module. To override this behavior: pass the ioredis module to Bottleneck as the 'Redis' option. + this.Redis ??= eval("require")("ioredis"); + this.Events ??= new Events(this); + this.terminated = false; + + if (this.clusterNodes != null) { + this.client = new this.Redis.Cluster(this.clusterNodes, this.clientOptions); + this.subscriber = new this.Redis.Cluster(this.clusterNodes, this.clientOptions); + } else if (this.client != null && this.client.duplicate == null) { + this.subscriber = new this.Redis.Cluster(this.client.startupNodes, this.client.options); + } else { + this.client ??= new this.Redis(this.clientOptions); + this.subscriber = this.client.duplicate(); + } + this.limiters = {}; + + this.ready = Promise.all([ + this._setup(this.client, false), + this._setup(this.subscriber, true), + ]).then(() => { + this._loadScripts(); + return { client: this.client, subscriber: this.subscriber }; + }); + } + + _setup(client, sub) { + client.setMaxListeners(0); + return new Promise((resolve) => { + client.on("error", (e) => this.Events.trigger("error", e)); + if (sub) { + client.on("message", (channel, message) => { + this.limiters[channel]?._store.onMessage(channel, message); + }); + } + if (client.status === "ready") { + resolve(); + } else { + client.once("ready", resolve); + } + }); + } + + _loadScripts() { + return Scripts.names.forEach((name) => + this.client.defineCommand(name, { lua: Scripts.payload(name) }), + ); + } + + async __runCommand__(cmd) { + await this.ready; + const [[, deleted]] = await this.client.pipeline([cmd]).exec(); + 
return deleted; + } + + async __addLimiter__(instance) { + await Promise.all( + [instance.channel(), instance.channel_client()].map((channel) => { + return new Promise((resolve) => { + this.subscriber.subscribe(channel, () => { + this.limiters[channel] = instance; + resolve(); + }); + }); + }), + ); + } + + async __removeLimiter__(instance) { + await Promise.all( + [instance.channel(), instance.channel_client()].map(async (channel) => { + if (!this.terminated) { + await this.subscriber.unsubscribe(channel); + } + delete this.limiters[channel]; + }), + ); + } + + __scriptArgs__(name, id, args, cb) { + const keys = Scripts.keys(name, id); + return [keys.length].concat(keys, args, cb); + } + + __scriptFn__(name) { + return this.client[name].bind(this.client); + } + + async disconnect(flush = true) { + for (const v of Object.values(this.limiters)) { + clearInterval(v._store.heartbeat); + } + this.limiters = {}; + this.terminated = true; + + if (flush) { + await Promise.all([this.client.quit(), this.subscriber.quit()]); + } else { + this.client.disconnect(); + this.subscriber.disconnect(); + } + } +} + +module.exports = IORedisConnection; diff --git a/src/Job.coffee b/src/Job.coffee deleted file mode 100644 index 32cf1bc..0000000 --- a/src/Job.coffee +++ /dev/null @@ -1,98 +0,0 @@ -NUM_PRIORITIES = 10 -DEFAULT_PRIORITY = 5 - -parser = require "./parser" -BottleneckError = require "./BottleneckError" - -class Job - constructor: (@task, @args, options, jobDefaults, @rejectOnDrop, @Events, @_states, @Promise) -> - @options = parser.load options, jobDefaults - @options.priority = @_sanitizePriority @options.priority - if @options.id == jobDefaults.id then @options.id = "#{@options.id}-#{@_randomIndex()}" - @promise = new @Promise (@_resolve, @_reject) => - @retryCount = 0 - - _sanitizePriority: (priority) -> - sProperty = if ~~priority != priority then DEFAULT_PRIORITY else priority - if sProperty < 0 then 0 else if sProperty > NUM_PRIORITIES-1 then NUM_PRIORITIES-1 else 
sProperty - - _randomIndex: -> Math.random().toString(36).slice(2) - - doDrop: ({ error, message="This job has been dropped by Bottleneck" } = {}) -> - if @_states.remove @options.id - if @rejectOnDrop then @_reject (error ? new BottleneckError message) - @Events.trigger "dropped", { @args, @options, @task, @promise } - true - else - false - - _assertStatus: (expected) -> - status = @_states.jobStatus @options.id - if not (status == expected or (expected == "DONE" and status == null)) - throw new BottleneckError "Invalid job status #{status}, expected #{expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues" - - doReceive: () -> - @_states.start @options.id - @Events.trigger "received", { @args, @options } - - doQueue: (reachedHWM, blocked) -> - @_assertStatus "RECEIVED" - @_states.next @options.id - @Events.trigger "queued", { @args, @options, reachedHWM, blocked } - - doRun: () -> - if @retryCount == 0 - @_assertStatus "QUEUED" - @_states.next @options.id - else @_assertStatus "EXECUTING" - @Events.trigger "scheduled", { @args, @options } - - doExecute: (chained, clearGlobalState, run, free) -> - if @retryCount == 0 - @_assertStatus "RUNNING" - @_states.next @options.id - else @_assertStatus "EXECUTING" - eventInfo = { @args, @options, @retryCount } - @Events.trigger "executing", eventInfo - - try - passed = await if chained? - chained.schedule @options, @task, @args... - else @task @args... - - if clearGlobalState() - @doDone eventInfo - await free @options, eventInfo - @_assertStatus "DONE" - @_resolve passed - catch error - @_onFailure error, eventInfo, clearGlobalState, run, free - - doExpire: (clearGlobalState, run, free) -> - if @_states.jobStatus @options.id == "RUNNING" - @_states.next @options.id - @_assertStatus "EXECUTING" - eventInfo = { @args, @options, @retryCount } - error = new BottleneckError "This job timed out after #{@options.expiration} ms." 
- @_onFailure error, eventInfo, clearGlobalState, run, free - - _onFailure: (error, eventInfo, clearGlobalState, run, free) -> - if clearGlobalState() - retry = await @Events.trigger "failed", error, eventInfo - if retry? - retryAfter = ~~retry - @Events.trigger "retry", "Retrying #{@options.id} after #{retryAfter} ms", eventInfo - @retryCount++ - run retryAfter - else - @doDone eventInfo - await free @options, eventInfo - @_assertStatus "DONE" - @_reject error - - doDone: (eventInfo) -> - @_assertStatus "EXECUTING" - @_states.next @options.id - @Events.trigger "done", eventInfo - -module.exports = Job diff --git a/src/Job.js b/src/Job.js new file mode 100644 index 0000000..330b4ba --- /dev/null +++ b/src/Job.js @@ -0,0 +1,158 @@ +const NUM_PRIORITIES = 10; +const DEFAULT_PRIORITY = 5; + +const parser = require("./parser"); +const BottleneckError = require("./BottleneckError"); + +class Job { + constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states) { + this.task = task; + this.args = args; + this.rejectOnDrop = rejectOnDrop; + this.Events = Events; + this._states = _states; + this.options = parser.load(options, jobDefaults); + this.options.priority = this._sanitizePriority(this.options.priority); + if (this.options.id === jobDefaults.id) { + this.options.id = `${this.options.id}-${this._randomIndex()}`; + } + this.promise = new Promise((_resolve, _reject) => { + this._resolve = _resolve; + this._reject = _reject; + }); + this.retryCount = 0; + } + + _sanitizePriority(priority) { + const sProperty = ~~priority !== priority ? 
DEFAULT_PRIORITY : priority; + if (sProperty < 0) { + return 0; + } else if (sProperty > NUM_PRIORITIES - 1) { + return NUM_PRIORITIES - 1; + } else { + return sProperty; + } + } + + _randomIndex() { + return Math.random().toString(36).slice(2); + } + + doDrop(params) { + const { error, message = "This job has been dropped by Bottleneck" } = params || {}; + if (this._states.remove(this.options.id)) { + if (this.rejectOnDrop) { + this._reject(error ?? new BottleneckError(message)); + } + this.Events.trigger("dropped", { + args: this.args, + options: this.options, + task: this.task, + promise: this.promise, + }); + return true; + } else { + return false; + } + } + + _assertStatus(expected) { + const status = this._states.jobStatus(this.options.id); + if (!(status === expected || (expected === "DONE" && status === null))) { + throw new BottleneckError( + `Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`, + ); + } + } + + doReceive() { + this._states.start(this.options.id); + return this.Events.trigger("received", { args: this.args, options: this.options }); + } + + doQueue(reachedHWM, blocked) { + this._assertStatus("RECEIVED"); + this._states.next(this.options.id); + return this.Events.trigger("queued", { + args: this.args, + options: this.options, + reachedHWM, + blocked, + }); + } + + doRun() { + if (this.retryCount === 0) { + this._assertStatus("QUEUED"); + this._states.next(this.options.id); + } else { + this._assertStatus("EXECUTING"); + } + return this.Events.trigger("scheduled", { args: this.args, options: this.options }); + } + + async doExecute(chained, clearGlobalState, run, free) { + if (this.retryCount === 0) { + this._assertStatus("RUNNING"); + this._states.next(this.options.id); + } else { + this._assertStatus("EXECUTING"); + } + const eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; + this.Events.trigger("executing", eventInfo); + + try { + 
const passed = await (chained != null
+        ? chained.schedule(this.options, this.task, ...this.args)
+        : this.task(...(this.args || [])));
+
+      if (clearGlobalState()) {
+        this.doDone(eventInfo);
+        await free(this.options, eventInfo);
+        this._assertStatus("DONE");
+        return this._resolve(passed);
+      }
+    } catch (error) {
+      return this._onFailure(error, eventInfo, clearGlobalState, run, free);
+    }
+  }
+
+  doExpire(clearGlobalState, run, free) {
+    if (this._states.jobStatus(this.options.id) === "RUNNING") {
+      this._states.next(this.options.id);
+    }
+    this._assertStatus("EXECUTING");
+    const eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount };
+    const error = new BottleneckError(`This job timed out after ${this.options.expiration} ms.`);
+    return this._onFailure(error, eventInfo, clearGlobalState, run, free);
+  }
+
+  async _onFailure(error, eventInfo, clearGlobalState, run, free) {
+    if (clearGlobalState()) {
+      const retry = await this.Events.trigger("failed", error, eventInfo);
+      if (retry != null) {
+        const retryAfter = ~~retry;
+        this.Events.trigger(
+          "retry",
+          `Retrying ${this.options.id} after ${retryAfter} ms`,
+          eventInfo,
+        );
+        this.retryCount++;
+        return run(retryAfter);
+      } else {
+        this.doDone(eventInfo);
+        await free(this.options, eventInfo);
+        this._assertStatus("DONE");
+        return this._reject(error);
+      }
+    }
+  }
+
+  doDone(eventInfo) {
+    this._assertStatus("EXECUTING");
+    this._states.next(this.options.id);
+    return this.Events.trigger("done", eventInfo);
+  }
+}
+
+module.exports = Job;
diff --git a/src/LocalDatastore.coffee b/src/LocalDatastore.coffee
deleted file mode 100644
index d843b36..0000000
--- a/src/LocalDatastore.coffee
+++ /dev/null
@@ -1,141 +0,0 @@
-parser = require "./parser"
-BottleneckError = require "./BottleneckError"
-
-class LocalDatastore
-  constructor: (@instance, @storeOptions, storeInstanceOptions) ->
-    @clientId = @instance._randomIndex()
-    parser.load storeInstanceOptions, storeInstanceOptions, @
-    
@_nextRequest = @_lastReservoirRefresh = @_lastReservoirIncrease = Date.now() - @_running = 0 - @_done = 0 - @_unblockTime = 0 - @ready = @Promise.resolve() - @clients = {} - @_startHeartbeat() - - _startHeartbeat: -> - if @heartbeat? then clearInterval @heartbeat - - if (( - @storeOptions.reservoirRefreshInterval? and @storeOptions.reservoirRefreshAmount? - ) or ( - @storeOptions.reservoirIncreaseInterval? and @storeOptions.reservoirIncreaseAmount? - )) - (@heartbeat = setInterval => - now = Date.now() - - if @storeOptions.reservoirRefreshInterval? and now >= @_lastReservoirRefresh + @storeOptions.reservoirRefreshInterval - @_lastReservoirRefresh = now - @storeOptions.reservoir = @storeOptions.reservoirRefreshAmount - @instance._drainAll @computeCapacity() - - if @storeOptions.reservoirIncreaseInterval? and now >= @_lastReservoirIncrease + @storeOptions.reservoirIncreaseInterval - { reservoirIncreaseAmount: amount, reservoirIncreaseMaximum: maximum, reservoir } = @storeOptions - @_lastReservoirIncrease = now - incr = if maximum? then Math.min amount, maximum - reservoir else amount - if incr > 0 - @storeOptions.reservoir += incr - @instance._drainAll @computeCapacity() - - , @heartbeatInterval).unref?() - - __publish__: (message) -> - await @yieldLoop() - @instance.Events.trigger "message", message.toString() - - __disconnect__: (flush) -> - await @yieldLoop() - clearInterval @heartbeat - @Promise.resolve() - - yieldLoop: (t=0) -> new @Promise (resolve, reject) -> setTimeout resolve, t - - computePenalty: -> @storeOptions.penalty ? 
((15 * @storeOptions.minTime) or 5000) - - __updateSettings__: (options) -> - await @yieldLoop() - parser.overwrite options, options, @storeOptions - @_startHeartbeat() - @instance._drainAll @computeCapacity() - true - - __running__: -> - await @yieldLoop() - @_running - - __queued__: -> - await @yieldLoop() - @instance.queued() - - __done__: -> - await @yieldLoop() - @_done - - __groupCheck__: (time) -> - await @yieldLoop() - (@_nextRequest + @timeout) < time - - computeCapacity: -> - { maxConcurrent, reservoir } = @storeOptions - if maxConcurrent? and reservoir? then Math.min((maxConcurrent - @_running), reservoir) - else if maxConcurrent? then maxConcurrent - @_running - else if reservoir? then reservoir - else null - - conditionsCheck: (weight) -> - capacity = @computeCapacity() - not capacity? or weight <= capacity - - __incrementReservoir__: (incr) -> - await @yieldLoop() - reservoir = @storeOptions.reservoir += incr - @instance._drainAll @computeCapacity() - reservoir - - __currentReservoir__: -> - await @yieldLoop() - @storeOptions.reservoir - - isBlocked: (now) -> @_unblockTime >= now - - check: (weight, now) -> @conditionsCheck(weight) and (@_nextRequest - now) <= 0 - - __check__: (weight) -> - await @yieldLoop() - now = Date.now() - @check weight, now - - __register__: (index, weight, expiration) -> - await @yieldLoop() - now = Date.now() - if @conditionsCheck weight - @_running += weight - if @storeOptions.reservoir? then @storeOptions.reservoir -= weight - wait = Math.max @_nextRequest - now, 0 - @_nextRequest = now + wait + @storeOptions.minTime - { success: true, wait, reservoir: @storeOptions.reservoir } - else { success: false } - - strategyIsBlock: -> @storeOptions.strategy == 3 - - __submit__: (queueLength, weight) -> - await @yieldLoop() - if @storeOptions.maxConcurrent? 
and weight > @storeOptions.maxConcurrent - throw new BottleneckError("Impossible to add a job having a weight of #{weight} to a limiter having a maxConcurrent setting of #{@storeOptions.maxConcurrent}") - now = Date.now() - reachedHWM = @storeOptions.highWater? and queueLength == @storeOptions.highWater and not @check(weight, now) - blocked = @strategyIsBlock() and (reachedHWM or @isBlocked now) - if blocked - @_unblockTime = now + @computePenalty() - @_nextRequest = @_unblockTime + @storeOptions.minTime - @instance._dropAllQueued() - { reachedHWM, blocked, strategy: @storeOptions.strategy } - - __free__: (index, weight) -> - await @yieldLoop() - @_running -= weight - @_done += weight - @instance._drainAll @computeCapacity() - { running: @_running } - -module.exports = LocalDatastore diff --git a/src/LocalDatastore.js b/src/LocalDatastore.js new file mode 100644 index 0000000..8bab1e2 --- /dev/null +++ b/src/LocalDatastore.js @@ -0,0 +1,203 @@ +const parser = require("./parser"); +const BottleneckError = require("./BottleneckError"); + +class LocalDatastore { + constructor(instance, storeOptions, storeInstanceOptions) { + this.instance = instance; + this.storeOptions = storeOptions; + this.clientId = this.instance._randomIndex(); + parser.load(storeInstanceOptions, storeInstanceOptions, this); + this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); + this._running = 0; + this._done = 0; + this._unblockTime = 0; + this.ready = Promise.resolve(); + this.clients = {}; + this._startHeartbeat(); + } + + _startHeartbeat() { + if (this.heartbeat) { + clearInterval(this.heartbeat); + } + + if ( + (this.storeOptions.reservoirRefreshInterval != null && + this.storeOptions.reservoirRefreshAmount != null) || + (this.storeOptions.reservoirIncreaseInterval != null && + this.storeOptions.reservoirIncreaseAmount != null) + ) { + this.heartbeat = setInterval(() => { + const now = Date.now(); + if ( + this.storeOptions.reservoirRefreshInterval != 
null && + now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval + ) { + this._lastReservoirRefresh = now; + this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount; + this.instance._drainAll(this.computeCapacity()); + } + + if ( + this.storeOptions.reservoirIncreaseInterval != null && + now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval + ) { + const { + reservoirIncreaseAmount: amount, + reservoirIncreaseMaximum: maximum, + reservoir, + } = this.storeOptions; + this._lastReservoirIncrease = now; + const incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; + if (incr > 0) { + this.storeOptions.reservoir += incr; + return this.instance._drainAll(this.computeCapacity()); + } + } + }, this.heartbeatInterval).unref?.(); + } + } + + async __publish__(message) { + await this.yieldLoop(); + return this.instance.Events.trigger("message", message.toString()); + } + + async __disconnect__() { + await this.yieldLoop(); + clearInterval(this.heartbeat); + } + + yieldLoop(t) { + return new Promise((resolve) => setTimeout(resolve, t ?? 0)); + } + + computePenalty() { + return this.storeOptions.penalty != null + ? 
this.storeOptions.penalty + : 15 * this.storeOptions.minTime || 5000; + } + + async __updateSettings__(options) { + await this.yieldLoop(); + parser.overwrite(options, options, this.storeOptions); + this._startHeartbeat(); + this.instance._drainAll(this.computeCapacity()); + return true; + } + + async __running__() { + await this.yieldLoop(); + return this._running; + } + + async __queued__() { + await this.yieldLoop(); + return this.instance.queued(); + } + + async __done__() { + await this.yieldLoop(); + return this._done; + } + + async __groupCheck__(time) { + await this.yieldLoop(); + return this._nextRequest + this.timeout < time; + } + + computeCapacity() { + const { maxConcurrent, reservoir } = this.storeOptions; + if (maxConcurrent != null && reservoir != null) { + return Math.min(maxConcurrent - this._running, reservoir); + } else if (maxConcurrent != null) { + return maxConcurrent - this._running; + } else if (reservoir != null) { + return reservoir; + } else { + return null; + } + } + + conditionsCheck(weight) { + const capacity = this.computeCapacity(); + return capacity == null || weight <= capacity; + } + + async __incrementReservoir__(incr) { + await this.yieldLoop(); + const reservoir = (this.storeOptions.reservoir += incr); + this.instance._drainAll(this.computeCapacity()); + return reservoir; + } + + async __currentReservoir__() { + await this.yieldLoop(); + return this.storeOptions.reservoir; + } + + isBlocked(now) { + return this._unblockTime >= now; + } + + check(weight, now) { + return this.conditionsCheck(weight) && this._nextRequest - now <= 0; + } + + async __check__(weight) { + await this.yieldLoop(); + const now = Date.now(); + return this.check(weight, now); + } + + async __register__(index, weight, _expiration) { + await this.yieldLoop(); + const now = Date.now(); + if (this.conditionsCheck(weight)) { + this._running += weight; + if (this.storeOptions.reservoir != null) { + this.storeOptions.reservoir -= weight; + } + const wait = 
Math.max(this._nextRequest - now, 0); + this._nextRequest = now + wait + this.storeOptions.minTime; + return { success: true, wait, reservoir: this.storeOptions.reservoir }; + } else { + return { success: false }; + } + } + + strategyIsBlock() { + return this.storeOptions.strategy === 3; + } + + async __submit__(queueLength, weight) { + await this.yieldLoop(); + if (this.storeOptions.maxConcurrent != null && weight > this.storeOptions.maxConcurrent) { + throw new BottleneckError( + `Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`, + ); + } + const now = Date.now(); + const reachedHWM = + this.storeOptions.highWater != null && + queueLength === this.storeOptions.highWater && + !this.check(weight, now); + const blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now)); + if (blocked) { + this._unblockTime = now + this.computePenalty(); + this._nextRequest = this._unblockTime + this.storeOptions.minTime; + this.instance._dropAllQueued(); + } + return { reachedHWM, blocked, strategy: this.storeOptions.strategy }; + } + + async __free__(index, weight) { + await this.yieldLoop(); + this._running -= weight; + this._done += weight; + this.instance._drainAll(this.computeCapacity()); + return { running: this._running }; + } +} + +module.exports = LocalDatastore; diff --git a/src/Queues.coffee b/src/Queues.coffee deleted file mode 100644 index b563ae3..0000000 --- a/src/Queues.coffee +++ /dev/null @@ -1,28 +0,0 @@ -DLList = require "./DLList" -Events = require "./Events" - -class Queues - - constructor: (num_priorities) -> - @Events = new Events @ - @_length = 0 - @_lists = for i in [1..num_priorities] then new DLList (=> @incr()), (=> @decr()) - - incr: -> if @_length++ == 0 then @Events.trigger "leftzero" - - decr: -> if --@_length == 0 then @Events.trigger "zero" - - push: (job) -> @_lists[job.options.priority].push job - - queued: (priority) -> if priority? 
then @_lists[priority].length else @_length - - shiftAll: (fn) -> @_lists.forEach (list) -> list.forEachShift fn - - getFirst: (arr=@_lists) -> - for list in arr - return list if list.length > 0 - [] - - shiftLastFrom: (priority) -> @getFirst(@_lists[priority..].reverse()).shift() - -module.exports = Queues diff --git a/src/Queues.js b/src/Queues.js new file mode 100644 index 0000000..4790a9f --- /dev/null +++ b/src/Queues.js @@ -0,0 +1,58 @@ +const DLList = require("./DLList"); +const Events = require("./Events"); + +class Queues { + constructor(num_priorities) { + this.Events = new Events(this); + this._length = 0; + this._lists = []; + for (let i = 0; i < num_priorities; i++) { + const list = new DLList( + () => this.incr(), + () => this.decr(), + ); + this._lists.push(list); + } + } + + incr() { + if (this._length++ === 0) { + return this.Events.trigger("leftzero"); + } + } + + decr() { + if (--this._length === 0) { + return this.Events.trigger("zero"); + } + } + + push(job) { + return this._lists[job.options.priority].push(job); + } + + queued(priority) { + if (priority != null) { + return this._lists[priority].length; + } else { + return this._length; + } + } + + shiftAll(fn) { + return this._lists.forEach((list) => list.forEachShift(fn)); + } + + getFirst(arr) { + for (const list of arr ?? 
this._lists) { + if (list.length > 0) return list; + } + return []; + } + + shiftLastFrom(priority) { + return this.getFirst(this._lists.slice(priority).reverse()).shift(); + } +} + +module.exports = Queues; diff --git a/src/RedisConnection.coffee b/src/RedisConnection.coffee deleted file mode 100644 index 15379ef..0000000 --- a/src/RedisConnection.coffee +++ /dev/null @@ -1,91 +0,0 @@ -parser = require "./parser" -Events = require "./Events" -Scripts = require "./Scripts" - -class RedisConnection - datastore: "redis" - defaults: - Redis: null - clientOptions: {} - client: null - Promise: Promise - Events: null - - constructor: (options={}) -> - parser.load options, @defaults, @ - @Redis ?= eval("require")("redis") # Obfuscated or else Webpack/Angular will try to inline the optional redis module. To override this behavior: pass the redis module to Bottleneck as the 'Redis' option. - @Events ?= new Events @ - @terminated = false - - @client ?= @Redis.createClient @clientOptions - @subscriber = @client.duplicate() - @limiters = {} - @shas = {} - - @ready = @Promise.all [@_setup(@client, false), @_setup(@subscriber, true)] - .then => @_loadScripts() - .then => { @client, @subscriber } - - _setup: (client, sub) -> - client.setMaxListeners 0 - new @Promise (resolve, reject) => - client.on "error", (e) => @Events.trigger "error", e - if sub - client.on "message", (channel, message) => - @limiters[channel]?._store.onMessage channel, message - if client.ready then resolve() - else client.once "ready", resolve - - _loadScript: (name) -> - new @Promise (resolve, reject) => - payload = Scripts.payload name - @client.multi([["script", "load", payload]]).exec (err, replies) => - if err? then return reject err - @shas[name] = replies[0] - resolve replies[0] - - _loadScripts: -> @Promise.all(Scripts.names.map (k) => @_loadScript k) - - __runCommand__: (cmd) -> - await @ready - new @Promise (resolve, reject) => - @client.multi([cmd]).exec_atomic (err, replies) -> - if err? 
then reject(err) else resolve(replies[0]) - - __addLimiter__: (instance) -> - @Promise.all [instance.channel(), instance.channel_client()].map (channel) => - new @Promise (resolve, reject) => - handler = (chan) => - if chan == channel - @subscriber.removeListener "subscribe", handler - @limiters[channel] = instance - resolve() - @subscriber.on "subscribe", handler - @subscriber.subscribe channel - - __removeLimiter__: (instance) -> - @Promise.all [instance.channel(), instance.channel_client()].map (channel) => - unless @terminated - await new @Promise (resolve, reject) => - @subscriber.unsubscribe channel, (err, chan) -> - if err? then return reject err - if chan == channel then return resolve() - delete @limiters[channel] - - __scriptArgs__: (name, id, args, cb) -> - keys = Scripts.keys name, id - [@shas[name], keys.length].concat keys, args, cb - - __scriptFn__: (name) -> - @client.evalsha.bind(@client) - - disconnect: (flush=true) -> - clearInterval(@limiters[k]._store.heartbeat) for k in Object.keys @limiters - @limiters = {} - @terminated = true - - @client.end flush - @subscriber.end flush - @Promise.resolve() - -module.exports = RedisConnection diff --git a/src/RedisConnection.js b/src/RedisConnection.js new file mode 100644 index 0000000..e0b892c --- /dev/null +++ b/src/RedisConnection.js @@ -0,0 +1,138 @@ +const parser = require("./parser"); +const Events = require("./Events"); +const Scripts = require("./Scripts"); + +class RedisConnection { + defaults = { + Redis: null, + clientOptions: {}, + client: null, + Events: null, + }; + datastore = "redis"; + + constructor(options) { + options ??= {}; + parser.load(options, this.defaults, this); + // Obfuscated or else Webpack/Angular will try to inline the optional redis module. To override this behavior: pass the redis module to Bottleneck as the 'Redis' option. 
+ this.Redis ??= eval("require")("redis"); + this.Events ??= new Events(this); + this.terminated = false; + + this.client ??= this.Redis.createClient(this.clientOptions); + this.subscriber = this.client.duplicate(); + this.limiters = {}; + this.shas = {}; + + this.ready = Promise.all([this._setup(this.client, false), this._setup(this.subscriber, true)]) + .then(() => this._loadScripts()) + .then(() => ({ client: this.client, subscriber: this.subscriber })); + } + + _setup(client, sub) { + client.setMaxListeners(0); + return new Promise((resolve) => { + client.on("error", (e) => this.Events.trigger("error", e)); + if (sub) { + client.on("message", (channel, message) => { + this.limiters[channel]?._store.onMessage(channel, message); + }); + } + if (client.ready) { + resolve(); + } else { + client.once("ready", resolve); + } + }); + } + + _loadScript(name) { + return new Promise((resolve, reject) => { + const payload = Scripts.payload(name); + this.client.multi([["script", "load", payload]]).exec((err, replies) => { + if (err != null) { + reject(err); + } + this.shas[name] = replies[0]; + resolve(replies[0]); + }); + }); + } + + _loadScripts() { + return Promise.all(Scripts.names.map((k) => this._loadScript(k))); + } + + async __runCommand__(cmd) { + await this.ready; + return new Promise((resolve, reject) => { + this.client.multi([cmd]).exec_atomic(function (err, replies) { + if (err != null) { + reject(err); + } else { + resolve(replies[0]); + } + }); + }); + } + + async __addLimiter__(instance) { + await Promise.all( + [instance.channel(), instance.channel_client()].map((channel) => { + return new Promise((resolve) => { + var handler = (chan) => { + if (chan === channel) { + this.subscriber.removeListener("subscribe", handler); + this.limiters[channel] = instance; + resolve(); + } + }; + this.subscriber.on("subscribe", handler); + this.subscriber.subscribe(channel); + }); + }), + ); + } + + async __removeLimiter__(instance) { + await Promise.all( + 
[instance.channel(), instance.channel_client()].map(async (channel) => { + if (!this.terminated) { + await new Promise((resolve, reject) => { + return this.subscriber.unsubscribe(channel, function (err, chan) { + if (err != null) { + return reject(err); + } + if (chan === channel) { + return resolve(); + } + }); + }); + } + delete this.limiters[channel]; + }), + ); + } + + __scriptArgs__(name, id, args, cb) { + const keys = Scripts.keys(name, id); + return [this.shas[name], keys.length].concat(keys, args, cb); + } + + __scriptFn__() { + return this.client.evalsha.bind(this.client); + } + + async disconnect(flush = true) { + for (const v of Object.values(this.limiters)) { + clearInterval(v._store.heartbeat); + } + this.limiters = {}; + this.terminated = true; + + this.client.end(flush); + this.subscriber.end(flush); + } +} + +module.exports = RedisConnection; diff --git a/src/RedisDatastore.coffee b/src/RedisDatastore.coffee deleted file mode 100644 index a11fb3a..0000000 --- a/src/RedisDatastore.coffee +++ /dev/null @@ -1,158 +0,0 @@ -parser = require "./parser" -BottleneckError = require "./BottleneckError" -RedisConnection = require "./RedisConnection" -IORedisConnection = require "./IORedisConnection" - -class RedisDatastore - constructor: (@instance, @storeOptions, storeInstanceOptions) -> - @originalId = @instance.id - @clientId = @instance._randomIndex() - parser.load storeInstanceOptions, storeInstanceOptions, @ - @clients = {} - @capacityPriorityCounters = {} - @sharedConnection = @connection? 
- - @connection ?= if @instance.datastore == "redis" then new RedisConnection { @Redis, @clientOptions, @Promise, Events: @instance.Events } - else if @instance.datastore == "ioredis" then new IORedisConnection { @Redis, @clientOptions, @clusterNodes, @Promise, Events: @instance.Events } - - @instance.connection = @connection - @instance.datastore = @connection.datastore - - @ready = @connection.ready - .then (@clients) => @runScript "init", @prepareInitSettings @clearDatastore - .then => @connection.__addLimiter__ @instance - .then => @runScript "register_client", [@instance.queued()] - .then => - (@heartbeat = setInterval => - @runScript "heartbeat", [] - .catch (e) => @instance.Events.trigger "error", e - , @heartbeatInterval).unref?() - @clients - - __publish__: (message) -> - { client } = await @ready - client.publish(@instance.channel(), "message:#{message.toString()}") - - onMessage: (channel, message) -> - try - pos = message.indexOf(":") - [type, data] = [message.slice(0, pos), message.slice(pos+1)] - if type == "capacity" - await @instance._drainAll(if data.length > 0 then ~~data) - else if type == "capacity-priority" - [rawCapacity, priorityClient, counter] = data.split(":") - capacity = if rawCapacity.length > 0 then ~~rawCapacity - if priorityClient == @clientId - drained = await @instance._drainAll(capacity) - newCapacity = if capacity? 
then capacity - (drained or 0) else "" - await @clients.client.publish(@instance.channel(), "capacity-priority:#{newCapacity}::#{counter}") - else if priorityClient == "" - clearTimeout @capacityPriorityCounters[counter] - delete @capacityPriorityCounters[counter] - @instance._drainAll(capacity) - else - @capacityPriorityCounters[counter] = setTimeout => - try - delete @capacityPriorityCounters[counter] - await @runScript "blacklist_client", [priorityClient] - await @instance._drainAll(capacity) - catch e then @instance.Events.trigger "error", e - , 1000 - else if type == "message" - @instance.Events.trigger "message", data - else if type == "blocked" - await @instance._dropAllQueued() - catch e then @instance.Events.trigger "error", e - - __disconnect__: (flush) -> - clearInterval @heartbeat - if @sharedConnection - @connection.__removeLimiter__ @instance - else - @connection.disconnect flush - - runScript: (name, args) -> - await @ready unless name == "init" or name == "register_client" - new @Promise (resolve, reject) => - all_args = [Date.now(), @clientId].concat args - @instance.Events.trigger "debug", "Calling Redis script: #{name}.lua", all_args - arr = @connection.__scriptArgs__ name, @originalId, all_args, (err, replies) -> - if err? then return reject err - return resolve replies - @connection.__scriptFn__(name) arr... - .catch (e) => - if typeof e.message == "string" and e.message.match(/^(.*\s)?SETTINGS_KEY_NOT_FOUND$/) != null - if name == "heartbeat" then @Promise.resolve() - else - @runScript("init", @prepareInitSettings(false)) - .then => @runScript(name, args) - else if typeof e.message == "string" and e.message.match(/^(.*\s)?UNKNOWN_CLIENT$/) != null - @runScript("register_client", [@instance.queued()]) - .then => @runScript(name, args) - else @Promise.reject e - - prepareArray: (arr) -> (if x? then x.toString() else "") for x in arr - - prepareObject: (obj) -> - arr = [] - for k, v of obj then arr.push k, (if v? 
then v.toString() else "") - arr - - prepareInitSettings: (clear) -> - args = @prepareObject Object.assign({}, @storeOptions, { - id: @originalId - version: @instance.version - groupTimeout: @timeout - @clientTimeout - }) - args.unshift (if clear then 1 else 0), @instance.version - args - - convertBool: (b) -> !!b - - __updateSettings__: (options) -> - await @runScript "update_settings", @prepareObject options - parser.overwrite options, options, @storeOptions - - __running__: -> @runScript "running", [] - - __queued__: -> @runScript "queued", [] - - __done__: -> @runScript "done", [] - - __groupCheck__: -> @convertBool await @runScript "group_check", [] - - __incrementReservoir__: (incr) -> @runScript "increment_reservoir", [incr] - - __currentReservoir__: -> @runScript "current_reservoir", [] - - __check__: (weight) -> @convertBool await @runScript "check", @prepareArray [weight] - - __register__: (index, weight, expiration) -> - [success, wait, reservoir] = await @runScript "register", @prepareArray [index, weight, expiration] - return { - success: @convertBool(success), - wait, - reservoir - } - - __submit__: (queueLength, weight) -> - try - [reachedHWM, blocked, strategy] = await @runScript "submit", @prepareArray [queueLength, weight] - return { - reachedHWM: @convertBool(reachedHWM), - blocked: @convertBool(blocked), - strategy - } - catch e - if e.message.indexOf("OVERWEIGHT") == 0 - [overweight, weight, maxConcurrent] = e.message.split ":" - throw new BottleneckError("Impossible to add a job having a weight of #{weight} to a limiter having a maxConcurrent setting of #{maxConcurrent}") - else - throw e - - __free__: (index, weight) -> - running = await @runScript "free", @prepareArray [index] - return { running } - -module.exports = RedisDatastore diff --git a/src/RedisDatastore.js b/src/RedisDatastore.js new file mode 100644 index 0000000..34dcb6f --- /dev/null +++ b/src/RedisDatastore.js @@ -0,0 +1,260 @@ +const parser = require("./parser"); +const 
BottleneckError = require("./BottleneckError"); +const RedisConnection = require("./RedisConnection"); +const IORedisConnection = require("./IORedisConnection"); + +class RedisDatastore { + constructor(instance, storeOptions, storeInstanceOptions) { + this.instance = instance; + this.storeOptions = storeOptions; + this.originalId = this.instance.id; + this.clientId = this.instance._randomIndex(); + parser.load(storeInstanceOptions, storeInstanceOptions, this); + this.clients = {}; + this.capacityPriorityCounters = {}; + this.sharedConnection = this.connection != null; + + if (!this.connection) { + if (this.instance.datastore === "redis") { + this.connection = new RedisConnection({ + Redis: this.Redis, + clientOptions: this.clientOptions, + Promise: Promise, + Events: this.instance.Events, + }); + } else if (this.instance.datastore === "ioredis") { + this.connection = new IORedisConnection({ + Redis: this.Redis, + clientOptions: this.clientOptions, + clusterNodes: this.clusterNodes, + Promise: Promise, + Events: this.instance.Events, + }); + } + } + + this.instance.connection = this.connection; + this.instance.datastore = this.connection.datastore; + + this.ready = this.connection.ready + .then((clients) => { + this.clients = clients; + return this.runScript("init", this.prepareInitSettings(this.clearDatastore)); + }) + .then(() => this.connection.__addLimiter__(this.instance)) + .then(() => this.runScript("register_client", [this.instance.queued()])) + .then(() => { + this.heartbeat = setInterval(() => { + return this.runScript("heartbeat", []).catch((e) => + this.instance.Events.trigger("error", e), + ); + }, this.heartbeatInterval).unref?.(); + return this.clients; + }); + } + + async __publish__(message) { + const { client } = await this.ready; + return client.publish(this.instance.channel(), `message:${message.toString()}`); + } + + async onMessage(channel, message) { + try { + const pos = message.indexOf(":"); + const [type, data] = [message.slice(0, pos), 
message.slice(pos + 1)]; + if (type === "capacity") { + return await this.instance._drainAll(data.length > 0 ? ~~data : undefined); + } else if (type === "capacity-priority") { + const [rawCapacity, priorityClient, counter] = data.split(":"); + const capacity = rawCapacity.length > 0 ? ~~rawCapacity : undefined; + if (priorityClient === this.clientId) { + const drained = await this.instance._drainAll(capacity); + const newCapacity = capacity != null ? capacity - (drained || 0) : ""; + return await this.clients.client.publish( + this.instance.channel(), + `capacity-priority:${newCapacity}::${counter}`, + ); + } else if (priorityClient === "") { + clearTimeout(this.capacityPriorityCounters[counter]); + delete this.capacityPriorityCounters[counter]; + return this.instance._drainAll(capacity); + } else { + return (this.capacityPriorityCounters[counter] = setTimeout(async () => { + try { + delete this.capacityPriorityCounters[counter]; + await this.runScript("blacklist_client", [priorityClient]); + return await this.instance._drainAll(capacity); + } catch (e) { + return this.instance.Events.trigger("error", e); + } + }, 1000)); + } + } else if (type === "message") { + return this.instance.Events.trigger("message", data); + } else if (type === "blocked") { + return await this.instance._dropAllQueued(); + } + } catch (error) { + const e = error; + return this.instance.Events.trigger("error", e); + } + } + + async __disconnect__(flush) { + clearInterval(this.heartbeat); + if (this.sharedConnection) { + await this.connection.__removeLimiter__(this.instance); + } else { + return this.connection.disconnect(flush); + } + } + + async runScript(name, args) { + if (name !== "init" && name !== "register_client") { + await this.ready; + } + return new Promise((resolve, reject) => { + const all_args = [Date.now(), this.clientId].concat(args); + this.instance.Events.trigger("debug", `Calling Redis script: ${name}.lua`, all_args); + const arr = this.connection.__scriptArgs__( + name, 
+ this.originalId, + all_args, + function (err, replies) { + if (err != null) { + return reject(err); + } + return resolve(replies); + }, + ); + return this.connection.__scriptFn__(name)(...(arr || [])); + }).catch((e) => { + if ( + typeof e.message === "string" && + e.message.match(/^(.*\s)?SETTINGS_KEY_NOT_FOUND$/) !== null + ) { + if (name === "heartbeat") { + return Promise.resolve(); + } else { + return this.runScript("init", this.prepareInitSettings(false)).then(() => + this.runScript(name, args), + ); + } + } else if ( + typeof e.message === "string" && + e.message.match(/^(.*\s)?UNKNOWN_CLIENT$/) !== null + ) { + return this.runScript("register_client", [this.instance.queued()]).then(() => + this.runScript(name, args), + ); + } else { + return Promise.reject(e); + } + }); + } + + prepareArray(arr) { + return arr.map((x) => (x != null ? x.toString() : "")); + } + + prepareObject(obj) { + const arr = []; + for (const [k, v] of Object.entries(obj)) { + arr.push(k, v != null ? v.toString() : ""); + } + return arr; + } + + prepareInitSettings(clear) { + const args = this.prepareObject( + Object.assign({}, this.storeOptions, { + id: this.originalId, + version: this.instance.version, + groupTimeout: this.timeout, + clientTimeout: this.clientTimeout, + }), + ); + args.unshift(clear ? 
1 : 0, this.instance.version); + return args; + } + + convertBool(b) { + return !!b; + } + + async __updateSettings__(options) { + await this.runScript("update_settings", this.prepareObject(options)); + return parser.overwrite(options, options, this.storeOptions); + } + + __running__() { + return this.runScript("running", []); + } + + __queued__() { + return this.runScript("queued", []); + } + + __done__() { + return this.runScript("done", []); + } + + async __groupCheck__() { + return this.convertBool(await this.runScript("group_check", [])); + } + + __incrementReservoir__(incr) { + return this.runScript("increment_reservoir", [incr]); + } + + __currentReservoir__() { + return this.runScript("current_reservoir", []); + } + + async __check__(weight) { + return this.convertBool(await this.runScript("check", this.prepareArray([weight]))); + } + + async __register__(index, weight, expiration) { + const [success, wait, reservoir] = await this.runScript( + "register", + this.prepareArray([index, weight, expiration]), + ); + + return { + success: this.convertBool(success), + wait, + reservoir, + }; + } + + async __submit__(queueLength, weight) { + try { + const [reachedHWM, blocked, strategy] = Array.from( + await this.runScript("submit", this.prepareArray([queueLength, weight])), + ); + return { + reachedHWM: this.convertBool(reachedHWM), + blocked: this.convertBool(blocked), + strategy, + }; + } catch (e) { + if (/^(ERR )?OVERWEIGHT/.test(e.message)) { + let maxConcurrent; + [, weight, maxConcurrent] = e.message.split(":"); + throw new BottleneckError( + `Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${maxConcurrent}`, + ); + } else { + throw e; + } + } + } + + async __free__(index, _weight) { + const running = await this.runScript("free", this.prepareArray([index])); + return { running }; + } +} + +module.exports = RedisDatastore; diff --git a/src/Scripts.coffee b/src/Scripts.coffee deleted file mode 100644 index 
9d99b4b..0000000 --- a/src/Scripts.coffee +++ /dev/null @@ -1,151 +0,0 @@ -lua = require "./lua.json" - -headers = - refs: lua["refs.lua"] - validate_keys: lua["validate_keys.lua"] - validate_client: lua["validate_client.lua"] - refresh_expiration: lua["refresh_expiration.lua"] - process_tick: lua["process_tick.lua"] - conditions_check: lua["conditions_check.lua"] - get_time: lua["get_time.lua"] - -exports.allKeys = (id) -> [ - ### - HASH - ### - "b_#{id}_settings" - - ### - HASH - job index -> weight - ### - "b_#{id}_job_weights" - - ### - ZSET - job index -> expiration - ### - "b_#{id}_job_expirations" - - ### - HASH - job index -> client - ### - "b_#{id}_job_clients" - - ### - ZSET - client -> sum running - ### - "b_#{id}_client_running" - - ### - HASH - client -> num queued - ### - "b_#{id}_client_num_queued" - - ### - ZSET - client -> last job registered - ### - "b_#{id}_client_last_registered" - - ### - ZSET - client -> last seen - ### - "b_#{id}_client_last_seen" -] - -templates = - init: - keys: exports.allKeys - headers: ["process_tick"] - refresh_expiration: true - code: lua["init.lua"] - group_check: - keys: exports.allKeys - headers: [] - refresh_expiration: false - code: lua["group_check.lua"] - register_client: - keys: exports.allKeys - headers: ["validate_keys"] - refresh_expiration: true - code: lua["register_client.lua"] - blacklist_client: - keys: exports.allKeys - headers: ["validate_keys", "validate_client"] - refresh_expiration: false - code: lua["blacklist_client.lua"] - heartbeat: - keys: exports.allKeys - headers: ["validate_keys", "validate_client", "process_tick"] - refresh_expiration: false - code: lua["heartbeat.lua"] - update_settings: - keys: exports.allKeys - headers: ["validate_keys", "validate_client", "process_tick"] - refresh_expiration: true - code: lua["update_settings.lua"] - running: - keys: exports.allKeys - headers: ["validate_keys", "validate_client", "process_tick"] - refresh_expiration: false - code: lua["running.lua"] - 
queued: - keys: exports.allKeys - headers: ["validate_keys", "validate_client"] - refresh_expiration: false - code: lua["queued.lua"] - done: - keys: exports.allKeys - headers: ["validate_keys", "validate_client", "process_tick"] - refresh_expiration: false - code: lua["done.lua"] - check: - keys: exports.allKeys - headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"] - refresh_expiration: false - code: lua["check.lua"] - submit: - keys: exports.allKeys - headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"] - refresh_expiration: true - code: lua["submit.lua"] - register: - keys: exports.allKeys - headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"] - refresh_expiration: true - code: lua["register.lua"] - free: - keys: exports.allKeys - headers: ["validate_keys", "validate_client", "process_tick"] - refresh_expiration: true - code: lua["free.lua"] - current_reservoir: - keys: exports.allKeys - headers: ["validate_keys", "validate_client", "process_tick"] - refresh_expiration: false - code: lua["current_reservoir.lua"] - increment_reservoir: - keys: exports.allKeys - headers: ["validate_keys", "validate_client", "process_tick"] - refresh_expiration: true - code: lua["increment_reservoir.lua"] - -exports.names = Object.keys templates - -exports.keys = (name, id) -> - templates[name].keys id - -exports.payload = (name) -> - template = templates[name] - Array::concat( - headers.refs, - template.headers.map((h) -> headers[h]), - (if template.refresh_expiration then headers.refresh_expiration else ""), - template.code - ) - .join("\n") diff --git a/src/Scripts.js b/src/Scripts.js new file mode 100644 index 0000000..1e423f1 --- /dev/null +++ b/src/Scripts.js @@ -0,0 +1,153 @@ +const lua = require("../ref/lua.json"); + +const headers = { + refs: lua["refs.lua"], + validate_keys: lua["validate_keys.lua"], + validate_client: lua["validate_client.lua"], + refresh_expiration: 
lua["refresh_expiration.lua"], + process_tick: lua["process_tick.lua"], + conditions_check: lua["conditions_check.lua"], + get_time: lua["get_time.lua"], +}; + +exports.allKeys = (id) => [ + // HASH + `b_${id}_settings`, + + // HASH + // job index -> weight + `b_${id}_job_weights`, + + // ZSET + // job index -> expiration + `b_${id}_job_expirations`, + + // HASH + // job index -> client + `b_${id}_job_clients`, + + // ZSET + // client -> sum running + `b_${id}_client_running`, + + // HASH + // client -> num queued + `b_${id}_client_num_queued`, + + // ZSET + // client -> last job registered + `b_${id}_client_last_registered`, + + // ZSET + // client -> last seen + `b_${id}_client_last_seen`, +]; + +const templates = { + init: { + keys: exports.allKeys, + headers: ["process_tick"], + refresh_expiration: true, + code: lua["init.lua"], + }, + group_check: { + keys: exports.allKeys, + headers: [], + refresh_expiration: false, + code: lua["group_check.lua"], + }, + register_client: { + keys: exports.allKeys, + headers: ["validate_keys"], + refresh_expiration: true, + code: lua["register_client.lua"], + }, + blacklist_client: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client"], + refresh_expiration: false, + code: lua["blacklist_client.lua"], + }, + heartbeat: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: false, + code: lua["heartbeat.lua"], + }, + update_settings: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: true, + code: lua["update_settings.lua"], + }, + running: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: false, + code: lua["running.lua"], + }, + queued: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client"], + refresh_expiration: false, + code: lua["queued.lua"], + }, + done: { + keys: exports.allKeys, + headers: 
["validate_keys", "validate_client", "process_tick"], + refresh_expiration: false, + code: lua["done.lua"], + }, + check: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], + refresh_expiration: false, + code: lua["check.lua"], + }, + submit: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], + refresh_expiration: true, + code: lua["submit.lua"], + }, + register: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"], + refresh_expiration: true, + code: lua["register.lua"], + }, + free: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: true, + code: lua["free.lua"], + }, + current_reservoir: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: false, + code: lua["current_reservoir.lua"], + }, + increment_reservoir: { + keys: exports.allKeys, + headers: ["validate_keys", "validate_client", "process_tick"], + refresh_expiration: true, + code: lua["increment_reservoir.lua"], + }, +}; + +exports.names = Object.keys(templates); + +exports.keys = (name, id) => templates[name].keys(id); + +exports.payload = function (name) { + const template = templates[name]; + return Array.prototype + .concat( + headers.refs, + template.headers.map((h) => headers[h]), + template.refresh_expiration ? headers.refresh_expiration : "", + template.code, + ) + .join("\n"); +}; diff --git a/src/States.coffee b/src/States.coffee deleted file mode 100644 index c382c3d..0000000 --- a/src/States.coffee +++ /dev/null @@ -1,43 +0,0 @@ -BottleneckError = require "./BottleneckError" -class States - constructor: (@status) -> - @_jobs = {} - @counts = @status.map(-> 0) - - next: (id) -> - current = @_jobs[id] - next = current + 1 - if current? 
and next < @status.length - @counts[current]-- - @counts[next]++ - @_jobs[id]++ - else if current? - @counts[current]-- - delete @_jobs[id] - - start: (id) -> - initial = 0 - @_jobs[id] = initial - @counts[initial]++ - - remove: (id) -> - current = @_jobs[id] - if current? - @counts[current]-- - delete @_jobs[id] - current? - - jobStatus: (id) -> @status[@_jobs[id]] ? null - - statusJobs: (status) -> - if status? - pos = @status.indexOf status - if pos < 0 - throw new BottleneckError "status must be one of #{@status.join ', '}" - k for k,v of @_jobs when v == pos - else - Object.keys @_jobs - - statusCounts: -> @counts.reduce(((acc, v, i) => acc[@status[i]] = v; acc), {}) - -module.exports = States diff --git a/src/States.js b/src/States.js new file mode 100644 index 0000000..94e89d9 --- /dev/null +++ b/src/States.js @@ -0,0 +1,67 @@ +const BottleneckError = require("./BottleneckError"); +class States { + constructor(status) { + this.status = status; + this._jobs = {}; + this.counts = this.status.map(() => 0); + } + + next(id) { + const current = this._jobs[id]; + const next = current + 1; + if (current != null && next < this.status.length) { + this.counts[current]--; + this.counts[next]++; + this._jobs[id]++; + } else if (current != null) { + this.counts[current]--; + delete this._jobs[id]; + } + } + + start(id) { + const initial = 0; + this._jobs[id] = initial; + return this.counts[initial]++; + } + + remove(id) { + const current = this._jobs[id]; + if (current != null) { + this.counts[current]--; + delete this._jobs[id]; + } + return current != null; + } + + jobStatus(id) { + return this.status[this._jobs[id]] ?? 
null; + } + + statusJobs(status) { + if (status != null) { + const pos = this.status.indexOf(status); + if (pos < 0) { + throw new BottleneckError(`status must be one of ${this.status.join(", ")}`); + } + const result = []; + for (const [k, v] of Object.entries(this._jobs)) { + if (v === pos) { + result.push(k); + } + } + return result; + } else { + return Object.keys(this._jobs); + } + } + + statusCounts() { + return this.counts.reduce((acc, v, i) => { + acc[this.status[i]] = v; + return acc; + }, {}); + } +} + +module.exports = States; diff --git a/src/Sync.coffee b/src/Sync.coffee deleted file mode 100644 index 9df4513..0000000 --- a/src/Sync.coffee +++ /dev/null @@ -1,28 +0,0 @@ -DLList = require "./DLList" -class Sync - constructor: (@name, @Promise) -> - @_running = 0 - @_queue = new DLList() - isEmpty: -> @_queue.length == 0 - _tryToRun: -> - if (@_running < 1) and @_queue.length > 0 - @_running++ - { task, args, resolve, reject } = @_queue.shift() - cb = try - returned = await task args... - () -> resolve returned - catch error - () -> reject error - @_running-- - @_tryToRun() - cb() - schedule: (task, args...) 
=> - resolve = reject = null - promise = new @Promise (_resolve, _reject) -> - resolve = _resolve - reject = _reject - @_queue.push { task, args, resolve, reject } - @_tryToRun() - promise - -module.exports = Sync diff --git a/src/Sync.js b/src/Sync.js new file mode 100644 index 0000000..0fe39a2 --- /dev/null +++ b/src/Sync.js @@ -0,0 +1,41 @@ +const DLList = require("./DLList"); +class Sync { + constructor(name) { + this.schedule = this.schedule.bind(this); + this.name = name; + this._running = 0; + this._queue = new DLList(); + } + isEmpty() { + return this._queue.length === 0; + } + async _tryToRun() { + if (this._running < 1 && this._queue.length > 0) { + this._running++; + const { task, args, resolve, reject } = this._queue.shift(); + let cb; + try { + const returned = await task(...(args || [])); + cb = () => resolve(returned); + } catch (error) { + cb = () => reject(error); + } + this._running--; + this._tryToRun(); + cb(); + } + } + schedule(task, ...args) { + let reject; + let resolve = (reject = null); + const promise = new Promise(function (_resolve, _reject) { + resolve = _resolve; + reject = _reject; + }); + this._queue.push({ task, args, resolve, reject }); + this._tryToRun(); + return promise; + } +} + +module.exports = Sync; diff --git a/src/es5.coffee b/src/es5.coffee deleted file mode 100644 index 12761a7..0000000 --- a/src/es5.coffee +++ /dev/null @@ -1,3 +0,0 @@ -require("regenerator-runtime/runtime") - -module.exports = require "./Bottleneck" diff --git a/src/es5.js b/src/es5.js new file mode 100644 index 0000000..9440cea --- /dev/null +++ b/src/es5.js @@ -0,0 +1,3 @@ +require("regenerator-runtime/runtime"); + +module.exports = require("./Bottleneck"); diff --git a/src/index.coffee b/src/index.coffee deleted file mode 100644 index 7a7fcb2..0000000 --- a/src/index.coffee +++ /dev/null @@ -1 +0,0 @@ -module.exports = require "./Bottleneck" diff --git a/src/index.js b/src/index.js new file mode 100644 index 0000000..fae69f0 --- /dev/null +++ 
b/src/index.js @@ -0,0 +1 @@ +module.exports = require("./Bottleneck"); diff --git a/src/parser.coffee b/src/parser.coffee deleted file mode 100644 index b662fb1..0000000 --- a/src/parser.coffee +++ /dev/null @@ -1,10 +0,0 @@ -exports.load = (received, defaults, onto={}) -> - for k, v of defaults - onto[k] = received[k] ? v - onto - -exports.overwrite = (received, defaults, onto={}) -> - for k, v of received - if defaults[k] != undefined - onto[k] = v - onto diff --git a/src/parser.js b/src/parser.js new file mode 100644 index 0000000..49c8ab4 --- /dev/null +++ b/src/parser.js @@ -0,0 +1,17 @@ +exports.load = function (received, defaults, onto) { + onto ??= {}; + for (const [k, v] of Object.entries(defaults)) { + onto[k] = received[k] ?? v; + } + return onto; +}; + +exports.overwrite = function (received, defaults, onto) { + onto ??= {}; + for (const [k, v] of Object.entries(received)) { + if (defaults[k] !== undefined) { + onto[k] = v; + } + } + return onto; +}; diff --git a/test.ts b/test.ts index bf064a7..07bcb00 100644 --- a/test.ts +++ b/test.ts @@ -2,7 +2,7 @@ import Bottleneck from "bottleneck"; // import * as assert from "assert"; -function assert(b: boolean): void { } +function assert(b: boolean): void {} /* This file is run by scripts/build.sh. 
@@ -29,10 +29,12 @@ let limiter = new Bottleneck({ reservoirRefreshAmount: 10, reservoirIncreaseInterval: 1000 * 60, reservoirIncreaseAmount: 2, - reservoirIncreaseMaximum: 15 + reservoirIncreaseMaximum: 15, }); -limiter.ready().then(() => { console.log('Ready') }); +limiter.ready().then(() => { + console.log("Ready"); +}); limiter.clients().client; limiter.disconnect(); @@ -60,11 +62,16 @@ limiter.done().then(function (x) { let i: number = x; }); -limiter.submit(withCb, 1, () => {}, (err, result) => { - let s: string = result; - console.log(s); - assert(s == "cb 1"); -}); +limiter.submit( + withCb, + 1, + () => {}, + (err, result) => { + let s: string = result; + console.log(s); + assert(s == "cb 1"); + }, +); function withPromise(foo: number, bar: () => void): PromiseLike { let s: string = `promise ${foo}`; @@ -80,88 +87,88 @@ foo.then(function (result: string) { limiter.on("message", (msg) => console.log(msg)); -limiter.publish(JSON.stringify({ a: "abc", b: { c: 123 }})); +limiter.publish(JSON.stringify({ a: "abc", b: { c: 123 } })); function checkEventInfo(info: Bottleneck.EventInfo) { const numArgs: number = info.args.length; const id: string = info.options.id; } -limiter.on('dropped', (info) => { - checkEventInfo(info) +limiter.on("dropped", (info) => { + checkEventInfo(info); const task: Function = info.task; const promise: Promise = info.promise; -}) +}); -limiter.on('received', (info) => { - checkEventInfo(info) -}) +limiter.on("received", (info) => { + checkEventInfo(info); +}); -limiter.on('queued', (info) => { - checkEventInfo(info) +limiter.on("queued", (info) => { + checkEventInfo(info); const blocked: boolean = info.blocked; const reachedHWM: boolean = info.reachedHWM; -}) +}); -limiter.on('scheduled', (info) => { - checkEventInfo(info) -}) +limiter.on("scheduled", (info) => { + checkEventInfo(info); +}); -limiter.on('executing', (info) => { - checkEventInfo(info) +limiter.on("executing", (info) => { + checkEventInfo(info); const count: number = 
info.retryCount; -}) +}); -limiter.on('failed', (error, info) => { - checkEventInfo(info) +limiter.on("failed", (error, info) => { + checkEventInfo(info); const message: string = error.message; const count: number = info.retryCount; - return Promise.resolve(10) -}) + return Promise.resolve(10); +}); -limiter.on('failed', (error, info) => { - checkEventInfo(info) +limiter.on("failed", (error, info) => { + checkEventInfo(info); const message: string = error.message; const count: number = info.retryCount; - return Promise.resolve(null) -}) + return Promise.resolve(null); +}); -limiter.on('failed', (error, info) => { - checkEventInfo(info) +limiter.on("failed", (error, info) => { + checkEventInfo(info); const message: string = error.message; const count: number = info.retryCount; - return Promise.resolve() -}) + return Promise.resolve(); +}); -limiter.on('failed', (error, info) => { - checkEventInfo(info) +limiter.on("failed", (error, info) => { + checkEventInfo(info); const message: string = error.message; const count: number = info.retryCount; - return 10 -}) + return 10; +}); -limiter.on('failed', (error, info) => { - checkEventInfo(info) +limiter.on("failed", (error, info) => { + checkEventInfo(info); const message: string = error.message; const count: number = info.retryCount; - return null -}) + return null; +}); -limiter.on('failed', (error, info) => { - checkEventInfo(info) +limiter.on("failed", (error, info) => { + checkEventInfo(info); const message: string = error.message; const count: number = info.retryCount; -}) +}); -limiter.on('retry', (message: string, info) => { - checkEventInfo(info) +limiter.on("retry", (message: string, info) => { + checkEventInfo(info); const count: number = info.retryCount; -}) +}); -limiter.on('done', (info) => { - checkEventInfo(info) +limiter.on("done", (info) => { + checkEventInfo(info); const count: number = info.retryCount; -}) +}); let group = new Bottleneck.Group({ maxConcurrent: 5, @@ -171,25 +178,36 @@ let group = new 
Bottleneck.Group({ datastore: "ioredis", clearDatastore: true, clientOptions: {}, - clusterNodes: [] + clusterNodes: [], }); -group.on('created', (limiter, key) => { - assert(limiter.empty()) - assert(key.length > 0) -}) +group.on("created", (limiter, key) => { + assert(limiter.empty()); + assert(key.length > 0); +}); -group.key("foo").submit(withCb, 2, () => {}, (err, result) => { +group.key("foo").submit( + withCb, + 2, + () => {}, + (err, result) => { let s: string = `${result} foo`; console.log(s); assert(s == "cb 2 foo"); -}); - -group.key("bar").submit({ priority: 4 }, withCb, 3, () => {}, (err, result) => { + }, +); + +group.key("bar").submit( + { priority: 4 }, + withCb, + 3, + () => {}, + (err, result) => { let s: string = `${result} bar`; console.log(s); assert(s == "cb 3 foo"); -}); + }, +); let f1: Promise = group.key("pizza").schedule(withPromise, 2, () => {}); f1.then(function (result: string) { @@ -216,46 +234,45 @@ wrapped(1, 2).then((x) => { assert(s == "Total: 3"); }); -wrapped.withOptions({ priority: 1, id: 'some-id' }, 9, 9).then((x) => { +wrapped.withOptions({ priority: 1, id: "some-id" }, 9, 9).then((x) => { let s: string = x; console.log(s); assert(s == "Total: 18"); -}) +}); let counts = limiter.counts(); console.log(`${counts.EXECUTING + 2}`); -console.log(limiter.jobStatus('some-id')) +console.log(limiter.jobStatus("some-id")); console.log(limiter.jobs()); console.log(limiter.jobs(Bottleneck.Status.RUNNING)); - -group.deleteKey("pizza") -.then(function (deleted: boolean) { - console.log(deleted) +group.deleteKey("pizza").then(function (deleted: boolean) { + console.log(deleted); }); group.updateSettings({ timeout: 5, maxConcurrent: null, reservoir: null }); let keys: string[] = group.keys(); assert(keys.length == 3); -group.clusterKeys() -.then(function (allKeys: string[]) { +group.clusterKeys().then(function (allKeys: string[]) { let count = allKeys.length; -}) +}); let queued: number = limiter.chain(group.key("pizza")).queued(); 
-limiter.stop({ - dropWaitingJobs: true, - dropErrorMessage: "Begone!", - enqueueErrorMessage: "Denied!" -}).then(() => { - console.log('All stopped.') -}) +limiter + .stop({ + dropWaitingJobs: true, + dropErrorMessage: "Begone!", + enqueueErrorMessage: "Denied!", + }) + .then(() => { + console.log("All stopped."); + }); wrapped(4, 5).catch((e) => { - assert(e.message === "Denied!") -}) + assert(e.message === "Denied!"); +}); const id: string = limiter.id; const datastore: string = limiter.datastore; @@ -263,73 +280,69 @@ const channel: string = limiter.channel(); const redisConnection = new Bottleneck.RedisConnection({ client: "NodeRedis client object", - clientOptions: {} -}) + clientOptions: {}, +}); -redisConnection.ready() -.then(function (redisConnectionClients) { +redisConnection.ready().then(function (redisConnectionClients) { const client = redisConnectionClients.client; const subscriber = redisConnectionClients.subscriber; -}) +}); redisConnection.on("error", (err) => { - console.log(err.message) -}) + console.log(err.message); +}); const limiterWithConn = new Bottleneck({ - connection: redisConnection -}) + connection: redisConnection, +}); const ioredisConnection = new Bottleneck.IORedisConnection({ client: "ioredis client object", clientOptions: {}, - clusterNodes: [] -}) + clusterNodes: [], +}); -ioredisConnection.ready() -.then(function (ioredisConnectionClients) { +ioredisConnection.ready().then(function (ioredisConnectionClients) { const client = ioredisConnectionClients.client; const subscriber = ioredisConnectionClients.subscriber; -}) +}); ioredisConnection.on("error", (err: Bottleneck.BottleneckError) => { - console.log(err.message) -}) + console.log(err.message); +}); const groupWithConn = new Bottleneck.Group({ - connection: ioredisConnection -}) + connection: ioredisConnection, +}); const limiterWithConnFromGroup = new Bottleneck({ - connection: groupWithConn.connection -}) + connection: groupWithConn.connection, +}); const 
groupWithConnFromLimiter = new Bottleneck.Group({ - connection: limiterWithConn.connection -}) - + connection: limiterWithConn.connection, +}); const batcher = new Bottleneck.Batcher({ maxTime: 1000, - maxSize: 10 -}) + maxSize: 10, +}); batcher.on("batch", (batch) => { - const len: number = batch.length - console.log("Number of elements:", len) -}) + const len: number = batch.length; + console.log("Number of elements:", len); +}); batcher.on("error", (err: Bottleneck.BottleneckError) => { - console.log(err.message) -}) - -batcher.add("abc") -batcher.add({ xyz: 5 }) -.then(() => console.log("Flushed!")) - -const object = {} -const emitter = new Bottleneck.Events(object) -const listenerCount: number = emitter.listenerCount('info') -emitter.trigger('info', 'hello', 'world', 123).then(function (result) { - console.log(result) -}) + console.log(err.message); +}); + +batcher.add("abc"); +batcher.add({ xyz: 5 }).then(() => console.log("Flushed!")); + +const object = {}; +const emitter = new Bottleneck.Events(object); +const listenerCount: number = emitter.listenerCount("info"); +emitter.trigger("info", "hello", "world", 123).then(function (result) { + console.log(result); +}); diff --git a/test/DLList.js b/test/DLList.js index 505bdcd..e488f97 100644 --- a/test/DLList.js +++ b/test/DLList.js @@ -1,148 +1,146 @@ -var DLList = require('../lib/DLList') -var assert = require('assert') -var c = require('./context')({datastore: 'local'}) +var DLList = require("../src/DLList"); +var c = require("./context")({ datastore: "local" }); +const { describe, it } = require("mocha"); var fakeQueues = function () { - this._length = 0 - this.incr = () => this._length++ - this.decr = () => this._length-- - this.fns = [this.incr, this.decr] -} - -describe('DLList', function () { - - it('Should be created and be empty', function () { - var queues = new fakeQueues() - var list = new DLList() - c.mustEqual(list.getArray().length, 0) - }) - - it('Should be possible to append once', function () 
{ - var queues = new fakeQueues() - var list = new DLList(...queues.fns) - list.push(5) - var arr = list.getArray() - c.mustEqual(arr.length, 1) - c.mustEqual(list.length, 1) - c.mustEqual(queues._length, 1) - c.mustEqual(arr[0], 5) - }) - - it('Should be possible to append multiple times', function () { - var queues = new fakeQueues() - var list = new DLList(...queues.fns) - list.push(5) - list.push(6) - var arr = list.getArray() - c.mustEqual(arr.length, 2) - c.mustEqual(list.length, 2) - c.mustEqual(queues._length, 2) - c.mustEqual(arr[0], 5) - c.mustEqual(arr[1], 6) - - list.push(10) - - arr = list.getArray() - c.mustEqual(arr.length, 3) - c.mustEqual(list.length, 3) - c.mustEqual(arr[0], 5) - c.mustEqual(arr[1], 6) - c.mustEqual(arr[2], 10) - }) - - it('Should be possible to shift an empty list', function () { - var queues = new fakeQueues() - var list = new DLList(...queues.fns) - c.mustEqual(list.length, 0) - assert(list.shift() === undefined) - var arr = list.getArray() - c.mustEqual(arr.length, 0) - c.mustEqual(list.length, 0) - assert(list.shift() === undefined) - arr = list.getArray() - c.mustEqual(arr.length, 0) - c.mustEqual(list.length, 0) - c.mustEqual(queues._length, 0) - }) - - it('Should be possible to append then shift once', function () { - var queues = new fakeQueues() - var list = new DLList(...queues.fns) - list.push(5) - c.mustEqual(list.length, 1) - c.mustEqual(list.shift(), 5) - var arr = list.getArray() - c.mustEqual(arr.length, 0) - c.mustEqual(list.length, 0) - c.mustEqual(queues._length, 0) - }) - - it('Should be possible to append then shift multiple times', function () { - var queues = new fakeQueues() - var list = new DLList(...queues.fns) - list.push(5) - c.mustEqual(list.length, 1) - c.mustEqual(list.shift(), 5) - c.mustEqual(list.length, 0) - - list.push(6) - c.mustEqual(list.length, 1) - c.mustEqual(list.shift(), 6) - c.mustEqual(list.length, 0) - c.mustEqual(queues._length, 0) - }) - - it('Should pass a full test', function () 
{ - var queues = new fakeQueues() - var list = new DLList(...queues.fns) - list.push(10) - c.mustEqual(list.length, 1) - list.push("11") - c.mustEqual(list.length, 2) - list.push(12) - c.mustEqual(list.length, 3) - c.mustEqual(queues._length, 3) - - c.mustEqual(list.shift(), 10) - c.mustEqual(list.length, 2) - c.mustEqual(list.shift(),"11") - c.mustEqual(list.length, 1) - - list.push(true) - c.mustEqual(list.length, 2) - - var arr = list.getArray() - c.mustEqual(arr[0], 12) - c.mustEqual(arr[1], true) - c.mustEqual(arr.length, 2) - c.mustEqual(queues._length, 2) - }) - - it('Should return the first value without shifting', function () { - var queues = new fakeQueues() - var list = new DLList(...queues.fns) - assert(list.first() === undefined) - assert(list.first() === undefined) - - list.push(1) - c.mustEqual(list.first(), 1) - c.mustEqual(list.first(), 1) - - list.push(2) - c.mustEqual(list.first(), 1) - c.mustEqual(list.first(), 1) - - c.mustEqual(list.shift(), 1) - c.mustEqual(list.first(), 2) - c.mustEqual(list.first(), 2) - - c.mustEqual(list.shift(), 2) - assert(list.first() === undefined) - assert(list.first() === undefined) - - assert(list.first() === undefined) - assert(list.shift() === undefined) - assert(list.first() === undefined) - }) - -}) + this._length = 0; + this.incr = () => this._length++; + this.decr = () => this._length--; + this.fns = [this.incr, this.decr]; +}; + +describe("DLList", function () { + it("Should be created and be empty", function () { + new fakeQueues(); + var list = new DLList(); + c.mustEqual(list.getArray().length, 0); + }); + + it("Should be possible to append once", function () { + var queues = new fakeQueues(); + var list = new DLList(...queues.fns); + list.push(5); + var arr = list.getArray(); + c.mustEqual(arr.length, 1); + c.mustEqual(list.length, 1); + c.mustEqual(queues._length, 1); + c.mustEqual(arr[0], 5); + }); + + it("Should be possible to append multiple times", function () { + var queues = new fakeQueues(); + 
var list = new DLList(...queues.fns); + list.push(5); + list.push(6); + var arr = list.getArray(); + c.mustEqual(arr.length, 2); + c.mustEqual(list.length, 2); + c.mustEqual(queues._length, 2); + c.mustEqual(arr[0], 5); + c.mustEqual(arr[1], 6); + + list.push(10); + + arr = list.getArray(); + c.mustEqual(arr.length, 3); + c.mustEqual(list.length, 3); + c.mustEqual(arr[0], 5); + c.mustEqual(arr[1], 6); + c.mustEqual(arr[2], 10); + }); + + it("Should be possible to shift an empty list", function () { + var queues = new fakeQueues(); + var list = new DLList(...queues.fns); + c.mustEqual(list.length, 0); + c.mustEqual(list.shift(), undefined); + var arr = list.getArray(); + c.mustEqual(arr.length, 0); + c.mustEqual(list.length, 0); + c.mustEqual(list.shift(), undefined); + arr = list.getArray(); + c.mustEqual(arr.length, 0); + c.mustEqual(list.length, 0); + c.mustEqual(queues._length, 0); + }); + + it("Should be possible to append then shift once", function () { + var queues = new fakeQueues(); + var list = new DLList(...queues.fns); + list.push(5); + c.mustEqual(list.length, 1); + c.mustEqual(list.shift(), 5); + var arr = list.getArray(); + c.mustEqual(arr.length, 0); + c.mustEqual(list.length, 0); + c.mustEqual(queues._length, 0); + }); + + it("Should be possible to append then shift multiple times", function () { + var queues = new fakeQueues(); + var list = new DLList(...queues.fns); + list.push(5); + c.mustEqual(list.length, 1); + c.mustEqual(list.shift(), 5); + c.mustEqual(list.length, 0); + + list.push(6); + c.mustEqual(list.length, 1); + c.mustEqual(list.shift(), 6); + c.mustEqual(list.length, 0); + c.mustEqual(queues._length, 0); + }); + + it("Should pass a full test", function () { + var queues = new fakeQueues(); + var list = new DLList(...queues.fns); + list.push(10); + c.mustEqual(list.length, 1); + list.push("11"); + c.mustEqual(list.length, 2); + list.push(12); + c.mustEqual(list.length, 3); + c.mustEqual(queues._length, 3); + + c.mustEqual(list.shift(), 
10); + c.mustEqual(list.length, 2); + c.mustEqual(list.shift(), "11"); + c.mustEqual(list.length, 1); + + list.push(true); + c.mustEqual(list.length, 2); + + var arr = list.getArray(); + c.mustEqual(arr[0], 12); + c.mustEqual(arr[1], true); + c.mustEqual(arr.length, 2); + c.mustEqual(queues._length, 2); + }); + + it("Should return the first value without shifting", function () { + var queues = new fakeQueues(); + var list = new DLList(...queues.fns); + c.mustEqual(list.first(), undefined); + c.mustEqual(list.first(), undefined); + + list.push(1); + c.mustEqual(list.first(), 1); + c.mustEqual(list.first(), 1); + + list.push(2); + c.mustEqual(list.first(), 1); + c.mustEqual(list.first(), 1); + + c.mustEqual(list.shift(), 1); + c.mustEqual(list.first(), 2); + c.mustEqual(list.first(), 2); + + c.mustEqual(list.shift(), 2); + c.mustEqual(list.first(), undefined); + c.mustEqual(list.first(), undefined); + + c.mustEqual(list.first(), undefined); + c.mustEqual(list.shift(), undefined); + c.mustEqual(list.first(), undefined); + }); +}); diff --git a/test/batcher.js b/test/batcher.js index c195367..660b508 100644 --- a/test/batcher.js +++ b/test/batcher.js @@ -1,209 +1,251 @@ -var makeTest = require('./context') -var Bottleneck = require('./bottleneck') -var assert = require('assert') +var makeTest = require("./context"); +var Bottleneck = require("./bottleneck"); +const { describe, it, afterEach } = require("mocha"); -describe('Batcher', function () { - var c +describe("Batcher", function () { + var c; afterEach(function () { - return c.limiter.disconnect(false) - }) + return c.limiter.disconnect(false); + }); - it('Should batch by time and size', function () { - c = makeTest() + it("Should batch by time and size", function () { + c = makeTest(); var batcher = new Bottleneck.Batcher({ maxTime: 50, - maxSize: 3 - }) - var t0 = Date.now() - var batches = [] + maxSize: 3, + }); + var t0 = Date.now(); + var batches = []; - batcher.on('batch', function (batcher) { - 
batches.push(batcher) - }) + batcher.on("batch", function (groups) { + batches.push(groups); + }); return Promise.all([ - batcher.add(1).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 1)), - batcher.add(2).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 2)), - batcher.add(3).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 3)), - batcher.add(4).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 4)), - batcher.add(5).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 5)) + batcher.add(1).then(() => c.limiter.schedule(c.promise, null, Date.now() - t0, 1)), + batcher.add(2).then(() => c.limiter.schedule(c.promise, null, Date.now() - t0, 2)), + batcher.add(3).then(() => c.limiter.schedule(c.promise, null, Date.now() - t0, 3)), + batcher.add(4).then(() => c.limiter.schedule(c.promise, null, Date.now() - t0, 4)), + batcher.add(5).then(() => c.limiter.schedule(c.promise, null, Date.now() - t0, 5)), ]) - .then(function (data) { - c.mustEqual( - data.map((([t, x]) => [Math.floor(t / 50), x])), - [[0, 1], [0, 2], [0, 3], [1, 4], [1, 5]] - ) - - return c.last() - }) - .then(function (results) { - c.checkDuration(50, 20) - c.mustEqual(batches, [[1, 2, 3], [4, 5]]) - }) - }) - - it('Should batch by time', function () { - c = makeTest() + .then(function (data) { + c.mustEqual( + data.map(([t, x]) => [Math.floor(t / 50), x]), + [ + [0, 1], + [0, 2], + [0, 3], + [1, 4], + [1, 5], + ], + ); + + return c.last(); + }) + .then(function (_results) { + c.checkDuration(50, 20); + c.mustEqual(batches, [ + [1, 2, 3], + [4, 5], + ]); + }); + }); + + it("Should batch by time", function () { + c = makeTest(); var batcher = new Bottleneck.Batcher({ - maxTime: 50 - }) - var t0 = Date.now() - var batches = [] + maxTime: 50, + }); + var t0 = Date.now(); + var batches = []; - batcher.on('batch', function (batcher) { - batches.push(batcher) - }) + batcher.on("batch", function (groups) { + batches.push(groups); + }); 
return Promise.all([ - batcher.add(1).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 1)), - batcher.add(2).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 2)) + batcher.add(1).then((_x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 1)), + batcher.add(2).then((_x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 2)), ]) - .then(function (data) { - c.mustEqual( - data.map((([t, x]) => [Math.floor(t / 50), x])), - [[1, 1], [1, 2]] - ) - - return Promise.all([ - batcher.add(3).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 3)), - batcher.add(4).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 4)) - ]) - }) - .then(function (data) { - c.mustEqual( - data.map((([t, x]) => [Math.floor(t / 50), x])), - [[2, 3], [2, 4]] - ) - - return c.last() - }) - .then(function (results) { - c.checkDuration(100) - c.mustEqual(batches, [[1, 2], [3, 4]]) - }) - }) - - it('Should batch by size', function () { - c = makeTest() + .then(function (data) { + c.mustEqual( + data.map(([t, x]) => [Math.floor(t / 50), x]), + [ + [1, 1], + [1, 2], + ], + ); + + return Promise.all([ + batcher.add(3).then((_x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 3)), + batcher.add(4).then((_x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 4)), + ]); + }) + .then(function (data) { + c.mustEqual( + data.map(([t, x]) => [Math.floor(t / 50), x]), + [ + [2, 3], + [2, 4], + ], + ); + + return c.last(); + }) + .then(function (_results) { + c.checkDuration(100); + c.mustEqual(batches, [ + [1, 2], + [3, 4], + ]); + }); + }); + + it("Should batch by size", function () { + c = makeTest(); var batcher = new Bottleneck.Batcher({ - maxSize: 2 - }) - var batches = [] + maxSize: 2, + }); + var batches = []; - batcher.on('batch', function (batcher) { - batches.push(batcher) - }) + batcher.on("batch", function (groups) { + batches.push(groups); + }); return Promise.all([ - batcher.add(1).then((x) => 
c.limiter.schedule(c.promise, null, 1)), - batcher.add(2).then((x) => c.limiter.schedule(c.promise, null, 2)) + batcher.add(1).then((_x) => c.limiter.schedule(c.promise, null, 1)), + batcher.add(2).then((_x) => c.limiter.schedule(c.promise, null, 2)), ]) - .then(function () { - return Promise.all([ - batcher.add(3).then((x) => c.limiter.schedule(c.promise, null, 3)), - batcher.add(4).then((x) => c.limiter.schedule(c.promise, null, 4)) - ]) - }) - .then(c.last) - .then(function (results) { - c.checkDuration(0) - c.mustEqual(batches, [[1, 2], [3, 4]]) - }) - }) - - it('Should stagger flushes', function () { - c = makeTest() - var batcher = new Bottleneck.Batcher({ + .then(function () { + return Promise.all([ + batcher.add(3).then((_x) => c.limiter.schedule(c.promise, null, 3)), + batcher.add(4).then((_x) => c.limiter.schedule(c.promise, null, 4)), + ]); + }) + .then(c.last) + .then(function (_results) { + c.checkDuration(0); + c.mustEqual(batches, [ + [1, 2], + [3, 4], + ]); + }); + }); + + it("Should stagger flushes", async function () { + c = makeTest(); + const batcher = new Bottleneck.Batcher({ maxTime: 50, - maxSize: 3 - }) - var t0 = Date.now() - var batches = [] + maxSize: 3, + }); + const t0 = Date.now(); + const batches = []; - batcher.on('batch', function (batcher) { - batches.push(batcher) - }) + batcher.on("batch", function (groups) { + batches.push(groups); + }); - return Promise.all([ - batcher.add(1).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 1)), - batcher.add(2).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 2)) - ]) - .then(function (data) { - c.mustEqual( - data.map((([t, x]) => [Math.floor(t / 50), x])), - [[1, 1], [1, 2]] - ) + const [first, second] = await Promise.all([ + batcher.add(1).then(() => c.limiter.schedule(c.promise, null, Date.now() - t0, 1)), + batcher.add(2).then(() => c.limiter.schedule(c.promise, null, Date.now() - t0, 2)), + ]); - var promises = [] - promises.push(batcher.add(3).then((x) => 
c.limiter.schedule(c.promise, null, Date.now() - t0, 3))) + c.mustGte(first[0], 50); + c.mustEqual(first[1], 1); - return c.wait(10) - .then(function () { - promises.push(batcher.add(4).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 4))) + c.mustGte(second[0], 50); + c.mustEqual(second[1], 2); - return Promise.all(promises) - }) - }) - .then(function (data) { - c.mustEqual( - data.map((([t, x]) => [Math.floor(t / 50), x])), - [[2, 3], [2, 4]] - ) - - return c.last() - }) - .then(function (results) { - c.checkDuration(120, 20) - c.mustEqual(batches, [[1, 2], [3, 4]]) - }) - }) - - it('Should force then stagger flushes', function () { - c = makeTest() + const promises = [ + batcher.add(3).then(() => c.limiter.schedule(c.promise, null, Date.now() - t0, 3)), + ]; + + await c.wait(10); + + promises.push( + batcher.add(4).then(() => c.limiter.schedule(c.promise, null, Date.now() - t0, 4)), + ); + + const [third, fourth] = await Promise.all(promises); + + c.mustGte(third[0], 100); + c.mustEqual(third[1], 3); + + c.mustGte(fourth[0], 100); + c.mustEqual(fourth[1], 4); + + await c.last(); + + c.checkDuration(120, 20); + c.mustEqual(batches, [ + [1, 2], + [3, 4], + ]); + }); + + it("Should force then stagger flushes", function () { + c = makeTest(); var batcher = new Bottleneck.Batcher({ maxTime: 50, - maxSize: 3 - }) - var t0 = Date.now() - var batches = [] - - batcher.on('batch', function (batcher) { - batches.push(batcher) - }) - - var promises = [] - promises.push(batcher.add(1).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 1))) - promises.push(batcher.add(2).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 2))) - - return c.wait(10) - .then(function () { - promises.push(batcher.add(3).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 3))) - - return Promise.all(promises) - }) - .then(function (data) { - c.mustEqual( - data.map((([t, x]) => [Math.floor(t / 50), x])), - [[0, 1], [0, 2], [0, 3]] - ) - - 
return Promise.all([ - batcher.add(4).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 4)), - batcher.add(5).then((x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 5)), - ]) - }) - .then(function (data) { - c.mustEqual( - data.map((([t, x]) => [Math.floor(t / 50), x])), - [[1, 4], [1, 5]] - ) - - return c.last() - }) - .then(function (results) { - c.checkDuration(85, 25) - c.mustEqual(batches, [[1, 2, 3], [4, 5]]) - }) - }) -}) + maxSize: 3, + }); + var t0 = Date.now(); + var batches = []; + + batcher.on("batch", function (groups) { + batches.push(groups); + }); + + var promises = []; + promises.push( + batcher.add(1).then((_x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 1)), + ); + promises.push( + batcher.add(2).then((_x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 2)), + ); + + return c + .wait(10) + .then(function () { + promises.push( + batcher.add(3).then((_x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 3)), + ); + + return Promise.all(promises); + }) + .then(function (data) { + c.mustEqual( + data.map(([t, x]) => [Math.floor(t / 50), x]), + [ + [0, 1], + [0, 2], + [0, 3], + ], + ); + + return Promise.all([ + batcher.add(4).then((_x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 4)), + batcher.add(5).then((_x) => c.limiter.schedule(c.promise, null, Date.now() - t0, 5)), + ]); + }) + .then(function (data) { + c.mustEqual( + data.map(([t, x]) => [Math.floor(t / 50), x]), + [ + [1, 4], + [1, 5], + ], + ); + + return c.last(); + }) + .then(function (_results) { + c.checkDuration(85, 25); + c.mustEqual(batches, [ + [1, 2, 3], + [4, 5], + ]); + }); + }); +}); diff --git a/test/bottleneck.js b/test/bottleneck.js index a3bc0c8..ae049f9 100644 --- a/test/bottleneck.js +++ b/test/bottleneck.js @@ -1,7 +1,5 @@ -if (process.env.BUILD === 'es5') { - module.exports = require('../es5.js') -} else if (process.env.BUILD === 'light') { - module.exports = require('../light.js') +if (process.env.BUILD === 
"light") { + module.exports = require("../light.js"); } else { - module.exports = require('../lib/index.js') + module.exports = require("../src/index.js"); } diff --git a/test/cluster.js b/test/cluster.js index b3b29b5..8a29e7a 100644 --- a/test/cluster.js +++ b/test/cluster.js @@ -1,492 +1,496 @@ -var makeTest = require('./context') -var Bottleneck = require('./bottleneck') -var Scripts = require('../lib/Scripts.js') -var assert = require('assert') -var packagejson = require('../package.json') - -if (process.env.DATASTORE === 'redis' || process.env.DATASTORE === 'ioredis') { +var makeTest = require("./context"); +var Bottleneck = require("./bottleneck"); +var Scripts = require("../src/Scripts.js"); +var assert = require("assert"); +const { describe, it, afterEach } = require("mocha"); +if (process.env.DATASTORE === "redis" || process.env.DATASTORE === "ioredis") { var limiterKeys = function (limiter) { - return Scripts.allKeys(limiter._store.originalId) - } + return Scripts.allKeys(limiter._store.originalId); + }; var countKeys = function (limiter) { - return runCommand(limiter, 'exists', limiterKeys(limiter)) - } + return runCommand(limiter, "exists", limiterKeys(limiter)); + }; var deleteKeys = function (limiter) { - return runCommand(limiter, 'del', limiterKeys(limiter)) - } + return runCommand(limiter, "del", limiterKeys(limiter)); + }; var runCommand = function (limiter, command, args) { return new Promise(function (resolve, reject) { limiter._store.clients.client[command](...args, function (err, data) { - if (err != null) return reject(err) - return resolve(data) - }) - }) - } + if (err != null) return reject(err); + return resolve(data); + }); + }); + }; - describe('Cluster-only', function () { - var c + describe("Cluster-only", function () { + var c; afterEach(function () { - return c.limiter.disconnect(false) - }) + return c.limiter.disconnect(false); + }); - it('Should return a promise for ready()', function () { - c = makeTest({ maxConcurrent: 2 }) + 
it("Should return a promise for ready()", function () { + c = makeTest({ maxConcurrent: 2 }); - return c.limiter.ready() - }) + return c.limiter.ready(); + }); - it('Should return clients', function () { - c = makeTest({ maxConcurrent: 2 }) + it("Should return clients", function () { + c = makeTest({ maxConcurrent: 2 }); - return c.limiter.ready() - .then(function (clients) { - c.mustEqual(Object.keys(clients), ['client', 'subscriber']) - c.mustEqual(Object.keys(c.limiter.clients()), ['client', 'subscriber']) - }) - }) + return c.limiter.ready().then(function (clients) { + c.mustEqual(Object.keys(clients), ["client", "subscriber"]); + c.mustEqual(Object.keys(c.limiter.clients()), ["client", "subscriber"]); + }); + }); - it('Should return a promise when disconnecting', function () { - c = makeTest({ maxConcurrent: 2 }) + it("Should return a promise when disconnecting", function () { + c = makeTest({ maxConcurrent: 2 }); - return c.limiter.disconnect() - .then(function () { + return c.limiter.disconnect().then(function () { // do nothing - }) - }) + }); + }); - it('Should allow passing a limiter\'s connection to a new limiter', function () { - c = makeTest() - c.limiter.connection.id = 'some-id' + it("Should allow passing a limiter's connection to a new limiter", function () { + c = makeTest(); + c.limiter.connection.id = "some-id"; var limiter = new Bottleneck({ minTime: 50, - connection: c.limiter.connection - }) + connection: c.limiter.connection, + }); return Promise.all([c.limiter.ready(), limiter.ready()]) - .then(function () { - c.mustEqual(limiter.connection.id, 'some-id') - c.mustEqual(limiter.datastore, process.env.DATASTORE) - - return Promise.all([ - c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1), - c.pNoErrVal(limiter.schedule(c.promise, null, 2), 2) - ]) - }) - .then(c.last) - .then(function (results) { - c.checkResultsOrder([[1], [2]]) - c.checkDuration(0) - }) - }) + .then(function () { + c.mustEqual(limiter.connection.id, "some-id"); + 
c.mustEqual(limiter.datastore, process.env.DATASTORE); - it('Should allow passing a limiter\'s connection to a new Group', function () { - c = makeTest() - c.limiter.connection.id = 'some-id' + return Promise.all([ + c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1), + c.pNoErrVal(limiter.schedule(c.promise, null, 2), 2), + ]); + }) + .then(c.last) + .then(function (_results) { + c.checkResultsOrder([[1], [2]]); + c.checkDuration(0); + }); + }); + + it("Should allow passing a limiter's connection to a new Group", function () { + c = makeTest(); + c.limiter.connection.id = "some-id"; var group = new Bottleneck.Group({ minTime: 50, - connection: c.limiter.connection - }) - var limiter1 = group.key('A') - var limiter2 = group.key('B') + connection: c.limiter.connection, + }); + var limiter1 = group.key("A"); + var limiter2 = group.key("B"); return Promise.all([c.limiter.ready(), limiter1.ready(), limiter2.ready()]) - .then(function () { - c.mustEqual(limiter1.connection.id, 'some-id') - c.mustEqual(limiter2.connection.id, 'some-id') - c.mustEqual(limiter1.datastore, process.env.DATASTORE) - c.mustEqual(limiter2.datastore, process.env.DATASTORE) - - return Promise.all([ - c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1), - c.pNoErrVal(limiter1.schedule(c.promise, null, 2), 2), - c.pNoErrVal(limiter2.schedule(c.promise, null, 3), 3) - ]) - }) - .then(c.last) - .then(function (results) { - c.checkResultsOrder([[1], [2], [3]]) - c.checkDuration(0) - }) - }) - - it('Should allow passing a Group\'s connection to a new limiter', function () { - c = makeTest() + .then(function () { + c.mustEqual(limiter1.connection.id, "some-id"); + c.mustEqual(limiter2.connection.id, "some-id"); + c.mustEqual(limiter1.datastore, process.env.DATASTORE); + c.mustEqual(limiter2.datastore, process.env.DATASTORE); + + return Promise.all([ + c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1), + c.pNoErrVal(limiter1.schedule(c.promise, null, 2), 2), + 
c.pNoErrVal(limiter2.schedule(c.promise, null, 3), 3), + ]); + }) + .then(c.last) + .then(function (_results) { + c.checkResultsOrder([[1], [2], [3]]); + c.checkDuration(0); + }); + }); + + it("Should allow passing a Group's connection to a new limiter", function () { + c = makeTest(); var group = new Bottleneck.Group({ minTime: 50, datastore: process.env.DATASTORE, - clearDatastore: true - }) - group.connection.id = 'some-id' + clearDatastore: true, + }); + group.connection.id = "some-id"; - var limiter1 = group.key('A') + var limiter1 = group.key("A"); var limiter2 = new Bottleneck({ minTime: 50, - connection: group.connection - }) + connection: group.connection, + }); return Promise.all([limiter1.ready(), limiter2.ready()]) - .then(function () { - c.mustEqual(limiter1.connection.id, 'some-id') - c.mustEqual(limiter2.connection.id, 'some-id') - c.mustEqual(limiter1.datastore, process.env.DATASTORE) - c.mustEqual(limiter2.datastore, process.env.DATASTORE) - - return Promise.all([ - c.pNoErrVal(limiter1.schedule(c.promise, null, 1), 1), - c.pNoErrVal(limiter2.schedule(c.promise, null, 2), 2) - ]) - }) - .then(c.last) - .then(function (results) { - c.checkResultsOrder([[1], [2]]) - c.checkDuration(0) - return group.disconnect() - }) - }) - - it('Should allow passing a Group\'s connection to a new Group', function () { - c = makeTest() + .then(function () { + c.mustEqual(limiter1.connection.id, "some-id"); + c.mustEqual(limiter2.connection.id, "some-id"); + c.mustEqual(limiter1.datastore, process.env.DATASTORE); + c.mustEqual(limiter2.datastore, process.env.DATASTORE); + + return Promise.all([ + c.pNoErrVal(limiter1.schedule(c.promise, null, 1), 1), + c.pNoErrVal(limiter2.schedule(c.promise, null, 2), 2), + ]); + }) + .then(c.last) + .then(function (_results) { + c.checkResultsOrder([[1], [2]]); + c.checkDuration(0); + return group.disconnect(); + }); + }); + + it("Should allow passing a Group's connection to a new Group", function () { + c = makeTest(); var group1 = 
new Bottleneck.Group({ minTime: 50, datastore: process.env.DATASTORE, - clearDatastore: true - }) - group1.connection.id = 'some-id' + clearDatastore: true, + }); + group1.connection.id = "some-id"; var group2 = new Bottleneck.Group({ minTime: 50, connection: group1.connection, - clearDatastore: true - }) - - var limiter1 = group1.key('AAA') - var limiter2 = group1.key('BBB') - var limiter3 = group1.key('CCC') - var limiter4 = group1.key('DDD') - - return Promise.all([ - limiter1.ready(), - limiter2.ready(), - limiter3.ready(), - limiter4.ready() - ]) - .then(function () { - c.mustEqual(group1.connection.id, 'some-id') - c.mustEqual(group2.connection.id, 'some-id') - c.mustEqual(limiter1.connection.id, 'some-id') - c.mustEqual(limiter2.connection.id, 'some-id') - c.mustEqual(limiter3.connection.id, 'some-id') - c.mustEqual(limiter4.connection.id, 'some-id') - c.mustEqual(limiter1.datastore, process.env.DATASTORE) - c.mustEqual(limiter2.datastore, process.env.DATASTORE) - c.mustEqual(limiter3.datastore, process.env.DATASTORE) - c.mustEqual(limiter4.datastore, process.env.DATASTORE) - - return Promise.all([ - c.pNoErrVal(limiter1.schedule(c.promise, null, 1), 1), - c.pNoErrVal(limiter2.schedule(c.promise, null, 2), 2), - c.pNoErrVal(limiter3.schedule(c.promise, null, 3), 3), - c.pNoErrVal(limiter4.schedule(c.promise, null, 4), 4) - ]) - }) - .then(c.last) - .then(function (results) { - c.checkResultsOrder([[1], [2], [3], [4]]) - c.checkDuration(0) - return group1.disconnect() - }) - }) + clearDatastore: true, + }); - it('Should not have a key TTL by default for standalone limiters', function () { - c = makeTest() + var limiter1 = group1.key("AAA"); + var limiter2 = group1.key("BBB"); + var limiter3 = group1.key("CCC"); + var limiter4 = group1.key("DDD"); - return c.limiter.ready() - .then(function () { - var settings_key = limiterKeys(c.limiter)[0] - return runCommand(c.limiter, 'ttl', [settings_key]) - }) - .then(function (ttl) { - assert(ttl < 0) - }) - }) + return 
Promise.all([limiter1.ready(), limiter2.ready(), limiter3.ready(), limiter4.ready()]) + .then(function () { + c.mustEqual(group1.connection.id, "some-id"); + c.mustEqual(group2.connection.id, "some-id"); + c.mustEqual(limiter1.connection.id, "some-id"); + c.mustEqual(limiter2.connection.id, "some-id"); + c.mustEqual(limiter3.connection.id, "some-id"); + c.mustEqual(limiter4.connection.id, "some-id"); + c.mustEqual(limiter1.datastore, process.env.DATASTORE); + c.mustEqual(limiter2.datastore, process.env.DATASTORE); + c.mustEqual(limiter3.datastore, process.env.DATASTORE); + c.mustEqual(limiter4.datastore, process.env.DATASTORE); + + return Promise.all([ + c.pNoErrVal(limiter1.schedule(c.promise, null, 1), 1), + c.pNoErrVal(limiter2.schedule(c.promise, null, 2), 2), + c.pNoErrVal(limiter3.schedule(c.promise, null, 3), 3), + c.pNoErrVal(limiter4.schedule(c.promise, null, 4), 4), + ]); + }) + .then(c.last) + .then(function (_results) { + c.checkResultsOrder([[1], [2], [3], [4]]); + c.checkDuration(0); + return group1.disconnect(); + }); + }); + + it("Should not have a key TTL by default for standalone limiters", function () { + c = makeTest(); + + return c.limiter + .ready() + .then(function () { + var settings_key = limiterKeys(c.limiter)[0]; + return runCommand(c.limiter, "ttl", [settings_key]); + }) + .then(function (ttl) { + c.mustLt(ttl, 0); + }); + }); - it('Should allow timeout setting for standalone limiters', function () { - c = makeTest({ timeout: 5 * 60 * 1000 }) + it("Should allow timeout setting for standalone limiters", function () { + c = makeTest({ timeout: 5 * 60 * 1000 }); - return c.limiter.ready() - .then(function () { - var settings_key = limiterKeys(c.limiter)[0] - return runCommand(c.limiter, 'ttl', [settings_key]) - }) - .then(function (ttl) { - assert(ttl >= 290 && ttl <= 305) - }) - }) + return c.limiter + .ready() + .then(function () { + var settings_key = limiterKeys(c.limiter)[0]; + return runCommand(c.limiter, "ttl", [settings_key]); + }) 
+ .then(function (ttl) { + c.mustGte(ttl, 290); + c.mustLte(ttl, 305); + }); + }); - it('Should set TTL on all keys including client_* keys after register_client', async function () { - c = makeTest({ timeout: 5 * 60 * 1000 }) + it("Should set TTL on all keys including client_* keys after register_client", async function () { + c = makeTest({ timeout: 5 * 60 * 1000 }); - await c.limiter.ready() + await c.limiter.ready(); // Get all 8 keys for this limiter - var keys = limiterKeys(c.limiter) + var keys = limiterKeys(c.limiter); // Identify the client_* keys - var clientKeys = keys.filter(k => k.includes('_client_')) - + var clientKeys = keys.filter((k) => k.includes("_client_")); + // First verify that client_* keys actually exist (were created by register_client) for (var i = 0; i < clientKeys.length; i++) { - var key = clientKeys[i] - var exists = await runCommand(c.limiter, 'exists', [key]) - assert(exists === 1, `Expected ${key} to exist after register_client, but it doesn't`) + var key = clientKeys[i]; + var exists = await runCommand(c.limiter, "exists", [key]); + assert(exists === 1, `Expected ${key} to exist after register_client, but it doesn't`); } - + // Now verify that all keys have TTL set - for (var i = 0; i < keys.length; i++) { - var key = keys[i] - var ttl = await runCommand(c.limiter, 'ttl', [key]) - + for (i = 0; i < keys.length; i++) { + key = keys[i]; + var ttl = await runCommand(c.limiter, "ttl", [key]); + if (ttl == -2) continue; // key doesn't exist // TTL should be around 300 seconds (5 minutes) // -1 means no TTL (the bug we're fixing), -2 means key doesn't exist - assert(ttl >= 290 && ttl <= 305, + assert( + ttl >= 290 && ttl <= 305, `Expected TTL between 290-305 for ${key}, got ${ttl}. 
` + - `(-1 means no TTL set, -2 means key doesn't exist)`) + `(-1 means no TTL set, -2 means key doesn't exist)`, + ); } - }) + }); - it('Should compute reservoir increased based on number of missed intervals', async function () { + it("Should compute reservoir increased based on number of missed intervals", async function () { const settings = { - id: 'missed-intervals', + id: "missed-intervals", clearDatastore: false, reservoir: 2, reservoirIncreaseInterval: 100, reservoirIncreaseAmount: 2, - timeout: 2000 - } - c = makeTest({ ...settings }) - await c.limiter.ready() + timeout: 2000, + }; + c = makeTest({ ...settings }); + await c.limiter.ready(); - c.mustEqual(await c.limiter.currentReservoir(), 2) + c.mustEqual(await c.limiter.currentReservoir(), 2); - const settings_key = limiterKeys(c.limiter)[0] - await runCommand(c.limiter, 'hincrby', [settings_key, 'lastReservoirIncrease', -3000]) + const settings_key = limiterKeys(c.limiter)[0]; + await runCommand(c.limiter, "hincrby", [settings_key, "lastReservoirIncrease", -3000]); - const limiter2 = new Bottleneck({ ...settings, datastore: process.env.DATASTORE }) - await limiter2.ready() + const limiter2 = new Bottleneck({ ...settings, datastore: process.env.DATASTORE }); + await limiter2.ready(); - c.mustEqual(await c.limiter.currentReservoir(), 62) // 2 + ((3000 / 100) * 2) === 62 + c.mustEqual(await c.limiter.currentReservoir(), 62); // 2 + ((3000 / 100) * 2) === 62 - await limiter2.disconnect() - }) + await limiter2.disconnect(); + }); - it('Should migrate from 2.8.0', function () { - c = makeTest({ id: 'migrate' }) - var settings_key = limiterKeys(c.limiter)[0] - var limiter2 + it("Should migrate from 2.8.0", function () { + c = makeTest({ id: "migrate" }); + var settings_key = limiterKeys(c.limiter)[0]; + var limiter2; - return c.limiter.ready() - .then(function () { - var settings_key = limiterKeys(c.limiter)[0] - return Promise.all([ - runCommand(c.limiter, 'hset', [settings_key, 'version', '2.8.0']), - 
runCommand(c.limiter, 'hdel', [settings_key, 'done', 'capacityPriorityCounter', 'clientTimeout']), - runCommand(c.limiter, 'hset', [settings_key, 'lastReservoirRefresh', '']) - ]) - }) - .then(function () { - limiter2 = new Bottleneck({ - id: 'migrate', - datastore: process.env.DATASTORE + return c.limiter + .ready() + .then(function () { + var settings_key = limiterKeys(c.limiter)[0]; + return Promise.all([ + runCommand(c.limiter, "hset", [settings_key, "version", "2.8.0"]), + runCommand(c.limiter, "hdel", [ + settings_key, + "done", + "capacityPriorityCounter", + "clientTimeout", + ]), + runCommand(c.limiter, "hset", [settings_key, "lastReservoirRefresh", ""]), + ]); }) - return limiter2.ready() - }) - .then(function () { - return runCommand(c.limiter, 'hmget', [ - settings_key, - 'version', - 'done', - 'reservoirRefreshInterval', - 'reservoirRefreshAmount', - 'capacityPriorityCounter', - 'clientTimeout', - 'reservoirIncreaseAmount', - 'reservoirIncreaseMaximum', - // Add new values here, before these 2 timestamps - 'lastReservoirRefresh', - 'lastReservoirIncrease' - ]) - }) - .then(function (values) { - var timestamps = values.slice(-2) - timestamps.forEach((t) => assert(parseInt(t) > Date.now() - 500)) - c.mustEqual(values.slice(0, -timestamps.length), [ - '2.18.0', - '0', - '', - '', - '0', - '10000', - '', - '' - ]) - }) - .then(function () { - return limiter2.disconnect(false) - }) - }) + .then(function () { + limiter2 = new Bottleneck({ + id: "migrate", + datastore: process.env.DATASTORE, + }); + return limiter2.ready(); + }) + .then(function () { + return runCommand(c.limiter, "hmget", [ + settings_key, + "version", + "done", + "reservoirRefreshInterval", + "reservoirRefreshAmount", + "capacityPriorityCounter", + "clientTimeout", + "reservoirIncreaseAmount", + "reservoirIncreaseMaximum", + // Add new values here, before these 2 timestamps + "lastReservoirRefresh", + "lastReservoirIncrease", + ]); + }) + .then(function (values) { + var timestamps = 
values.slice(-2); + timestamps.forEach((t) => c.mustGt(parseInt(t), Date.now() - 500)); + c.mustEqual(values.slice(0, -timestamps.length), [ + "2.18.0", + "0", + "", + "", + "0", + "10000", + "", + "", + ]); + }) + .then(function () { + return limiter2.disconnect(false); + }); + }); - it('Should keep track of each client\'s queue length', async function () { + it("Should keep track of each client's queue length", async function () { c = makeTest({ - id: 'queues', + id: "queues", maxConcurrent: 1, - trackDoneStatus: true - }) + trackDoneStatus: true, + }); var limiter2 = new Bottleneck({ datastore: process.env.DATASTORE, - id: 'queues', + id: "queues", maxConcurrent: 1, - trackDoneStatus: true - }) - var client_num_queued_key = limiterKeys(c.limiter)[5] - var clientId1 = c.limiter._store.clientId - var clientId2 = limiter2._store.clientId + trackDoneStatus: true, + }); + var client_num_queued_key = limiterKeys(c.limiter)[5]; + var clientId1 = c.limiter._store.clientId; + var clientId2 = limiter2._store.clientId; - await c.limiter.ready() - await limiter2.ready() + await c.limiter.ready(); + await limiter2.ready(); - var p0 = c.limiter.schedule({id: 0}, c.slowPromise, 100, null, 0) - await c.limiter._submitLock.schedule(() => Promise.resolve()) + var p0 = c.limiter.schedule({ id: 0 }, c.slowPromise, 100, null, 0); + await c.limiter._submitLock.schedule(() => Promise.resolve()); - var p1 = c.limiter.schedule({id: 1}, c.promise, null, 1) - var p2 = c.limiter.schedule({id: 2}, c.promise, null, 2) - var p3 = limiter2.schedule({id: 3}, c.promise, null, 3) + var p1 = c.limiter.schedule({ id: 1 }, c.promise, null, 1); + var p2 = c.limiter.schedule({ id: 2 }, c.promise, null, 2); + var p3 = limiter2.schedule({ id: 3 }, c.promise, null, 3); await Promise.all([ c.limiter._submitLock.schedule(() => Promise.resolve()), - limiter2._submitLock.schedule(() => Promise.resolve()) - ]) + limiter2._submitLock.schedule(() => Promise.resolve()), + ]); - var queuedA = await 
runCommand(c.limiter, 'hgetall', [client_num_queued_key]) - c.mustEqual(c.limiter.counts().QUEUED, 2) - c.mustEqual(limiter2.counts().QUEUED, 1) - c.mustEqual(~~queuedA[clientId1], 2) - c.mustEqual(~~queuedA[clientId2], 1) - - c.mustEqual(await c.limiter.clusterQueued(), 3) - - await Promise.all([p0, p1, p2, p3]) - var queuedB = await runCommand(c.limiter, 'hgetall', [client_num_queued_key]) - c.mustEqual(c.limiter.counts().QUEUED, 0) - c.mustEqual(limiter2.counts().QUEUED, 0) - c.mustEqual(~~queuedB[clientId1], 0) - c.mustEqual(~~queuedB[clientId2], 0) - c.mustEqual(c.limiter.counts().DONE, 3) - c.mustEqual(limiter2.counts().DONE, 1) - - c.mustEqual(await c.limiter.clusterQueued(), 0) - - return limiter2.disconnect(false) - }) - - it('Should publish capacity increases', function () { - c = makeTest({ maxConcurrent: 2 }) - var limiter2 - var p3, p4 - - return c.limiter.ready() - .then(function () { - limiter2 = new Bottleneck({ datastore: process.env.DATASTORE }) - return limiter2.ready() - }) - .then(function () { - var p1 = c.limiter.schedule({id: 1}, c.slowPromise, 100, null, 1) - var p2 = c.limiter.schedule({id: 2}, c.slowPromise, 100, null, 2) + var queuedA = await runCommand(c.limiter, "hgetall", [client_num_queued_key]); + c.mustEqual(c.limiter.counts().QUEUED, 2); + c.mustEqual(limiter2.counts().QUEUED, 1); + c.mustEqual(~~queuedA[clientId1], 2); + c.mustEqual(~~queuedA[clientId2], 1); - return c.limiter.schedule({id: 0, weight: 0}, c.promise, null, 0) - }) - .then(function () { - return limiter2.schedule({id: 3}, c.slowPromise, 100, null, 3) - }) - .then(c.last) - .then(function (results) { - c.checkResultsOrder([[0], [1], [2], [3]]) - c.checkDuration(200) + c.mustEqual(await c.limiter.clusterQueued(), 3); - return limiter2.disconnect(false) - }) - }) + await Promise.all([p0, p1, p2, p3]); + var queuedB = await runCommand(c.limiter, "hgetall", [client_num_queued_key]); + c.mustEqual(c.limiter.counts().QUEUED, 0); + c.mustEqual(limiter2.counts().QUEUED, 0); 
+ c.mustEqual(~~queuedB[clientId1], 0); + c.mustEqual(~~queuedB[clientId2], 0); + c.mustEqual(c.limiter.counts().DONE, 3); + c.mustEqual(limiter2.counts().DONE, 1); + + c.mustEqual(await c.limiter.clusterQueued(), 0); + + return limiter2.disconnect(false); + }); + + it("Should publish capacity increases", function () { + c = makeTest({ maxConcurrent: 2 }); + var limiter2; - it('Should publish capacity changes on reservoir changes', function () { + return c.limiter + .ready() + .then(function () { + limiter2 = new Bottleneck({ datastore: process.env.DATASTORE }); + return limiter2.ready(); + }) + .then(function () { + c.limiter.schedule({ id: 1 }, c.slowPromise, 100, null, 1); + c.limiter.schedule({ id: 2 }, c.slowPromise, 100, null, 2); + + return c.limiter.schedule({ id: 0, weight: 0 }, c.promise, null, 0); + }) + .then(function () { + return limiter2.schedule({ id: 3 }, c.slowPromise, 100, null, 3); + }) + .then(c.last) + .then(function (_results) { + c.checkResultsOrder([[0], [1], [2], [3]]); + c.checkDuration(200); + + return limiter2.disconnect(false); + }); + }); + + it("Should publish capacity changes on reservoir changes", function () { c = makeTest({ maxConcurrent: 2, - reservoir: 2 - }) - var limiter2 - var p3, p4 + reservoir: 2, + }); + var limiter2; + var p3; - return c.limiter.ready() - .then(function () { - limiter2 = new Bottleneck({ - datastore: process.env.DATASTORE, + return c.limiter + .ready() + .then(function () { + limiter2 = new Bottleneck({ + datastore: process.env.DATASTORE, + }); + return limiter2.ready(); }) - return limiter2.ready() - }) - .then(function () { - var p1 = c.limiter.schedule({id: 1}, c.slowPromise, 100, null, 1) - var p2 = c.limiter.schedule({id: 2}, c.slowPromise, 100, null, 2) + .then(function () { + c.limiter.schedule({ id: 1 }, c.slowPromise, 100, null, 1); + c.limiter.schedule({ id: 2 }, c.slowPromise, 100, null, 2); - return c.limiter.schedule({id: 0, weight: 0}, c.promise, null, 0) - }) - .then(function () { - p3 = 
limiter2.schedule({id: 3, weight: 2}, c.slowPromise, 100, null, 3) - return c.limiter.currentReservoir() - }) - .then(function (reservoir) { - c.mustEqual(reservoir, 0) - return c.limiter.updateSettings({ reservoir: 1 }) - }) - .then(function () { - return c.limiter.incrementReservoir(1) - }) - .then(function (reservoir) { - c.mustEqual(reservoir, 2) - return p3 - }) - .then(function (result) { - c.mustEqual(result, [3]) - return c.limiter.currentReservoir() - }) - .then(function (reservoir) { - c.mustEqual(reservoir, 0) - return c.last({ weight: 0 }) - }) - .then(function (results) { - c.checkResultsOrder([[0], [1], [2], [3]]) - c.checkDuration(210) - }) - .then(function (data) { - return limiter2.disconnect(false) - }) - }) + return c.limiter.schedule({ id: 0, weight: 0 }, c.promise, null, 0); + }) + .then(function () { + p3 = limiter2.schedule({ id: 3, weight: 2 }, c.slowPromise, 100, null, 3); + return c.limiter.currentReservoir(); + }) + .then(function (reservoir) { + c.mustEqual(reservoir, 0); + return c.limiter.updateSettings({ reservoir: 1 }); + }) + .then(function () { + return c.limiter.incrementReservoir(1); + }) + .then(function (reservoir) { + c.mustEqual(reservoir, 2); + return p3; + }) + .then(function (result) { + c.mustEqual(result, [3]); + return c.limiter.currentReservoir(); + }) + .then(function (reservoir) { + c.mustEqual(reservoir, 0); + return c.last({ weight: 0 }); + }) + .then(function (_results) { + c.checkResultsOrder([[0], [1], [2], [3]]); + c.checkDuration(210); + }) + .then(function (_data) { + return limiter2.disconnect(false); + }); + }); - it('Should remove track job data and remove lost jobs', function () { + it("Should remove track job data and remove lost jobs", function () { c = makeTest({ - id: 'lost', - errorEventsExpected: true - }) - var clientId = c.limiter._store.clientId - var limiter1 = new Bottleneck({ datastore: process.env.DATASTORE }) + id: "lost", + errorEventsExpected: true, + }); + var clientId = 
c.limiter._store.clientId; + var limiter1 = new Bottleneck({ datastore: process.env.DATASTORE }); var limiter2 = new Bottleneck({ - id: 'lost', - datastore: process.env.DATASTORE, - heartbeatInterval: 150 - }) + id: "lost", + datastore: process.env.DATASTORE, + heartbeatInterval: 150, + }); var getData = function (limiter) { - c.mustEqual(limiterKeys(limiter).length, 8) // Asserting, to remember to edit this test when keys change + c.mustEqual(limiterKeys(limiter).length, 8); // Asserting, to remember to edit this test when keys change var [ settings_key, job_weights_key, @@ -495,57 +499,63 @@ if (process.env.DATASTORE === 'redis' || process.env.DATASTORE === 'ioredis') { client_running_key, client_num_queued_key, client_last_registered_key, - client_last_seen_key - ] = limiterKeys(limiter) + client_last_seen_key, + ] = limiterKeys(limiter); return Promise.all([ - runCommand(limiter1, 'hmget', [settings_key, 'running', 'done']), - runCommand(limiter1, 'hgetall', [job_weights_key]), - runCommand(limiter1, 'zcard', [job_expirations_key]), - runCommand(limiter1, 'hvals', [job_clients_key]), - runCommand(limiter1, 'zrange', [client_running_key, '0', '-1', 'withscores']), - runCommand(limiter1, 'hvals', [client_num_queued_key]), - runCommand(limiter1, 'zrange', [client_last_registered_key, '0', '-1', 'withscores']), - runCommand(limiter1, 'zrange', [client_last_seen_key, '0', '-1', 'withscores']) - ]) - } + runCommand(limiter1, "hmget", [settings_key, "running", "done"]), + runCommand(limiter1, "hgetall", [job_weights_key]), + runCommand(limiter1, "zcard", [job_expirations_key]), + runCommand(limiter1, "hvals", [job_clients_key]), + runCommand(limiter1, "zrange", [client_running_key, "0", "-1", "withscores"]), + runCommand(limiter1, "hvals", [client_num_queued_key]), + runCommand(limiter1, "zrange", [client_last_registered_key, "0", "-1", "withscores"]), + runCommand(limiter1, "zrange", [client_last_seen_key, "0", "-1", "withscores"]), + ]); + }; var sumWeights = 
function (weights) { return Object.keys(weights).reduce((acc, x) => { - return acc + ~~weights[x] - }, 0) - } - var numExpirations = 0 + return acc + ~~weights[x]; + }, 0); + }; + var numExpirations = 0; var errorHandler = function (err) { - if (err.message.indexOf('This job timed out') === 0) { - numExpirations++ + if (err.message.indexOf("This job timed out") === 0) { + numExpirations++; } - } + }; return Promise.all([c.limiter.ready(), limiter1.ready(), limiter2.ready()]) - .then(function () { - // No expiration, it should not be removed - c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.slowPromise, 150, null, 1), 1), - - // Expiration present, these jobs should be removed automatically - c.limiter.schedule({ expiration: 50, weight: 2 }, c.slowPromise, 75, null, 2).catch(errorHandler) - c.limiter.schedule({ expiration: 50, weight: 3 }, c.slowPromise, 75, null, 3).catch(errorHandler) - c.limiter.schedule({ expiration: 50, weight: 4 }, c.slowPromise, 75, null, 4).catch(errorHandler) - c.limiter.schedule({ expiration: 50, weight: 5 }, c.slowPromise, 75, null, 5).catch(errorHandler) - - return c.limiter._submitLock.schedule(() => Promise.resolve(true)) - }) - .then(function () { - return c.limiter._drainAll() - }) - .then(function () { - return c.limiter.disconnect(false) - }) - .then(function () { - }) - .then(function () { - return getData(c.limiter) - }) - .then(function ([ + .then(function () { + // No expiration, it should not be removed + (c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.slowPromise, 150, null, 1), 1), + // Expiration present, these jobs should be removed automatically + c.limiter + .schedule({ expiration: 50, weight: 2 }, c.slowPromise, 75, null, 2) + .catch(errorHandler)); + c.limiter + .schedule({ expiration: 50, weight: 3 }, c.slowPromise, 75, null, 3) + .catch(errorHandler); + c.limiter + .schedule({ expiration: 50, weight: 4 }, c.slowPromise, 75, null, 4) + .catch(errorHandler); + c.limiter + .schedule({ expiration: 50, weight: 5 }, 
c.slowPromise, 75, null, 5) + .catch(errorHandler); + + return c.limiter._submitLock.schedule(() => Promise.resolve(true)); + }) + .then(function () { + return c.limiter._drainAll(); + }) + .then(function () { + return c.limiter.disconnect(false); + }) + .then(function () {}) + .then(function () { + return getData(c.limiter); + }) + .then(function ([ settings, job_weights, job_expirations, @@ -553,270 +563,276 @@ if (process.env.DATASTORE === 'redis' || process.env.DATASTORE === 'ioredis') { client_running, client_num_queued, client_last_registered, - client_last_seen - ]) { - c.mustEqual(settings, ['15', '0']) - c.mustEqual(sumWeights(job_weights), 15) - c.mustEqual(job_expirations, 4) - c.mustEqual(job_clients.length, 5) - job_clients.forEach((id) => c.mustEqual(id, clientId)) - c.mustEqual(sumWeights(client_running), 15) - c.mustEqual(client_num_queued, ['0', '0']) - c.mustEqual(client_last_registered[1], '0') - assert(client_last_seen[1] > Date.now() - 1000) - var passed = Date.now() - parseFloat(client_last_registered[3]) - assert(passed > 0 && passed < 20) - - return c.wait(170) - }) - .then(function () { - return getData(c.limiter) - }) - .then(function ([ - settings, - job_weights, - job_expirations, - job_clients, - client_running, - client_num_queued, - client_last_registered, - client_last_seen - ]) { - c.mustEqual(settings, ['1', '14']) - c.mustEqual(sumWeights(job_weights), 1) - c.mustEqual(job_expirations, 0) - c.mustEqual(job_clients.length, 1) - job_clients.forEach((id) => c.mustEqual(id, clientId)) - c.mustEqual(sumWeights(client_running), 1) - c.mustEqual(client_num_queued, ['0', '0']) - c.mustEqual(client_last_registered[1], '0') - assert(client_last_seen[1] > Date.now() - 1000) - var passed = Date.now() - parseFloat(client_last_registered[3]) - assert(passed > 170 && passed < 200) - - c.mustEqual(numExpirations, 4) - }) - .then(function () { - return Promise.all([ - limiter1.disconnect(false), - limiter2.disconnect(false) - ]) - }) - }) + 
client_last_seen, + ]) { + c.mustEqual(settings, ["15", "0"]); + c.mustEqual(sumWeights(job_weights), 15); + c.mustEqual(job_expirations, 4); + c.mustEqual(job_clients.length, 5); + job_clients.forEach((id) => c.mustEqual(id, clientId)); + c.mustEqual(sumWeights(client_running), 15); + c.mustEqual(client_num_queued, ["0", "0"]); + c.mustEqual(client_last_registered[1], "0"); + c.mustGt(client_last_seen[1], Date.now() - 1000); + var passed = Date.now() - parseFloat(client_last_registered[3]); + c.mustGt(passed, 0); + c.mustLt(passed, 20); + + return c.wait(170); + }) + .then(function () { + return getData(c.limiter); + }) + .then(function ([ + settings, + job_weights, + job_expirations, + job_clients, + client_running, + client_num_queued, + client_last_registered, + client_last_seen, + ]) { + c.mustEqual(settings, ["1", "14"]); + c.mustEqual(sumWeights(job_weights), 1); + c.mustEqual(job_expirations, 0); + c.mustEqual(job_clients.length, 1); + job_clients.forEach((id) => c.mustEqual(id, clientId)); + c.mustEqual(sumWeights(client_running), 1); + c.mustEqual(client_num_queued, ["0", "0"]); + c.mustEqual(client_last_registered[1], "0"); + c.mustGt(client_last_seen[1], Date.now() - 1000); + var passed = Date.now() - parseFloat(client_last_registered[3]); + c.mustGt(passed, 170); + c.mustLt(passed, 200); + + c.mustEqual(numExpirations, 4); + }) + .then(function () { + return Promise.all([limiter1.disconnect(false), limiter2.disconnect(false)]); + }); + }); - it('Should clear unresponsive clients', async function () { + it("Should clear unresponsive clients", async function () { c = makeTest({ - id: 'unresponsive', + id: "unresponsive", maxConcurrent: 1, timeout: 1000, clientTimeout: 100, - heartbeat: 50 - }) + heartbeat: 50, + }); const limiter2 = new Bottleneck({ - id: 'unresponsive', - datastore: process.env.DATASTORE - }) - - await Promise.all([c.limiter.running(), limiter2.running()]) - - const client_running_key = limiterKeys(limiter2)[4] - const 
client_num_queued_key = limiterKeys(limiter2)[5] - const client_last_registered_key = limiterKeys(limiter2)[6] - const client_last_seen_key = limiterKeys(limiter2)[7] - const numClients = () => Promise.all([ - runCommand(c.limiter, 'zcard', [client_running_key]), - runCommand(c.limiter, 'hlen', [client_num_queued_key]), - runCommand(c.limiter, 'zcard', [client_last_registered_key]), - runCommand(c.limiter, 'zcard', [client_last_seen_key]) - ]) + id: "unresponsive", + datastore: process.env.DATASTORE, + }); - c.mustEqual(await numClients(), [2, 2, 2, 2]) + await Promise.all([c.limiter.running(), limiter2.running()]); - await limiter2.disconnect(false) - await c.wait(150) + const client_running_key = limiterKeys(limiter2)[4]; + const client_num_queued_key = limiterKeys(limiter2)[5]; + const client_last_registered_key = limiterKeys(limiter2)[6]; + const client_last_seen_key = limiterKeys(limiter2)[7]; + const numClients = () => + Promise.all([ + runCommand(c.limiter, "zcard", [client_running_key]), + runCommand(c.limiter, "hlen", [client_num_queued_key]), + runCommand(c.limiter, "zcard", [client_last_registered_key]), + runCommand(c.limiter, "zcard", [client_last_seen_key]), + ]); - await c.limiter.running() + c.mustEqual(await numClients(), [2, 2, 2, 2]); - c.mustEqual(await numClients(), [1, 1, 1, 1]) + await limiter2.disconnect(false); + await c.wait(150); - }) + await c.limiter.running(); + c.mustEqual(await numClients(), [1, 1, 1, 1]); + }); - it('Should not clear unresponsive clients with unexpired running jobs', async function () { + it("Should not clear unresponsive clients with unexpired running jobs", async function () { c = makeTest({ - id: 'unresponsive-unexpired', + id: "unresponsive-unexpired", maxConcurrent: 1, timeout: 1000, clientTimeout: 200, - heartbeat: 2000 - }) + heartbeat: 2000, + }); const limiter2 = new Bottleneck({ - id: 'unresponsive-unexpired', - datastore: process.env.DATASTORE - }) + id: "unresponsive-unexpired", + datastore: 
process.env.DATASTORE, + }); - await c.limiter.ready() - await limiter2.ready() - - const client_running_key = limiterKeys(limiter2)[4] - const client_num_queued_key = limiterKeys(limiter2)[5] - const client_last_registered_key = limiterKeys(limiter2)[6] - const client_last_seen_key = limiterKeys(limiter2)[7] - const numClients = () => Promise.all([ - runCommand(limiter2, 'zcard', [client_running_key]), - runCommand(limiter2, 'hlen', [client_num_queued_key]), - runCommand(limiter2, 'zcard', [client_last_registered_key]), - runCommand(limiter2, 'zcard', [client_last_seen_key]) - ]) + await c.limiter.ready(); + await limiter2.ready(); + + const client_running_key = limiterKeys(limiter2)[4]; + const client_num_queued_key = limiterKeys(limiter2)[5]; + const client_last_registered_key = limiterKeys(limiter2)[6]; + const client_last_seen_key = limiterKeys(limiter2)[7]; + const numClients = () => + Promise.all([ + runCommand(limiter2, "zcard", [client_running_key]), + runCommand(limiter2, "hlen", [client_num_queued_key]), + runCommand(limiter2, "zcard", [client_last_registered_key]), + runCommand(limiter2, "zcard", [client_last_seen_key]), + ]); - const job = c.limiter.schedule(c.slowPromise, 500, null, 1) + const job = c.limiter.schedule(c.slowPromise, 500, null, 1); - await c.wait(300) + await c.wait(300); // running() triggers process_tick and that will attempt to remove client 1 // but it shouldn't do it because it has a running job - c.mustEqual(await limiter2.running(), 1) + c.mustEqual(await limiter2.running(), 1); - c.mustEqual(await numClients(), [2, 2, 2, 2]) + c.mustEqual(await numClients(), [2, 2, 2, 2]); - await job + await job; - c.mustEqual(await limiter2.running(), 0) + c.mustEqual(await limiter2.running(), 0); - await limiter2.disconnect(false) - }) + await limiter2.disconnect(false); + }); - it('Should clear unresponsive clients after last jobs are expired', async function () { + it("Should clear unresponsive clients after last jobs are expired", async 
function () { c = makeTest({ - id: 'unresponsive-expired', + id: "unresponsive-expired", maxConcurrent: 1, timeout: 1000, clientTimeout: 200, - heartbeat: 2000 - }) + heartbeat: 2000, + }); const limiter2 = new Bottleneck({ - id: 'unresponsive-expired', - datastore: process.env.DATASTORE - }) - - await c.limiter.ready() - await limiter2.ready() - - const client_running_key = limiterKeys(limiter2)[4] - const client_num_queued_key = limiterKeys(limiter2)[5] - const client_last_registered_key = limiterKeys(limiter2)[6] - const client_last_seen_key = limiterKeys(limiter2)[7] - const numClients = () => Promise.all([ - runCommand(limiter2, 'zcard', [client_running_key]), - runCommand(limiter2, 'hlen', [client_num_queued_key]), - runCommand(limiter2, 'zcard', [client_last_registered_key]), - runCommand(limiter2, 'zcard', [client_last_seen_key]) - ]) - - const job = c.limiter.schedule({ expiration: 250 }, c.slowPromise, 300, null, 1) - await c.wait(100) // wait for it to register - - c.mustEqual(await c.limiter.running(), 1) - c.mustEqual(await numClients(), [2,2,2,2]) - - let dropped = false + id: "unresponsive-expired", + datastore: process.env.DATASTORE, + }); + + await c.limiter.ready(); + await limiter2.ready(); + + const client_running_key = limiterKeys(limiter2)[4]; + const client_num_queued_key = limiterKeys(limiter2)[5]; + const client_last_registered_key = limiterKeys(limiter2)[6]; + const client_last_seen_key = limiterKeys(limiter2)[7]; + const numClients = () => + Promise.all([ + runCommand(limiter2, "zcard", [client_running_key]), + runCommand(limiter2, "hlen", [client_num_queued_key]), + runCommand(limiter2, "zcard", [client_last_registered_key]), + runCommand(limiter2, "zcard", [client_last_seen_key]), + ]); + + const job = c.limiter.schedule({ expiration: 250 }, c.slowPromise, 300, null, 1); + await c.wait(100); // wait for it to register + + c.mustEqual(await c.limiter.running(), 1); + c.mustEqual(await numClients(), [2, 2, 2, 2]); + + let dropped = false; 
try { - await job + await job; } catch (e) { - if (e.message === 'This job timed out after 250 ms.') { - dropped = true + if (e.message === "This job timed out after 250 ms.") { + dropped = true; } else { - throw e + throw e; } } - assert(dropped) + assert(dropped, "Expected dropped to be true"); - await c.wait(200) + await c.wait(200); - c.mustEqual(await limiter2.running(), 0) - c.mustEqual(await numClients(), [1,1,1,1]) + c.mustEqual(await limiter2.running(), 0); + c.mustEqual(await numClients(), [1, 1, 1, 1]); - await limiter2.disconnect(false) - }) + await limiter2.disconnect(false); + }); - it('Should use shared settings', function () { - c = makeTest({ maxConcurrent: 2 }) - var limiter2 = new Bottleneck({ maxConcurrent: 1, datastore: process.env.DATASTORE }) + it("Should use shared settings", function () { + c = makeTest({ maxConcurrent: 2 }); + var limiter2 = new Bottleneck({ maxConcurrent: 1, datastore: process.env.DATASTORE }); return Promise.all([ limiter2.schedule(c.slowPromise, 100, null, 1), - limiter2.schedule(c.slowPromise, 100, null, 2) + limiter2.schedule(c.slowPromise, 100, null, 2), ]) - .then(function () { - return limiter2.disconnect(false) - }) - .then(function () { - return c.last() - }) - .then(function (results) { - c.checkResultsOrder([[1], [2]]) - c.checkDuration(100) - }) - }) - - it('Should clear previous settings', function () { - c = makeTest({ maxConcurrent: 2 }) - var limiter2 - - return c.limiter.ready() - .then(function () { - limiter2 = new Bottleneck({ maxConcurrent: 1, datastore: process.env.DATASTORE, clearDatastore: true }) - return limiter2.ready() - }) - .then(function () { - return Promise.all([ - c.limiter.schedule(c.slowPromise, 100, null, 1), - c.limiter.schedule(c.slowPromise, 100, null, 2) - ]) - }) - .then(function () { - return limiter2.disconnect(false) - }) - .then(function () { - return c.last() - }) - .then(function (results) { - c.checkResultsOrder([[1], [2]]) - c.checkDuration(200) - }) - }) + .then(function 
() { + return limiter2.disconnect(false); + }) + .then(function () { + return c.last(); + }) + .then(function (_results) { + c.checkResultsOrder([[1], [2]]); + c.checkDuration(100); + }); + }); + + it("Should clear previous settings", function () { + c = makeTest({ maxConcurrent: 2 }); + var limiter2; + + return c.limiter + .ready() + .then(function () { + limiter2 = new Bottleneck({ + maxConcurrent: 1, + datastore: process.env.DATASTORE, + clearDatastore: true, + }); + return limiter2.ready(); + }) + .then(function () { + return Promise.all([ + c.limiter.schedule(c.slowPromise, 100, null, 1), + c.limiter.schedule(c.slowPromise, 100, null, 2), + ]); + }) + .then(function () { + return limiter2.disconnect(false); + }) + .then(function () { + return c.last(); + }) + .then(function (_results) { + c.checkResultsOrder([[1], [2]]); + c.checkDuration(200); + }); + }); - it('Should safely handle connection failures', function () { + it("Should safely handle connection failures", function () { c = makeTest({ clientOptions: { port: 1 }, - errorEventsExpected: true - }) + errorEventsExpected: true, + }); return new Promise(function (resolve, reject) { - c.limiter.on('error', function (err) { - assert(err != null) - resolve() - }) - - c.limiter.ready() - .then(function () { - reject(new Error('Should not have connected')) - }) - .catch(function (err) { - reject(err) - }) - }) - }) + c.limiter.on("error", function (err) { + c.mustExist(err); + resolve(); + }); + + c.limiter + .ready() + .then(function () { + reject(new Error("Should not have connected")); + }) + .catch(function (err) { + reject(err); + }); + }); + }); - it('Should chain local and distributed limiters (total concurrency)', function () { - c = makeTest({ id: 'limiter1', maxConcurrent: 3 }) - var limiter2 = new Bottleneck({ id: 'limiter2', maxConcurrent: 1 }) - var limiter3 = new Bottleneck({ id: 'limiter3', maxConcurrent: 2 }) + it("Should chain local and distributed limiters (total concurrency)", function () { + 
c = makeTest({ id: "limiter1", maxConcurrent: 3 }); + var limiter2 = new Bottleneck({ id: "limiter2", maxConcurrent: 1 }); + var limiter3 = new Bottleneck({ id: "limiter3", maxConcurrent: 2 }); - limiter2.on('error', (err) => console.log(err)) + limiter2.on("error", (err) => console.log(err)); - limiter2.chain(c.limiter) - limiter3.chain(c.limiter) + limiter2.chain(c.limiter); + limiter3.chain(c.limiter); return Promise.all([ limiter2.schedule(c.slowPromise, 100, null, 1), @@ -824,31 +840,37 @@ if (process.env.DATASTORE === 'redis' || process.env.DATASTORE === 'ioredis') { limiter2.schedule(c.slowPromise, 100, null, 3), limiter3.schedule(c.slowPromise, 100, null, 4), limiter3.schedule(c.slowPromise, 100, null, 5), - limiter3.schedule(c.slowPromise, 100, null, 6) + limiter3.schedule(c.slowPromise, 100, null, 6), ]) - .then(c.last) - .then(function (results) { - c.checkDuration(300) - c.checkResultsOrder([[1], [4], [5], [2], [6], [3]]) - - assert(results.calls[0].time >= 100 && results.calls[0].time < 200) - assert(results.calls[1].time >= 100 && results.calls[1].time < 200) - assert(results.calls[2].time >= 100 && results.calls[2].time < 200) - - assert(results.calls[3].time >= 200 && results.calls[3].time < 300) - assert(results.calls[4].time >= 200 && results.calls[4].time < 300) - - assert(results.calls[5].time >= 300 && results.calls[2].time < 400) - }) - }) - - it('Should chain local and distributed limiters (partial concurrency)', function () { - c = makeTest({ maxConcurrent: 2 }) - var limiter2 = new Bottleneck({ maxConcurrent: 1 }) - var limiter3 = new Bottleneck({ maxConcurrent: 2 }) - - limiter2.chain(c.limiter) - limiter3.chain(c.limiter) + .then(c.last) + .then(function (results) { + c.checkDuration(300); + c.checkResultsOrder([[1], [4], [5], [2], [6], [3]]); + + c.mustGte(results.calls[0].time, 100); + c.mustLt(results.calls[0].time, 200); + c.mustGte(results.calls[1].time, 100); + c.mustLt(results.calls[1].time, 200); + c.mustGte(results.calls[2].time, 
100); + c.mustLt(results.calls[2].time, 200); + + c.mustGte(results.calls[3].time, 200); + c.mustLt(results.calls[3].time, 300); + c.mustGte(results.calls[4].time, 200); + c.mustLt(results.calls[4].time, 300); + + c.mustGte(results.calls[5].time, 300); + c.mustLt(results.calls[5].time, 400); + }); + }); + + it("Should chain local and distributed limiters (partial concurrency)", function () { + c = makeTest({ maxConcurrent: 2 }); + var limiter2 = new Bottleneck({ maxConcurrent: 1 }); + var limiter3 = new Bottleneck({ maxConcurrent: 2 }); + + limiter2.chain(c.limiter); + limiter3.chain(c.limiter); return Promise.all([ limiter2.schedule(c.slowPromise, 100, null, 1), @@ -856,72 +878,84 @@ if (process.env.DATASTORE === 'redis' || process.env.DATASTORE === 'ioredis') { limiter2.schedule(c.slowPromise, 100, null, 3), limiter3.schedule(c.slowPromise, 100, null, 4), limiter3.schedule(c.slowPromise, 100, null, 5), - limiter3.schedule(c.slowPromise, 100, null, 6) + limiter3.schedule(c.slowPromise, 100, null, 6), ]) - .then(c.last) - .then(function (results) { - c.checkDuration(300) - c.checkResultsOrder([[1], [4], [5], [2], [6], [3]]) - - assert(results.calls[0].time >= 100 && results.calls[0].time < 200) - assert(results.calls[1].time >= 100 && results.calls[1].time < 200) - - assert(results.calls[2].time >= 200 && results.calls[2].time < 300) - assert(results.calls[3].time >= 200 && results.calls[3].time < 300) - - assert(results.calls[4].time >= 300 && results.calls[4].time < 400) - assert(results.calls[5].time >= 300 && results.calls[2].time < 400) - }) - }) - - it('Should use the limiter ID to build Redis keys', function () { - c = makeTest() - var randomId = c.limiter._randomIndex() - var limiter = new Bottleneck({ id: randomId, datastore: process.env.DATASTORE, clearDatastore: true }) - - return limiter.ready() - .then(function () { - var keys = limiterKeys(limiter) - keys.forEach((key) => assert(key.indexOf(randomId) > 0)) - return deleteKeys(limiter) - }) - 
.then(function (deleted) { - c.mustEqual(deleted, 5) - return limiter.disconnect(false) - }) - }) - - it('Should not fail when Redis data is missing', function () { - c = makeTest() - var limiter = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true }) - - return limiter.running() - .then(function (running) { - c.mustEqual(running, 0) - return deleteKeys(limiter) - }) - .then(function (deleted) { - c.mustEqual(deleted, 5) - return countKeys(limiter) - }) - .then(function (count) { - c.mustEqual(count, 0) - return limiter.running() - }) - .then(function (running) { - c.mustEqual(running, 0) - return countKeys(limiter) - }) - .then(function (count) { - assert(count > 0) - return limiter.disconnect(false) - }) - }) + .then(c.last) + .then(function (results) { + c.checkDuration(300); + c.checkResultsOrder([[1], [4], [5], [2], [6], [3]]); + + c.mustGte(results.calls[0].time, 100); + c.mustLt(results.calls[0].time, 200); + c.mustGte(results.calls[1].time, 100); + c.mustLt(results.calls[1].time, 200); + + c.mustGte(results.calls[2].time, 200); + c.mustLt(results.calls[2].time, 300); + c.mustGte(results.calls[3].time, 200); + c.mustLt(results.calls[3].time, 300); + + c.mustGte(results.calls[4].time, 300); + c.mustLt(results.calls[4].time, 400); + c.mustGte(results.calls[5].time, 300); + c.mustLt(results.calls[5].time, 400); + }); + }); + + it("Should use the limiter ID to build Redis keys", function () { + c = makeTest(); + var randomId = c.limiter._randomIndex(); + var limiter = new Bottleneck({ + id: randomId, + datastore: process.env.DATASTORE, + clearDatastore: true, + }); - it('Should drop all jobs in the Cluster when entering blocked mode', function () { - c = makeTest() + return limiter + .ready() + .then(function () { + var keys = limiterKeys(limiter); + keys.forEach((key) => c.mustGt(key.indexOf(randomId), 0)); + return deleteKeys(limiter); + }) + .then(function (deleted) { + c.mustEqual(deleted, 5); + return limiter.disconnect(false); + }); + 
}); + + it("Should not fail when Redis data is missing", function () { + c = makeTest(); + var limiter = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true }); + + return limiter + .running() + .then(function (running) { + c.mustEqual(running, 0); + return deleteKeys(limiter); + }) + .then(function (deleted) { + c.mustEqual(deleted, 5); + return countKeys(limiter); + }) + .then(function (count) { + c.mustEqual(count, 0); + return limiter.running(); + }) + .then(function (running) { + c.mustEqual(running, 0); + return countKeys(limiter); + }) + .then(function (count) { + c.mustGt(count, 0); + return limiter.disconnect(false); + }); + }); + + it("Should drop all jobs in the Cluster when entering blocked mode", function () { + c = makeTest(); var limiter1 = new Bottleneck({ - id: 'blocked', + id: "blocked", trackDoneStatus: true, datastore: process.env.DATASTORE, clearDatastore: true, @@ -929,654 +963,673 @@ if (process.env.DATASTORE === 'redis' || process.env.DATASTORE === 'ioredis') { maxConcurrent: 1, minTime: 50, highWater: 2, - strategy: Bottleneck.strategy.BLOCK - }) - var limiter2 - var client_num_queued_key = limiterKeys(limiter1)[5] - - return limiter1.ready() - .then(function () { - limiter2 = new Bottleneck({ - id: 'blocked', - trackDoneStatus: true, - datastore: process.env.DATASTORE, - clearDatastore: false, - }) - return limiter2.ready() - }) - .then(function () { - return Promise.all([ - limiter1.submit(c.slowJob, 100, null, 1, c.noErrVal(1)), - limiter1.submit(c.slowJob, 100, null, 2, (err) => c.mustExist(err)) - ]) - }) - .then(function () { - return Promise.all([ - limiter2.submit(c.slowJob, 100, null, 3, (err) => c.mustExist(err)), - limiter2.submit(c.slowJob, 100, null, 4, (err) => c.mustExist(err)), - limiter2.submit(c.slowJob, 100, null, 5, (err) => c.mustExist(err)) - ]) - }) - .then(function () { - return runCommand(limiter1, 'hvals', [client_num_queued_key]) - }) - .then(function (queues) { - c.mustEqual(queues, ['0', 
'0']) + strategy: Bottleneck.strategy.BLOCK, + }); + var limiter2; + var client_num_queued_key = limiterKeys(limiter1)[5]; - return Promise.all([ - c.limiter.clusterQueued(), - limiter2.clusterQueued() - ]) - }) - .then(function (queues) { - c.mustEqual(queues, [0, 0]) + return limiter1 + .ready() + .then(function () { + limiter2 = new Bottleneck({ + id: "blocked", + trackDoneStatus: true, + datastore: process.env.DATASTORE, + clearDatastore: false, + }); + return limiter2.ready(); + }) + .then(function () { + return Promise.all([ + limiter1.submit(c.slowJob, 100, null, 1, c.noErrVal(1)), + limiter1.submit(c.slowJob, 100, null, 2, (err) => c.mustExist(err)), + ]); + }) + .then(function () { + return Promise.all([ + limiter2.submit(c.slowJob, 100, null, 3, (err) => c.mustExist(err)), + limiter2.submit(c.slowJob, 100, null, 4, (err) => c.mustExist(err)), + limiter2.submit(c.slowJob, 100, null, 5, (err) => c.mustExist(err)), + ]); + }) + .then(function () { + return runCommand(limiter1, "hvals", [client_num_queued_key]); + }) + .then(function (queues) { + c.mustEqual(queues, ["0", "0"]); - return c.wait(100) - }) - .then(function () { - var counts1 = limiter1.counts() - c.mustEqual(counts1.RECEIVED, 0) - c.mustEqual(counts1.QUEUED, 0) - c.mustEqual(counts1.RUNNING, 0) - c.mustEqual(counts1.EXECUTING, 0) - c.mustEqual(counts1.DONE, 1) - - var counts2 = limiter2.counts() - c.mustEqual(counts2.RECEIVED, 0) - c.mustEqual(counts2.QUEUED, 0) - c.mustEqual(counts2.RUNNING, 0) - c.mustEqual(counts2.EXECUTING, 0) - c.mustEqual(counts2.DONE, 0) - - return c.last() - }) - .then(function (results) { - c.checkResultsOrder([[1]]) - c.checkDuration(100) + return Promise.all([c.limiter.clusterQueued(), limiter2.clusterQueued()]); + }) + .then(function (queues) { + c.mustEqual(queues, [0, 0]); - return Promise.all([ - limiter1.disconnect(false), - limiter2.disconnect(false) - ]) - }) - }) + return c.wait(100); + }) + .then(function () { + var counts1 = limiter1.counts(); + 
c.mustEqual(counts1.RECEIVED, 0); + c.mustEqual(counts1.QUEUED, 0); + c.mustEqual(counts1.RUNNING, 0); + c.mustEqual(counts1.EXECUTING, 0); + c.mustEqual(counts1.DONE, 1); + + var counts2 = limiter2.counts(); + c.mustEqual(counts2.RECEIVED, 0); + c.mustEqual(counts2.QUEUED, 0); + c.mustEqual(counts2.RUNNING, 0); + c.mustEqual(counts2.EXECUTING, 0); + c.mustEqual(counts2.DONE, 0); + + return c.last(); + }) + .then(function (_results) { + c.checkResultsOrder([[1]]); + c.checkDuration(100); + + return Promise.all([limiter1.disconnect(false), limiter2.disconnect(false)]); + }); + }); - it('Should pass messages to all limiters in Cluster', function (done) { + it("Should pass messages to all limiters in Cluster", function (done) { c = makeTest({ maxConcurrent: 1, minTime: 100, - id: 'super-duper' - }) + id: "super-duper", + }); var limiter1 = new Bottleneck({ maxConcurrent: 1, minTime: 100, - id: 'super-duper', - datastore: process.env.DATASTORE - }) + id: "super-duper", + datastore: process.env.DATASTORE, + }); var limiter2 = new Bottleneck({ maxConcurrent: 1, minTime: 100, - id: 'nope', - datastore: process.env.DATASTORE - }) - var received = [] - - c.limiter.on('message', (msg) => { - received.push(1, msg) - }) - limiter1.on('message', (msg) => { - received.push(2, msg) - }) - limiter2.on('message', (msg) => { - received.push(3, msg) - }) - - Promise.all([c.limiter.ready(), limiter2.ready()]) - .then(function () { - limiter1.publish(555) - }) + id: "nope", + datastore: process.env.DATASTORE, + }); + var received = []; + + c.limiter.on("message", (msg) => { + received.push(1, msg); + }); + limiter1.on("message", (msg) => { + received.push(2, msg); + }); + limiter2.on("message", (msg) => { + received.push(3, msg); + }); + + Promise.all([c.limiter.ready(), limiter2.ready()]).then(function () { + limiter1.publish(555); + }); setTimeout(function () { - limiter1.disconnect() - limiter2.disconnect() - c.mustEqual(received.sort(), [1, 2, '555', '555']) - done() - }, 150) - }) 
- - it('Should pass messages to correct limiter after Group re-instantiations', function () { - c = makeTest() + limiter1.disconnect(); + limiter2.disconnect(); + c.mustEqual(received.sort(), [1, 2, "555", "555"]); + done(); + }, 150); + }); + + it("Should pass messages to correct limiter after Group re-instantiations", function () { + c = makeTest(); var group = new Bottleneck.Group({ maxConcurrent: 1, minTime: 100, - datastore: process.env.DATASTORE - }) - var received = [] + datastore: process.env.DATASTORE, + }); + var received = []; - return new Promise(function (resolve, reject) { - var limiter = group.key('A') + return new Promise(function (resolve, _reject) { + var limiter = group.key("A"); - limiter.on('message', function (msg) { - received.push('1', msg) - return resolve() - }) - limiter.publish('Bonjour!') + limiter.on("message", function (msg) { + received.push("1", msg); + return resolve(); + }); + limiter.publish("Bonjour!"); }) - .then(function () { - return new Promise(function (resolve, reject) { - var limiter = group.key('B') - - limiter.on('message', function (msg) { - received.push('2', msg) - return resolve() - }) - limiter.publish('Comment allez-vous?') + .then(function () { + return new Promise(function (resolve, _reject) { + var limiter = group.key("B"); + + limiter.on("message", function (msg) { + received.push("2", msg); + return resolve(); + }); + limiter.publish("Comment allez-vous?"); + }); }) - }) - .then(function () { - return group.deleteKey('A') - }) - .then(function () { - return new Promise(function (resolve, reject) { - var limiter = group.key('A') - - limiter.on('message', function (msg) { - received.push('3', msg) - return resolve() - }) - limiter.publish('Au revoir!') + .then(function () { + return group.deleteKey("A"); }) - }) - .then(function () { - c.mustEqual(received, ['1', 'Bonjour!', '2', 'Comment allez-vous?', '3', 'Au revoir!']) - group.disconnect() - }) - }) + .then(function () { + return new Promise(function 
(resolve, _reject) { + var limiter = group.key("A"); + + limiter.on("message", function (msg) { + received.push("3", msg); + return resolve(); + }); + limiter.publish("Au revoir!"); + }); + }) + .then(function () { + c.mustEqual(received, ["1", "Bonjour!", "2", "Comment allez-vous?", "3", "Au revoir!"]); + group.disconnect(); + }); + }); - it('Should have a default key TTL when using Groups', function () { - c = makeTest() + it("Should have a default key TTL when using Groups", function () { + c = makeTest(); var group = new Bottleneck.Group({ - datastore: process.env.DATASTORE - }) + datastore: process.env.DATASTORE, + }); - return group.key('one').ready() - .then(function () { - var limiter = group.key('one') - var settings_key = limiterKeys(limiter)[0] - return runCommand(limiter, 'ttl', [settings_key]) - }) - .then(function (ttl) { - assert(ttl >= 290 && ttl <= 305) - }) - .then(function () { - return group.disconnect(false) - }) - }) + return group + .key("one") + .ready() + .then(function () { + var limiter = group.key("one"); + var settings_key = limiterKeys(limiter)[0]; + return runCommand(limiter, "ttl", [settings_key]); + }) + .then(function (ttl) { + c.mustGte(ttl, 290); + c.mustLte(ttl, 305); + }) + .then(function () { + return group.disconnect(false); + }); + }); - it('Should support Groups and expire Redis keys', function () { - c = makeTest() + it("Should support Groups and expire Redis keys", function () { + c = makeTest(); var group = new Bottleneck.Group({ datastore: process.env.DATASTORE, clearDatastore: true, minTime: 50, - timeout: 200 - }) - var limiter1 - var limiter2 - var limiter3 - - var t0 = Date.now() - var results = {} + timeout: 200, + }); + var limiter1; + var limiter2; + var limiter3; + + var t0 = Date.now(); + var results = {}; var job = function (x) { - results[x] = Date.now() - t0 - return Promise.resolve() - } - - return c.limiter.ready() - .then(function () { - limiter1 = group.key('one') - limiter2 = group.key('two') - limiter3 
= group.key('three') + results[x] = Date.now() - t0; + return Promise.resolve(); + }; - return Promise.all([limiter1.ready(), limiter2.ready(), limiter3.ready()]) - }) - .then(function () { - return Promise.all([countKeys(limiter1), countKeys(limiter2), countKeys(limiter3)]) - }) - .then(function (counts) { - c.mustEqual(counts, [5, 5, 5]) - return Promise.all([ - limiter1.schedule(job, 'a'), - limiter1.schedule(job, 'b'), - limiter1.schedule(job, 'c'), - limiter2.schedule(job, 'd'), - limiter2.schedule(job, 'e'), - limiter3.schedule(job, 'f') - ]) - }) - .then(function () { - c.mustEqual(Object.keys(results).length, 6) - assert(results.a < results.b) - assert(results.b < results.c) - assert(results.b - results.a >= 40) - assert(results.c - results.b >= 40) - - assert(results.d < results.e) - assert(results.e - results.d >= 40) + return c.limiter + .ready() + .then(function () { + limiter1 = group.key("one"); + limiter2 = group.key("two"); + limiter3 = group.key("three"); - assert(Math.abs(results.a - results.d) <= 10) - assert(Math.abs(results.d - results.f) <= 10) - assert(Math.abs(results.b - results.e) <= 10) + return Promise.all([limiter1.ready(), limiter2.ready(), limiter3.ready()]); + }) + .then(function () { + return Promise.all([countKeys(limiter1), countKeys(limiter2), countKeys(limiter3)]); + }) + .then(function (counts) { + c.mustEqual(counts, [5, 5, 5]); + return Promise.all([ + limiter1.schedule(job, "a"), + limiter1.schedule(job, "b"), + limiter1.schedule(job, "c"), + limiter2.schedule(job, "d"), + limiter2.schedule(job, "e"), + limiter3.schedule(job, "f"), + ]); + }) + .then(function () { + c.mustEqual(Object.keys(results).length, 6); + c.mustLt(results.a, results.b); + c.mustLt(results.b, results.c); + c.mustGte(results.b - results.a, 40); + c.mustGte(results.c - results.b, 40); - return c.wait(400) - }) - .then(function () { - return Promise.all([countKeys(limiter1), countKeys(limiter2), countKeys(limiter3)]) - }) - .then(function (counts) { - 
c.mustEqual(counts, [0, 0, 0]) - c.mustEqual(group.keys().length, 0) - c.mustEqual(Object.keys(group.connection.limiters).length, 0) - return group.disconnect(false) - }) + c.mustLt(results.d, results.e); + c.mustGte(results.e - results.d, 40); - }) + c.mustLte(Math.abs(results.a - results.d), 10); + c.mustLte(Math.abs(results.d - results.f), 10); + c.mustLte(Math.abs(results.b - results.e), 10); - it('Should not recreate a key when running heartbeat', function () { - c = makeTest() + return c.wait(400); + }) + .then(function () { + return Promise.all([countKeys(limiter1), countKeys(limiter2), countKeys(limiter3)]); + }) + .then(function (counts) { + c.mustEqual(counts, [0, 0, 0]); + c.mustEqual(group.keys().length, 0); + c.mustEqual(Object.keys(group.connection.limiters).length, 0); + return group.disconnect(false); + }); + }); + + it("Should not recreate a key when running heartbeat", function () { + c = makeTest(); var group = new Bottleneck.Group({ datastore: process.env.DATASTORE, clearDatastore: true, maxConcurrent: 50, minTime: 50, timeout: 300, - heartbeatInterval: 5 - }) - var key = 'heartbeat' + heartbeatInterval: 5, + }); + var key = "heartbeat"; - var limiter = group.key(key) - return c.pNoErrVal(limiter.schedule(c.promise, null, 1), 1) - .then(function () { - return limiter.done() - }) - .then(function (done) { - c.mustEqual(done, 1) - return c.wait(400) - }) - .then(function () { - return countKeys(limiter) - }) - .then(function (count) { - c.mustEqual(count, 0) - return group.disconnect(false) - }) - }) - - it('Should delete Redis key when manually deleting a group key', function () { - c = makeTest() + var limiter = group.key(key); + return c + .pNoErrVal(limiter.schedule(c.promise, null, 1), 1) + .then(function () { + return limiter.done(); + }) + .then(function (done) { + c.mustEqual(done, 1); + return c.wait(400); + }) + .then(function () { + return countKeys(limiter); + }) + .then(function (count) { + c.mustEqual(count, 0); + return 
group.disconnect(false); + }); + }); + + it("Should delete Redis key when manually deleting a group key", function () { + c = makeTest(); var group1 = new Bottleneck.Group({ datastore: process.env.DATASTORE, clearDatastore: true, maxConcurrent: 50, minTime: 50, - timeout: 300 - }) + timeout: 300, + }); var group2 = new Bottleneck.Group({ datastore: process.env.DATASTORE, clearDatastore: true, maxConcurrent: 50, minTime: 50, - timeout: 300 - }) - var key = 'deleted' - var limiter = group1.key(key) // only for countKeys() use - - return c.pNoErrVal(group1.key(key).schedule(c.promise, null, 1), 1) - .then(function () { - return c.pNoErrVal(group2.key(key).schedule(c.promise, null, 2), 2) - }) - .then(function () { - c.mustEqual(group1.keys().length, 1) - c.mustEqual(group2.keys().length, 1) - return group1.deleteKey(key) - }) - .then(function (deleted) { - c.mustEqual(deleted, true) - return countKeys(limiter) - }) - .then(function (count) { - c.mustEqual(count, 0) - c.mustEqual(group1.keys().length, 0) - c.mustEqual(group2.keys().length, 1) - return c.wait(200) - }) - .then(function () { - c.mustEqual(group1.keys().length, 0) - c.mustEqual(group2.keys().length, 0) - return Promise.all([ - group1.disconnect(false), - group2.disconnect(false) - ]) - }) - }) + timeout: 300, + }); + var key = "deleted"; + var limiter = group1.key(key); // only for countKeys() use - it('Should delete Redis keys from a group even when the local limiter is not present', function () { - c = makeTest() + return c + .pNoErrVal(group1.key(key).schedule(c.promise, null, 1), 1) + .then(function () { + return c.pNoErrVal(group2.key(key).schedule(c.promise, null, 2), 2); + }) + .then(function () { + c.mustEqual(group1.keys().length, 1); + c.mustEqual(group2.keys().length, 1); + return group1.deleteKey(key); + }) + .then(function (deleted) { + c.mustEqual(deleted, true); + return countKeys(limiter); + }) + .then(function (count) { + c.mustEqual(count, 0); + c.mustEqual(group1.keys().length, 0); + 
c.mustEqual(group2.keys().length, 1); + return c.wait(200); + }) + .then(function () { + c.mustEqual(group1.keys().length, 0); + c.mustEqual(group2.keys().length, 0); + return Promise.all([group1.disconnect(false), group2.disconnect(false)]); + }); + }); + + it("Should delete Redis keys from a group even when the local limiter is not present", function () { + c = makeTest(); var group1 = new Bottleneck.Group({ datastore: process.env.DATASTORE, clearDatastore: true, maxConcurrent: 50, minTime: 50, - timeout: 300 - }) + timeout: 300, + }); var group2 = new Bottleneck.Group({ datastore: process.env.DATASTORE, clearDatastore: true, maxConcurrent: 50, minTime: 50, - timeout: 300 - }) - var key = 'deleted-cluster-wide' - var limiter = group1.key(key) // only for countKeys() use - - return c.pNoErrVal(group1.key(key).schedule(c.promise, null, 1), 1) - .then(function () { - c.mustEqual(group1.keys().length, 1) - c.mustEqual(group2.keys().length, 0) - return group2.deleteKey(key) - }) - .then(function (deleted) { - c.mustEqual(deleted, true) - return countKeys(limiter) - }) - .then(function (count) { - c.mustEqual(count, 0) - c.mustEqual(group1.keys().length, 1) - c.mustEqual(group2.keys().length, 0) - return c.wait(200) - }) - .then(function () { - c.mustEqual(group1.keys().length, 0) - c.mustEqual(group2.keys().length, 0) - return Promise.all([ - group1.disconnect(false), - group2.disconnect(false) - ]) - }) - }) + timeout: 300, + }); + var key = "deleted-cluster-wide"; + var limiter = group1.key(key); // only for countKeys() use - it('Should returns all Group keys in the cluster', async function () { - c = makeTest() + return c + .pNoErrVal(group1.key(key).schedule(c.promise, null, 1), 1) + .then(function () { + c.mustEqual(group1.keys().length, 1); + c.mustEqual(group2.keys().length, 0); + return group2.deleteKey(key); + }) + .then(function (deleted) { + c.mustEqual(deleted, true); + return countKeys(limiter); + }) + .then(function (count) { + c.mustEqual(count, 0); + 
c.mustEqual(group1.keys().length, 1); + c.mustEqual(group2.keys().length, 0); + return c.wait(200); + }) + .then(function () { + c.mustEqual(group1.keys().length, 0); + c.mustEqual(group2.keys().length, 0); + return Promise.all([group1.disconnect(false), group2.disconnect(false)]); + }); + }); + + it("Should returns all Group keys in the cluster", async function () { + c = makeTest(); var group1 = new Bottleneck.Group({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'same', - timeout: 3000 - }) + id: "same", + timeout: 3000, + }); var group2 = new Bottleneck.Group({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'same', - timeout: 3000 - }) - var keys1 = ['lorem', 'ipsum', 'dolor', 'sit', 'amet', 'consectetur'] - var keys2 = ['adipiscing', 'elit'] - var both = keys1.concat(keys2) - - await Promise.all(keys1.map((k) => group1.key(k).ready())) - await Promise.all(keys2.map((k) => group2.key(k).ready())) - - c.mustEqual(group1.keys().sort(), keys1.sort()) - c.mustEqual(group2.keys().sort(), keys2.sort()) - c.mustEqual( - (await group1.clusterKeys()).sort(), - both.sort() - ) - c.mustEqual( - (await group1.clusterKeys()).sort(), - both.sort() - ) - - var group3 = new Bottleneck.Group({ datastore: 'local' }) - c.mustEqual(await group3.clusterKeys(), []) - - await group1.disconnect(false) - await group2.disconnect(false) - }) - - it('Should queue up the least busy limiter', async function () { - c = makeTest() + id: "same", + timeout: 3000, + }); + var keys1 = ["lorem", "ipsum", "dolor", "sit", "amet", "consectetur"]; + var keys2 = ["adipiscing", "elit"]; + var both = keys1.concat(keys2); + + await Promise.all(keys1.map((k) => group1.key(k).ready())); + await Promise.all(keys2.map((k) => group2.key(k).ready())); + + c.mustEqual(group1.keys().sort(), keys1.sort()); + c.mustEqual(group2.keys().sort(), keys2.sort()); + c.mustEqual((await group1.clusterKeys()).sort(), both.sort()); + c.mustEqual((await group1.clusterKeys()).sort(), both.sort()); 
+ + var group3 = new Bottleneck.Group({ datastore: "local" }); + c.mustEqual(await group3.clusterKeys(), []); + + await group1.disconnect(false); + await group2.disconnect(false); + }); + + it("Should queue up the least busy limiter", async function () { + c = makeTest(); var limiter1 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'busy', + id: "busy", timeout: 3000, maxConcurrent: 3, - trackDoneStatus: true - }) + trackDoneStatus: true, + }); var limiter2 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'busy', + id: "busy", timeout: 3000, maxConcurrent: 3, - trackDoneStatus: true - }) + trackDoneStatus: true, + }); var limiter3 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'busy', + id: "busy", timeout: 3000, maxConcurrent: 3, - trackDoneStatus: true - }) + trackDoneStatus: true, + }); var limiter4 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'busy', + id: "busy", timeout: 3000, maxConcurrent: 3, - trackDoneStatus: true - }) + trackDoneStatus: true, + }); var runningOrExecuting = function (limiter) { - var counts = limiter.counts() - return counts.RUNNING + counts.EXECUTING - } - - var resolve1, resolve2, resolve3, resolve4, resolve5, resolve6, resolve7 - var p1 = new Promise(function (resolve, reject) { - resolve1 = function (err, n) { resolve(n) } - }) - var p2 = new Promise(function (resolve, reject) { - resolve2 = function (err, n) { resolve(n) } - }) - var p3 = new Promise(function (resolve, reject) { - resolve3 = function (err, n) { resolve(n) } - }) - var p4 = new Promise(function (resolve, reject) { - resolve4 = function (err, n) { resolve(n) } - }) - var p5 = new Promise(function (resolve, reject) { - resolve5 = function (err, n) { resolve(n) } - }) - var p6 = new Promise(function (resolve, reject) { - resolve6 = function (err, n) { resolve(n) } - }) - var p7 = new Promise(function (resolve, reject) { - resolve7 = 
function (err, n) { resolve(n) } - }) - - await limiter1.schedule({id: '1'}, c.promise, null, 'A') - await limiter2.schedule({id: '2'}, c.promise, null, 'B') - await limiter3.schedule({id: '3'}, c.promise, null, 'C') - await limiter4.schedule({id: '4'}, c.promise, null, 'D') - - await limiter1.submit({id: 'A'}, c.slowJob, 50, null, 1, resolve1) - await limiter1.submit({id: 'B'}, c.slowJob, 500, null, 2, resolve2) - await limiter2.submit({id: 'C'}, c.slowJob, 550, null, 3, resolve3) - - c.mustEqual(runningOrExecuting(limiter1), 2) - c.mustEqual(runningOrExecuting(limiter2), 1) - - await limiter3.submit({id: 'D'}, c.slowJob, 50, null, 4, resolve4) - await limiter4.submit({id: 'E'}, c.slowJob, 50, null, 5, resolve5) - await limiter3.submit({id: 'F'}, c.slowJob, 50, null, 6, resolve6) - await limiter4.submit({id: 'G'}, c.slowJob, 50, null, 7, resolve7) - - c.mustEqual(limiter3.counts().QUEUED, 2) - c.mustEqual(limiter4.counts().QUEUED, 2) - - await Promise.all([p1, p2, p3, p4, p5, p6, p7]) - - c.checkResultsOrder([['A'],['B'],['C'],['D'],[1],[4],[5],[6],[7],[2],[3]]) - - await limiter1.disconnect(false) - await limiter2.disconnect(false) - await limiter3.disconnect(false) - await limiter4.disconnect(false) - }) - - it('Should pass the remaining capacity to other limiters', async function () { - c = makeTest() + var counts = limiter.counts(); + return counts.RUNNING + counts.EXECUTING; + }; + + var resolve1, resolve2, resolve3, resolve4, resolve5, resolve6, resolve7; + var p1 = new Promise(function (resolve, _reject) { + resolve1 = function (_err, n) { + resolve(n); + }; + }); + var p2 = new Promise(function (resolve, _reject) { + resolve2 = function (_err, n) { + resolve(n); + }; + }); + var p3 = new Promise(function (resolve, _reject) { + resolve3 = function (_err, n) { + resolve(n); + }; + }); + var p4 = new Promise(function (resolve, _reject) { + resolve4 = function (_err, n) { + resolve(n); + }; + }); + var p5 = new Promise(function (resolve, _reject) { + resolve5 
= function (_err, n) { + resolve(n); + }; + }); + var p6 = new Promise(function (resolve, _reject) { + resolve6 = function (_err, n) { + resolve(n); + }; + }); + var p7 = new Promise(function (resolve, _reject) { + resolve7 = function (_err, n) { + resolve(n); + }; + }); + + await limiter1.schedule({ id: "1" }, c.promise, null, "A"); + await limiter2.schedule({ id: "2" }, c.promise, null, "B"); + await limiter3.schedule({ id: "3" }, c.promise, null, "C"); + await limiter4.schedule({ id: "4" }, c.promise, null, "D"); + + await limiter1.submit({ id: "A" }, c.slowJob, 50, null, 1, resolve1); + await limiter1.submit({ id: "B" }, c.slowJob, 500, null, 2, resolve2); + await limiter2.submit({ id: "C" }, c.slowJob, 550, null, 3, resolve3); + + c.mustEqual(runningOrExecuting(limiter1), 2); + c.mustEqual(runningOrExecuting(limiter2), 1); + + await limiter3.submit({ id: "D" }, c.slowJob, 50, null, 4, resolve4); + await limiter4.submit({ id: "E" }, c.slowJob, 50, null, 5, resolve5); + await limiter3.submit({ id: "F" }, c.slowJob, 50, null, 6, resolve6); + await limiter4.submit({ id: "G" }, c.slowJob, 50, null, 7, resolve7); + + c.mustEqual(limiter3.counts().QUEUED, 2); + c.mustEqual(limiter4.counts().QUEUED, 2); + + await Promise.all([p1, p2, p3, p4, p5, p6, p7]); + + c.checkResultsOrder([["A"], ["B"], ["C"], ["D"], [1], [4], [5], [6], [7], [2], [3]]); + + await limiter1.disconnect(false); + await limiter2.disconnect(false); + await limiter3.disconnect(false); + await limiter4.disconnect(false); + }); + + it("Should pass the remaining capacity to other limiters", async function () { + c = makeTest(); var limiter1 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'busy', + id: "busy", timeout: 3000, maxConcurrent: 3, - trackDoneStatus: true - }) + trackDoneStatus: true, + }); var limiter2 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'busy', + id: "busy", timeout: 3000, maxConcurrent: 3, - trackDoneStatus: true - 
}) + trackDoneStatus: true, + }); var limiter3 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'busy', + id: "busy", timeout: 3000, maxConcurrent: 3, - trackDoneStatus: true - }) + trackDoneStatus: true, + }); var limiter4 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'busy', + id: "busy", timeout: 3000, maxConcurrent: 3, - trackDoneStatus: true - }) + trackDoneStatus: true, + }); var runningOrExecuting = function (limiter) { - var counts = limiter.counts() - return counts.RUNNING + counts.EXECUTING - } - var t3, t4 - - var resolve1, resolve2, resolve3, resolve4, resolve5 - var p1 = new Promise(function (resolve, reject) { - resolve1 = function (err, n) { resolve(n) } - }) - var p2 = new Promise(function (resolve, reject) { - resolve2 = function (err, n) { resolve(n) } - }) - var p3 = new Promise(function (resolve, reject) { - resolve3 = function (err, n) { t3 = Date.now(); resolve(n) } - }) - var p4 = new Promise(function (resolve, reject) { - resolve4 = function (err, n) { t4 = Date.now(); resolve(n) } - }) - var p5 = new Promise(function (resolve, reject) { - resolve5 = function (err, n) { resolve(n) } - }) - - await limiter1.schedule({id: '1'}, c.promise, null, 'A') - await limiter2.schedule({id: '2'}, c.promise, null, 'B') - await limiter3.schedule({id: '3'}, c.promise, null, 'C') - await limiter4.schedule({id: '4'}, c.promise, null, 'D') - - await limiter1.submit({id: 'A', weight: 2}, c.slowJob, 50, null, 1, resolve1) - await limiter2.submit({id: 'C'}, c.slowJob, 550, null, 2, resolve2) - - c.mustEqual(runningOrExecuting(limiter1), 1) - c.mustEqual(runningOrExecuting(limiter2), 1) - - await limiter3.submit({id: 'D'}, c.slowJob, 50, null, 3, resolve3) - await limiter4.submit({id: 'E'}, c.slowJob, 50, null, 4, resolve4) - await limiter4.submit({id: 'G'}, c.slowJob, 50, null, 5, resolve5) - - c.mustEqual(limiter3.counts().QUEUED, 1) - c.mustEqual(limiter4.counts().QUEUED, 2) - - await 
Promise.all([p1, p2, p3, p4, p5]) - - c.checkResultsOrder([['A'],['B'],['C'],['D'],[1],[3],[4],[5],[2]]) - - assert(Math.abs(t3 - t4) < 15) - - await limiter1.disconnect(false) - await limiter2.disconnect(false) - await limiter3.disconnect(false) - await limiter4.disconnect(false) - }) - - it('Should take the capacity and blacklist if the priority limiter is not responding', async function () { - c = makeTest() + var counts = limiter.counts(); + return counts.RUNNING + counts.EXECUTING; + }; + var t3, t4; + + var resolve1, resolve2, resolve3, resolve4, resolve5; + var p1 = new Promise(function (resolve, _reject) { + resolve1 = function (_err, n) { + resolve(n); + }; + }); + var p2 = new Promise(function (resolve, _reject) { + resolve2 = function (_err, n) { + resolve(n); + }; + }); + var p3 = new Promise(function (resolve, _reject) { + resolve3 = function (_err, n) { + t3 = Date.now(); + resolve(n); + }; + }); + var p4 = new Promise(function (resolve, _reject) { + resolve4 = function (_err, n) { + t4 = Date.now(); + resolve(n); + }; + }); + var p5 = new Promise(function (resolve, _reject) { + resolve5 = function (_err, n) { + resolve(n); + }; + }); + + await limiter1.schedule({ id: "1" }, c.promise, null, "A"); + await limiter2.schedule({ id: "2" }, c.promise, null, "B"); + await limiter3.schedule({ id: "3" }, c.promise, null, "C"); + await limiter4.schedule({ id: "4" }, c.promise, null, "D"); + + await limiter1.submit({ id: "A", weight: 2 }, c.slowJob, 50, null, 1, resolve1); + await limiter2.submit({ id: "C" }, c.slowJob, 550, null, 2, resolve2); + + c.mustEqual(runningOrExecuting(limiter1), 1); + c.mustEqual(runningOrExecuting(limiter2), 1); + + await limiter3.submit({ id: "D" }, c.slowJob, 50, null, 3, resolve3); + await limiter4.submit({ id: "E" }, c.slowJob, 50, null, 4, resolve4); + await limiter4.submit({ id: "G" }, c.slowJob, 50, null, 5, resolve5); + + c.mustEqual(limiter3.counts().QUEUED, 1); + c.mustEqual(limiter4.counts().QUEUED, 2); + + await 
Promise.all([p1, p2, p3, p4, p5]); + + c.checkResultsOrder([["A"], ["B"], ["C"], ["D"], [1], [3], [4], [5], [2]]); + + c.mustLt(Math.abs(t3 - t4), 15); + + await limiter1.disconnect(false); + await limiter2.disconnect(false); + await limiter3.disconnect(false); + await limiter4.disconnect(false); + }); + + it("Should take the capacity and blacklist if the priority limiter is not responding", async function () { + c = makeTest(); var limiter1 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'crash', + id: "crash", timeout: 3000, maxConcurrent: 1, - trackDoneStatus: true - }) + trackDoneStatus: true, + }); var limiter2 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'crash', + id: "crash", timeout: 3000, maxConcurrent: 1, - trackDoneStatus: true - }) + trackDoneStatus: true, + }); var limiter3 = new Bottleneck({ datastore: process.env.DATASTORE, clearDatastore: true, - id: 'crash', + id: "crash", timeout: 3000, maxConcurrent: 1, - trackDoneStatus: true - }) - - await limiter1.schedule({id: '1'}, c.promise, null, 'A') - await limiter2.schedule({id: '2'}, c.promise, null, 'B') - await limiter3.schedule({id: '3'}, c.promise, null, 'C') - - var resolve1, resolve2, resolve3 - var p1 = new Promise(function (resolve, reject) { - resolve1 = function (err, n) { resolve(n) } - }) - var p2 = new Promise(function (resolve, reject) { - resolve2 = function (err, n) { resolve(n) } - }) - var p3 = new Promise(function (resolve, reject) { - resolve3 = function (err, n) { resolve(n) } - }) - - await limiter1.submit({id: '4'}, c.slowJob, 100, null, 4, resolve1) - await limiter2.submit({id: '5'}, c.slowJob, 100, null, 5, resolve2) - await limiter3.submit({id: '6'}, c.slowJob, 100, null, 6, resolve3) - await limiter2.disconnect(false) - - await Promise.all([p1, p3]) - c.checkResultsOrder([['A'], ['B'], ['C'], [4], [6]]) - - await limiter1.disconnect(false) - await limiter2.disconnect(false) - await 
limiter3.disconnect(false) - }) - - }) + trackDoneStatus: true, + }); + + await limiter1.schedule({ id: "1" }, c.promise, null, "A"); + await limiter2.schedule({ id: "2" }, c.promise, null, "B"); + await limiter3.schedule({ id: "3" }, c.promise, null, "C"); + + var resolve1, resolve2, resolve3; + var p1 = new Promise(function (resolve, _reject) { + resolve1 = function (_err, n) { + resolve(n); + }; + }); + new Promise(function (resolve, _reject) { + resolve2 = function (_err, n) { + resolve(n); + }; + }); + var p3 = new Promise(function (resolve, _reject) { + resolve3 = function (_err, n) { + resolve(n); + }; + }); + + await limiter1.submit({ id: "4" }, c.slowJob, 100, null, 4, resolve1); + await limiter2.submit({ id: "5" }, c.slowJob, 100, null, 5, resolve2); + await limiter3.submit({ id: "6" }, c.slowJob, 100, null, 6, resolve3); + await limiter2.disconnect(false); + + await Promise.all([p1, p3]); + c.checkResultsOrder([["A"], ["B"], ["C"], [4], [6]]); + + await limiter1.disconnect(false); + await limiter2.disconnect(false); + await limiter3.disconnect(false); + }); + }); } diff --git a/test/context.js b/test/context.js index 8d498f3..461757e 100644 --- a/test/context.js +++ b/test/context.js @@ -1,142 +1,167 @@ -global.TEST = true -var Bottleneck = require('./bottleneck') -var assert = require('assert') +global.TEST = true; +const assert = require("assert"); +var Bottleneck = require("./bottleneck"); -module.exports = function (options={}) { +module.exports = function (options = {}) { var mustEqual = function (a, b) { - var strA = JSON.stringify(a) - var strB = JSON.stringify(b) - if (strA !== strB) { - console.log(strA + ' !== ' + strB, (new Error('').stack)) - assert(strA === strB) - } - } + assert.deepStrictEqual(a, b); + }; - var start - var calls = [] + var start; + var calls = []; // set options.datastore var setRedisClientOptions = function (options) { - options.clearDatastore = true + options.clearDatastore = true; if (options.clientOptions == null) { 
options.clientOptions = { host: process.env.REDIS_HOST, port: process.env.REDIS_PORT, - } + }; } - } + }; - if (options.datastore == null && process.env.DATASTORE === 'redis') { - options.datastore = 'redis' - setRedisClientOptions(options) - } else if (options.datastore == null && process.env.DATASTORE === 'ioredis') { - options.datastore = 'ioredis' - setRedisClientOptions(options) + if (options.datastore == null && process.env.DATASTORE === "redis") { + options.datastore = "redis"; + setRedisClientOptions(options); + } else if (options.datastore == null && process.env.DATASTORE === "ioredis") { + options.datastore = "ioredis"; + setRedisClientOptions(options); } else { - options.datastore = 'local' + options.datastore = "local"; } - var limiter = new Bottleneck(options) + var limiter = new Bottleneck(options); // limiter.on("debug", function (str, args) { console.log(`${Date.now()-start} ${str} ${JSON.stringify(args)}`) }) if (!options.errorEventsExpected) { limiter.on("error", function (err) { - console.log('(CONTEXT) ERROR EVENT', err) - }) + console.log("(CONTEXT) ERROR EVENT", err); + }); } - limiter.ready().then(function (client) { - start = Date.now() - }) + limiter.ready().then(function (_client) { + start = Date.now(); + }); var getResults = function () { return { elapsed: Date.now() - start, callsDuration: calls.length > 0 ? 
calls[calls.length - 1].time : null, - calls: calls - } - } + calls: calls, + }; + }; var context = { job: function (err, ...result) { - var cb = result.pop() - calls.push({err: err, result: result, time: Date.now()-start}) - if (process.env.DEBUG) console.log(result, calls) - cb.apply({}, [err].concat(result)) + var cb = result.pop(); + calls.push({ err: err, result: result, time: Date.now() - start }); + if (process.env.DEBUG) console.log(result, calls); + cb.apply({}, [err].concat(result)); }, slowJob: function (duration, err, ...result) { setTimeout(function () { - var cb = result.pop() - calls.push({err: err, result: result, time: Date.now()-start}) - if (process.env.DEBUG) console.log(result, calls) - cb.apply({}, [err].concat(result)) - }, duration) + var cb = result.pop(); + calls.push({ err: err, result: result, time: Date.now() - start }); + if (process.env.DEBUG) console.log(result, calls); + cb.apply({}, [err].concat(result)); + }, duration); }, promise: function (err, ...result) { return new Promise(function (resolve, reject) { - if (process.env.DEBUG) console.log('In c.promise. Result: ', result) - calls.push({err: err, result: result, time: Date.now()-start}) - if (process.env.DEBUG) console.log(result, calls) + if (process.env.DEBUG) console.log("In c.promise. Result: ", result); + calls.push({ err: err, result: result, time: Date.now() - start }); + if (process.env.DEBUG) console.log(result, calls); if (err === null) { - return resolve(result) + return resolve(result); } else { - return reject(err) + return reject(err); } - }) + }); }, slowPromise: function (duration, err, ...result) { return new Promise(function (resolve, reject) { setTimeout(function () { - if (process.env.DEBUG) console.log('In c.slowPromise. Result: ', result) - calls.push({err: err, result: result, time: Date.now()-start}) - if (process.env.DEBUG) console.log(result, calls) + if (process.env.DEBUG) console.log("In c.slowPromise. 
Result: ", result); + calls.push({ err: err, result: result, time: Date.now() - start }); + if (process.env.DEBUG) console.log(result, calls); if (err === null) { - return resolve(result) + return resolve(result); } else { - return reject(err) + return reject(err); } - }, duration) - }) + }, duration); + }); }, pNoErrVal: function (promise, ...expected) { - if (process.env.DEBUG) console.log('In c.pNoErrVal. Expected:', expected) + if (process.env.DEBUG) console.log("In c.pNoErrVal. Expected:", expected); return promise.then(function (actual) { - mustEqual(actual, expected) - }) + mustEqual(actual, expected); + }); }, noErrVal: function (...expected) { return function (err, ...actual) { - mustEqual(err, null) - mustEqual(actual, expected) - } + mustEqual(err, null); + mustEqual(actual, expected); + }; }, last: function (options) { - var opt = options != null ? options : {} - return limiter.schedule(opt, function () { return Promise.resolve(getResults()) }) - .catch(function (err) { console.error("Error in context.last:", err)}) + var opt = options != null ? 
options : {}; + return limiter + .schedule(opt, function () { + return Promise.resolve(getResults()); + }) + .catch(function (err) { + console.error("Error in context.last:", err); + }); }, wait: function (wait) { - return new Promise(function (resolve, reject) { - setTimeout(resolve, wait) - }) + return new Promise(function (resolve, _reject) { + setTimeout(resolve, wait); + }); }, limiter: limiter, mustEqual: mustEqual, - mustExist: function (a) { assert(a != null) }, + mustGte: function (a, b) { + assert(a >= b, `Expected ${a} to be greater than or equal to ${b}`); + }, + mustGt: function (a, b) { + assert(a > b, `Expected ${a} to be greater than ${b}`); + }, + mustLte: function (a, b) { + assert(a <= b, `Expected ${a} to be less than or equal to ${b}`); + }, + mustLt: function (a, b) { + assert(a < b, `Expected ${a} to be less than ${b}`); + }, + mustExist: function (a) { + assert(a != null, `Expected ${a} to exist`); + }, + mustNotExist: function (a) { + assert(a == null, `Expected ${a} to not exist`); + }, results: getResults, checkResultsOrder: function (order) { - mustEqual(order.length, calls.length) - for (var i = 0; i < Math.max(calls.length, order.length); i++) { - mustEqual(order[i], calls[i].result) + assert.deepStrictEqual( + order.length, + calls.length, + `Expected ${order.length} calls, got ${calls.length} calls`, + ); + for (var i = 0; i < calls.length; i++) { + assert.deepStrictEqual( + order[i], + calls[i].result, + `Expected ${order[i]} for call ${i}, got ${calls[i].result} instead`, + ); } }, checkDuration: function (shouldBe, minBound = 10) { - var results = getResults() - var min = shouldBe - minBound - var max = shouldBe + 50 - if (!(results.callsDuration > min && results.callsDuration < max)) { - console.error('Duration not around ' + shouldBe + '. 
Was ' + results.callsDuration) - } - assert(results.callsDuration > min && results.callsDuration < max) - } - } + var results = getResults(); + var min = shouldBe - minBound; + var max = shouldBe + 100; + assert( + results.callsDuration > min && results.callsDuration < max, + `Expected ${results.callsDuration} to be around ${shouldBe} (between ${min} and ${max})`, + ); + }, + }; - return context -} + return context; +}; diff --git a/test/general.js b/test/general.js index a8192aa..05e7c7c 100644 --- a/test/general.js +++ b/test/general.js @@ -1,867 +1,1012 @@ -var makeTest = require('./context') -var Bottleneck = require('./bottleneck') -var assert = require('assert') -var child_process = require('child_process') +var makeTest = require("./context"); +var Bottleneck = require("./bottleneck"); +var child_process = require("child_process"); +const { describe, it, afterEach } = require("mocha"); -describe('General', function () { - var c +describe("General", function () { + var c; afterEach(function () { - return c.limiter.disconnect(false) - }) + return c.limiter.disconnect(false); + }); if ( - process.env.DATASTORE !== 'redis' && process.env.DATASTORE !== 'ioredis' && - process.env.BUILD !== 'es5' && process.env.BUILD !== 'light' + process.env.DATASTORE !== "redis" && + process.env.DATASTORE !== "ioredis" && + process.env.BUILD !== "light" ) { - it('Should not leak memory on instantiation', async function () { - c = makeTest() - this.timeout(8000) - const { iterate } = require('@token-cjg/leakage') - - const result = await iterate.async(async () => { - const limiter = new Bottleneck({ datastore: 'local' }) - await limiter.ready() - return limiter.disconnect(false) - }, { iterations: 25 }) - - }) - - it('Should not leak memory running jobs', async function () { - c = makeTest() - this.timeout(12000) - const { iterate } = require('@token-cjg/leakage') - const limiter = new Bottleneck({ datastore: 'local', maxConcurrent: 1, minTime: 10 }) - await limiter.ready() - var 
ctr = 0 - var i = 0 - - const result = await iterate.async(async () => { - await limiter.schedule(function (zero, one) { - i = i + zero + one - }, 0, 1) - await limiter.schedule(function (zero, one) { - i = i + zero + one - }, 0, 1) - }, { iterations: 25 }) - c.mustEqual(i, 302) - }) + it("Should not leak memory on instantiation", async function () { + c = makeTest(); + this.timeout(8000); + const { iterate } = require("@token-cjg/leakage"); + + await iterate.async( + async () => { + const limiter = new Bottleneck({ datastore: "local" }); + await limiter.ready(); + return limiter.disconnect(false); + }, + { iterations: 25 }, + ); + }); + + it("Should not leak memory running jobs", async function () { + c = makeTest(); + this.timeout(12000); + const { iterate } = require("@token-cjg/leakage"); + const limiter = new Bottleneck({ datastore: "local", maxConcurrent: 1, minTime: 10 }); + await limiter.ready(); + + var i = 0; + + await iterate.async( + async () => { + await limiter.schedule( + function (zero, one) { + i = i + zero + one; + }, + 0, + 1, + ); + await limiter.schedule( + function (zero, one) { + i = i + zero + one; + }, + 0, + 1, + ); + }, + { iterations: 25 }, + ); + c.mustEqual(i, 302); + }); } - it('Should prompt to upgrade', function () { - c = makeTest() + it("Should prompt to upgrade", function () { + c = makeTest(); try { - var limiter = new Bottleneck(1, 250) + new Bottleneck(1, 250); } catch (err) { - c.mustEqual(err.message, 'Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you\'re upgrading from Bottleneck v1.') + c.mustEqual( + err.message, + "Bottleneck v2 takes a single object argument. 
Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1.", + ); } - }) + }); - it('Should allow null capacity', function () { - c = makeTest({ id: 'null', minTime: 0 }) - return c.limiter.updateSettings({ minTime: 10 }) - }) + it("Should allow null capacity", function () { + c = makeTest({ id: "null", minTime: 0 }); + return c.limiter.updateSettings({ minTime: 10 }); + }); - it('Should keep scope', async function () { - c = makeTest({ maxConcurrent: 1 }) + it("Should keep scope", async function () { + c = makeTest({ maxConcurrent: 1 }); class Job { constructor() { - this.value = 5 + this.value = 5; } action(x) { - return this.value + x + return this.value + x; } } - var job = new Job() + var job = new Job(); - c.mustEqual(6, await c.limiter.schedule(() => job.action.bind(job)(1))) - c.mustEqual(7, await c.limiter.wrap(job.action.bind(job))(2)) - }) + c.mustEqual(6, await c.limiter.schedule(() => job.action.bind(job)(1))); + c.mustEqual(7, await c.limiter.wrap(job.action.bind(job))(2)); + }); - it('Should pass multiple arguments back even on errors when using submit()', function (done) { - c = makeTest({ maxConcurrent: 1 }) + it("Should pass multiple arguments back even on errors when using submit()", function (done) { + c = makeTest({ maxConcurrent: 1 }); - c.limiter.submit(c.job, new Error('welp'), 1, 2, function (err, x, y) { - c.mustEqual(err.message, 'welp') - c.mustEqual(x, 1) - c.mustEqual(y, 2) - done() - }) - }) + c.limiter.submit(c.job, new Error("welp"), 1, 2, function (err, x, y) { + c.mustEqual(err.message, "welp"); + c.mustEqual(x, 1); + c.mustEqual(y, 2); + done(); + }); + }); - it('Should expose the Events library', function (cb) { - c = makeTest() + it("Should expose the Events library", function (cb) { + c = makeTest(); class Hello { constructor() { - this.emitter = new Bottleneck.Events(this) + this.emitter = new Bottleneck.Events(this); } doSomething() { - this.emitter.trigger('info', 'hello', 
'world', 123) - return 5 + this.emitter.trigger("info", "hello", "world", 123); + return 5; } } const myObject = new Hello(); - myObject.on('info', (...args) => { - c.mustEqual(args, ['hello', 'world', 123]) - cb() - }) - myObject.doSomething() - c.mustEqual(myObject.emitter.listenerCount('info'), 1) - c.mustEqual(myObject.emitter.listenerCount('nothing'), 0) - - myObject.on('blah', '') - myObject.on('blah', null) - myObject.on('blah') - return myObject.emitter.trigger('blah') - }) - - describe('Counts and statuses', function () { - it('Should check() and return the queued count with and without a priority value', async function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) - - c.mustEqual(await c.limiter.check(), true) - - c.mustEqual(c.limiter.queued(), 0) - c.mustEqual(await c.limiter.clusterQueued(), 0) - - await c.limiter.submit({id: 1}, c.slowJob, 50, null, 1, c.noErrVal(1)) - c.mustEqual(c.limiter.queued(), 0) // It's already running - - c.mustEqual(await c.limiter.check(), false) - - await c.limiter.submit({id: 2}, c.slowJob, 50, null, 2, c.noErrVal(2)) - c.mustEqual(c.limiter.queued(), 1) - c.mustEqual(await c.limiter.clusterQueued(), 1) - c.mustEqual(c.limiter.queued(1), 0) - c.mustEqual(c.limiter.queued(5), 1) - - await c.limiter.submit({id: 3}, c.slowJob, 50, null, 3, c.noErrVal(3)) - c.mustEqual(c.limiter.queued(), 2) - c.mustEqual(await c.limiter.clusterQueued(), 2) - c.mustEqual(c.limiter.queued(1), 0) - c.mustEqual(c.limiter.queued(5), 2) - - await c.limiter.submit({id: 4}, c.slowJob, 50, null, 4, c.noErrVal(4)) - c.mustEqual(c.limiter.queued(), 3) - c.mustEqual(await c.limiter.clusterQueued(), 3) - c.mustEqual(c.limiter.queued(1), 0) - c.mustEqual(c.limiter.queued(5), 3) - - await c.limiter.submit({priority: 1, id: 5}, c.job, null, 5, c.noErrVal(5)) - c.mustEqual(c.limiter.queued(), 4) - c.mustEqual(await c.limiter.clusterQueued(), 4) - c.mustEqual(c.limiter.queued(1), 1) - c.mustEqual(c.limiter.queued(5), 3) - - var results = await 
c.last() - c.mustEqual(c.limiter.queued(), 0) - c.mustEqual(await c.limiter.clusterQueued(), 0) - c.checkResultsOrder([[1], [5], [2], [3], [4]]) - c.checkDuration(450) - }) - - it('Should return the running and done counts', function () { - c = makeTest({maxConcurrent: 5, minTime: 0}) + myObject.on("info", (...args) => { + c.mustEqual(args, ["hello", "world", 123]); + cb(); + }); + myObject.doSomething(); + c.mustEqual(myObject.emitter.listenerCount("info"), 1); + c.mustEqual(myObject.emitter.listenerCount("nothing"), 0); + + myObject.on("blah", ""); + myObject.on("blah", null); + myObject.on("blah"); + return myObject.emitter.trigger("blah"); + }); + + describe("Counts and statuses", function () { + it("Should check() and return the queued count with and without a priority value", async function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); + + c.mustEqual(await c.limiter.check(), true); + + c.mustEqual(c.limiter.queued(), 0); + c.mustEqual(await c.limiter.clusterQueued(), 0); + + await c.limiter.submit({ id: 1 }, c.slowJob, 50, null, 1, c.noErrVal(1)); + c.mustEqual(c.limiter.queued(), 0); // It's already running + + c.mustEqual(await c.limiter.check(), false); + + await c.limiter.submit({ id: 2 }, c.slowJob, 50, null, 2, c.noErrVal(2)); + c.mustEqual(c.limiter.queued(), 1); + c.mustEqual(await c.limiter.clusterQueued(), 1); + c.mustEqual(c.limiter.queued(1), 0); + c.mustEqual(c.limiter.queued(5), 1); + + await c.limiter.submit({ id: 3 }, c.slowJob, 50, null, 3, c.noErrVal(3)); + c.mustEqual(c.limiter.queued(), 2); + c.mustEqual(await c.limiter.clusterQueued(), 2); + c.mustEqual(c.limiter.queued(1), 0); + c.mustEqual(c.limiter.queued(5), 2); + + await c.limiter.submit({ id: 4 }, c.slowJob, 50, null, 4, c.noErrVal(4)); + c.mustEqual(c.limiter.queued(), 3); + c.mustEqual(await c.limiter.clusterQueued(), 3); + c.mustEqual(c.limiter.queued(1), 0); + c.mustEqual(c.limiter.queued(5), 3); + + await c.limiter.submit({ priority: 1, id: 5 }, c.job, null, 5, 
c.noErrVal(5)); + c.mustEqual(c.limiter.queued(), 4); + c.mustEqual(await c.limiter.clusterQueued(), 4); + c.mustEqual(c.limiter.queued(1), 1); + c.mustEqual(c.limiter.queued(5), 3); + + await c.last(); + c.mustEqual(c.limiter.queued(), 0); + c.mustEqual(await c.limiter.clusterQueued(), 0); + c.checkResultsOrder([[1], [5], [2], [3], [4]]); + c.checkDuration(450); + }); + + it("Should return the running and done counts", function () { + c = makeTest({ maxConcurrent: 5, minTime: 0 }); return Promise.all([c.limiter.running(), c.limiter.done()]) - .then(function ([running, done]) { - c.mustEqual(running, 0) - c.mustEqual(done, 0) - c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({ weight: 3, id: 2 }, c.slowPromise, 200, null, 2), 2) - c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 3 }, c.slowPromise, 100, null, 3), 3) - - return c.limiter.schedule({ weight: 0, id: 4 }, c.promise, null) - }) - .then(function () { - return Promise.all([c.limiter.running(), c.limiter.done()]) - }) - .then(function ([running, done]) { - c.mustEqual(running, 5) - c.mustEqual(done, 0) - return c.wait(125) - }) - .then(function () { - return Promise.all([c.limiter.running(), c.limiter.done()]) - }) - .then(function ([running, done]) { - c.mustEqual(running, 3) - c.mustEqual(done, 2) - return c.wait(100) - }) - .then(function () { - return Promise.all([c.limiter.running(), c.limiter.done()]) - }) - .then(function ([running, done]) { - c.mustEqual(running, 0) - c.mustEqual(done, 5) - return c.last() - }) - .then(function (results) { - c.checkDuration(200) - c.checkResultsOrder([[], [1], [3], [2]]) - }) - }) - - it('Should refuse duplicate Job IDs', async function () { - c = makeTest({maxConcurrent: 2, minTime: 100, trackDoneStatus: true}) + .then(function ([running, done]) { + c.mustEqual(running, 0); + c.mustEqual(done, 0); + c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1); + 
c.pNoErrVal(c.limiter.schedule({ weight: 3, id: 2 }, c.slowPromise, 200, null, 2), 2); + c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 3 }, c.slowPromise, 100, null, 3), 3); + + return c.limiter.schedule({ weight: 0, id: 4 }, c.promise, null); + }) + .then(function () { + return Promise.all([c.limiter.running(), c.limiter.done()]); + }) + .then(function ([running, done]) { + c.mustEqual(running, 5); + c.mustEqual(done, 0); + return c.wait(125); + }) + .then(function () { + return Promise.all([c.limiter.running(), c.limiter.done()]); + }) + .then(function ([running, done]) { + c.mustEqual(running, 3); + c.mustEqual(done, 2); + return c.wait(100); + }) + .then(function () { + return Promise.all([c.limiter.running(), c.limiter.done()]); + }) + .then(function ([running, done]) { + c.mustEqual(running, 0); + c.mustEqual(done, 5); + return c.last(); + }) + .then(function (_results) { + c.checkDuration(200); + c.checkResultsOrder([[], [1], [3], [2]]); + }); + }); + + it("Should refuse duplicate Job IDs", async function () { + c = makeTest({ maxConcurrent: 2, minTime: 100, trackDoneStatus: true }); try { - await c.limiter.schedule({ id: 'a' }, c.promise, null, 1) - await c.limiter.schedule({ id: 'b' }, c.promise, null, 2) - await c.limiter.schedule({ id: 'a' }, c.promise, null, 3) + await c.limiter.schedule({ id: "a" }, c.promise, null, 1); + await c.limiter.schedule({ id: "b" }, c.promise, null, 2); + await c.limiter.schedule({ id: "a" }, c.promise, null, 3); } catch (e) { - c.mustEqual(e.message, 'A job with the same id already exists (id=a)') + c.mustEqual(e.message, "A job with the same id already exists (id=a)"); } - }) - - it('Should return job statuses', function () { - c = makeTest({maxConcurrent: 2, minTime: 100}) - - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 0, RUNNING: 0, EXECUTING: 0 }) - - c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 2 }, c.slowPromise, 
200, null, 2), 2) - c.pNoErrVal(c.limiter.schedule({ weight: 2, id: 3 }, c.slowPromise, 100, null, 3), 3) - c.mustEqual(c.limiter.counts(), { RECEIVED: 3, QUEUED: 0, RUNNING: 0, EXECUTING: 0 }) - - return c.wait(50) - .then(function () { - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 1, RUNNING: 1, EXECUTING: 1 }) - c.mustEqual(c.limiter.jobStatus(1), 'EXECUTING') - c.mustEqual(c.limiter.jobStatus(2), 'RUNNING') - c.mustEqual(c.limiter.jobStatus(3), 'QUEUED') - - return c.last() - }) - .then(function (results) { - c.checkDuration(400) - c.checkResultsOrder([[1], [2], [3]]) - }) - }) - - it('Should return job statuses, including DONE', function () { - c = makeTest({maxConcurrent: 2, minTime: 100, trackDoneStatus: true}) - - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 0, RUNNING: 0, EXECUTING: 0, DONE: 0 }) - - c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 2 }, c.slowPromise, 200, null, 2), 2) - c.pNoErrVal(c.limiter.schedule({ weight: 2, id: 3 }, c.slowPromise, 100, null, 3), 3) - c.mustEqual(c.limiter.counts(), { RECEIVED: 3, QUEUED: 0, RUNNING: 0, EXECUTING: 0, DONE: 0 }) - - return c.wait(50) - .then(function () { - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 1, RUNNING: 1, EXECUTING: 1, DONE: 0 }) - c.mustEqual(c.limiter.jobStatus(1), 'EXECUTING') - c.mustEqual(c.limiter.jobStatus(2), 'RUNNING') - c.mustEqual(c.limiter.jobStatus(3), 'QUEUED') - - return c.wait(100) - }) - .then(function () { - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 1, RUNNING: 0, EXECUTING: 1, DONE: 1 }) - c.mustEqual(c.limiter.jobStatus(1), 'DONE') - c.mustEqual(c.limiter.jobStatus(2), 'EXECUTING') - c.mustEqual(c.limiter.jobStatus(3), 'QUEUED') - - return c.last() - }) - .then(function (results) { - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 0, RUNNING: 0, EXECUTING: 0, DONE: 4 }) - c.checkDuration(400) - c.checkResultsOrder([[1], [2], [3]]) - }) 
- }) - - it('Should return jobs for a status', function () { - c = makeTest({maxConcurrent: 2, minTime: 100, trackDoneStatus: true}) - - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 0, RUNNING: 0, EXECUTING: 0, DONE: 0 }) - - c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 2 }, c.slowPromise, 200, null, 2), 2) - c.pNoErrVal(c.limiter.schedule({ weight: 2, id: 3 }, c.slowPromise, 100, null, 3), 3) - c.mustEqual(c.limiter.counts(), { RECEIVED: 3, QUEUED: 0, RUNNING: 0, EXECUTING: 0, DONE: 0 }) - - c.mustEqual(c.limiter.jobs(), ['1', '2', '3']) - c.mustEqual(c.limiter.jobs('RECEIVED'), ['1', '2', '3']) - - return c.wait(50) - .then(function () { - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 1, RUNNING: 1, EXECUTING: 1, DONE: 0 }) - c.mustEqual(c.limiter.jobs('EXECUTING'), ['1']) - c.mustEqual(c.limiter.jobs('RUNNING'), ['2']) - c.mustEqual(c.limiter.jobs('QUEUED'), ['3']) - - return c.wait(100) - }) - .then(function () { - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 1, RUNNING: 0, EXECUTING: 1, DONE: 1 }) - c.mustEqual(c.limiter.jobs('DONE'), ['1']) - c.mustEqual(c.limiter.jobs('EXECUTING'), ['2']) - c.mustEqual(c.limiter.jobs('QUEUED'), ['3']) - - return c.last() - }) - .then(function (results) { - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 0, RUNNING: 0, EXECUTING: 0, DONE: 4 }) - c.checkDuration(400) - c.checkResultsOrder([[1], [2], [3]]) - }) - }) - - it('Should trigger events on status changes', function () { - c = makeTest({maxConcurrent: 2, minTime: 100, trackDoneStatus: true}) - var onReceived = 0 - var onQueued = 0 - var onScheduled = 0 - var onExecuting = 0 - var onDone = 0 - c.limiter.on('received', (info) => { - c.mustEqual(Object.keys(info).sort(), ['args', 'options']) - onReceived++ - }) - c.limiter.on('queued', (info) => { - c.mustEqual(Object.keys(info).sort(), ['args', 'blocked', 'options', 'reachedHWM']) - onQueued++ - 
}) - c.limiter.on('scheduled', (info) => { - c.mustEqual(Object.keys(info).sort(), ['args', 'options']) - onScheduled++ - }) - c.limiter.on('executing', (info) => { - c.mustEqual(Object.keys(info).sort(), ['args', 'options', 'retryCount']) - onExecuting++ - }) - c.limiter.on('done', (info) => { - c.mustEqual(Object.keys(info).sort(), ['args', 'options', 'retryCount']) - onDone++ - }) - - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 0, RUNNING: 0, EXECUTING: 0, DONE: 0 }) - - c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 2 }, c.slowPromise, 200, null, 2), 2) - c.pNoErrVal(c.limiter.schedule({ weight: 2, id: 3 }, c.slowPromise, 100, null, 3), 3) - c.mustEqual(c.limiter.counts(), { RECEIVED: 3, QUEUED: 0, RUNNING: 0, EXECUTING: 0, DONE: 0 }) - - c.mustEqual([onReceived, onQueued, onScheduled, onExecuting, onDone], [3, 0, 0, 0, 0]) - - return c.wait(50) - .then(function () { - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 1, RUNNING: 1, EXECUTING: 1, DONE: 0 }) - c.mustEqual([onReceived, onQueued, onScheduled, onExecuting, onDone], [3, 3, 2, 1, 0]) - - return c.wait(100) - }) - .then(function () { - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 1, RUNNING: 0, EXECUTING: 1, DONE: 1 }) - c.mustEqual(c.limiter.jobs('DONE'), ['1']) - c.mustEqual(c.limiter.jobs('EXECUTING'), ['2']) - c.mustEqual(c.limiter.jobs('QUEUED'), ['3']) - c.mustEqual([onReceived, onQueued, onScheduled, onExecuting, onDone], [3, 3, 2, 2, 1]) - - return c.last() - }) - .then(function (results) { - c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 0, RUNNING: 0, EXECUTING: 0, DONE: 4 }) - c.mustEqual([onReceived, onQueued, onScheduled, onExecuting, onDone], [4, 4, 4, 4, 4]) - c.checkDuration(400) - c.checkResultsOrder([[1], [2], [3]]) - }) - }) - }) - - describe('Events', function () { - it('Should return itself', function () { - c = makeTest({ id: 'test-limiter' }) - - var 
returned = c.limiter.on('ready', function () { }) - c.mustEqual(returned.id, 'test-limiter') - }) - - it('Should fire events on empty queue', function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) - var calledEmpty = 0 - var calledIdle = 0 - var calledDepleted = 0 - - c.limiter.on('empty', function () { calledEmpty++ }) - c.limiter.on('idle', function () { calledIdle++ }) - c.limiter.on('depleted', function () { calledDepleted++ }) - - return c.pNoErrVal(c.limiter.schedule({id: 1}, c.slowPromise, 50, null, 1), 1) - .then(function () { - c.mustEqual(calledEmpty, 1) - c.mustEqual(calledIdle, 1) - return Promise.all([ - c.pNoErrVal(c.limiter.schedule({id: 2}, c.slowPromise, 50, null, 2), 2), - c.pNoErrVal(c.limiter.schedule({id: 3}, c.slowPromise, 50, null, 3), 3) - ]) - }) - .then(function () { - return c.limiter.submit({id: 4}, c.slowJob, 50, null, 4, null) - }) - .then(function () { - c.checkDuration(250) - c.checkResultsOrder([[1], [2], [3]]) - c.mustEqual(calledEmpty, 3) - c.mustEqual(calledIdle, 2) - c.mustEqual(calledDepleted, 0) - return c.last() - }) - }) - - it('Should fire events once', function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) - var calledEmptyOnce = 0 - var calledIdleOnce = 0 - var calledEmpty = 0 - var calledIdle = 0 - var calledDepleted = 0 - - c.limiter.once('empty', function () { calledEmptyOnce++ }) - c.limiter.once('idle', function () { calledIdleOnce++ }) - c.limiter.on('empty', function () { calledEmpty++ }) - c.limiter.on('idle', function () { calledIdle++ }) - c.limiter.on('depleted', function () { calledDepleted++ }) - - c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 1), 1) - - return c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2) - .then(function () { - c.mustEqual(calledEmptyOnce, 1) - c.mustEqual(calledIdleOnce, 1) - c.mustEqual(calledEmpty, 1) - c.mustEqual(calledIdle, 1) - return c.pNoErrVal(c.limiter.schedule(c.promise, null, 3), 3) - }) - .then(function () { - c.checkDuration(200) - 
c.checkResultsOrder([[1], [2], [3]]) - c.mustEqual(calledEmptyOnce, 1) - c.mustEqual(calledIdleOnce, 1) - c.mustEqual(calledEmpty, 2) - c.mustEqual(calledIdle, 2) - c.mustEqual(calledDepleted, 0) - }) - }) - - it('Should support faulty event listeners', function (done) { - c = makeTest({maxConcurrent: 1, minTime: 100, errorEventsExpected: true}) - var calledError = 0 - - c.limiter.on('error', function (err) { - calledError++ - if (err.message === 'Oh noes!' && calledError === 1) { - done() - } - }) - c.limiter.on('empty', function () { - throw new Error('Oh noes!') - }) - - c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1) - }) - - it('Should wait for async event listeners', function (done) { - c = makeTest({maxConcurrent: 1, minTime: 100, errorEventsExpected: true}) - var calledError = 0 - - c.limiter.on('error', function (err) { - calledError++ - if (err.message === 'It broke!' && calledError === 1) { - done() - } - }) - c.limiter.on('empty', function () { - return c.slowPromise(100, null, 1, 2) - .then(function (x) { - c.mustEqual(x, [1, 2]) - return Promise.reject(new Error('It broke!')) - }) - }) - - c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1) - }) - }) + }); - describe('High water limit', function () { - it('Should support highWater set to 0', function () { - c = makeTest({maxConcurrent: 1, minTime: 0, highWater: 0, rejectOnDrop: false}) + it("Should return job statuses", function () { + c = makeTest({ maxConcurrent: 2, minTime: 100 }); - var first = c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 1), 1) - c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 2), 2) - c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 3), 3) - c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 4), 4) + c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 0, RUNNING: 0, EXECUTING: 0 }); - return first - .then(function () { - return c.last({ weight: 0 }) - }) - .then(function (results) { - c.checkDuration(50) - c.checkResultsOrder([[1]]) - 
}) - }) - - it('Should support highWater set to 1', function () { - c = makeTest({maxConcurrent: 1, minTime: 0, highWater: 1, rejectOnDrop: false}) - - var first = c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 1), 1) - c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 2), 2) - c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 3), 3) - var last = c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 4), 4) + c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1); + c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 2 }, c.slowPromise, 200, null, 2), 2); + c.pNoErrVal(c.limiter.schedule({ weight: 2, id: 3 }, c.slowPromise, 100, null, 3), 3); + c.mustEqual(c.limiter.counts(), { RECEIVED: 3, QUEUED: 0, RUNNING: 0, EXECUTING: 0 }); - return Promise.all([first, last]) - .then(function () { - return c.last({ weight: 0 }) - }) - .then(function (results) { - c.checkDuration(100) - c.checkResultsOrder([[1], [4]]) - }) - }) - }) - - describe('Weight', function () { - it('Should not add jobs with a weight above the maxConcurrent', function () { - c = makeTest({maxConcurrent: 2}) - - c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.promise, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({ weight: 2 }, c.promise, null, 2), 2) - - return c.limiter.schedule({ weight: 3 }, c.promise, null, 3) - .catch(function (err) { - c.mustEqual(err.message, 'Impossible to add a job having a weight of 3 to a limiter having a maxConcurrent setting of 2') - return c.last() - }) - .then(function (results) { - c.checkDuration(0) - c.checkResultsOrder([[1], [2]]) - }) - }) - - - it('Should support custom job weights', function () { - c = makeTest({maxConcurrent: 2}) - - c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.slowPromise, 100, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({ weight: 2 }, c.slowPromise, 200, null, 2), 2) - c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.slowPromise, 100, null, 3), 3) - c.pNoErrVal(c.limiter.schedule({ weight: 1 
}, c.slowPromise, 100, null, 4), 4) - c.pNoErrVal(c.limiter.schedule({ weight: 0 }, c.slowPromise, 100, null, 5), 5) - - return c.last() - .then(function (results) { - c.checkDuration(400) - c.checkResultsOrder([[1], [2], [3], [4], [5]]) - }) - }) - - it('Should overflow at the correct rate', function () { - c = makeTest({ - maxConcurrent: 2, - reservoir: 3 - }) + return c + .wait(50) + .then(function () { + c.mustEqual(c.limiter.counts(), { RECEIVED: 0, QUEUED: 1, RUNNING: 1, EXECUTING: 1 }); + c.mustEqual(c.limiter.jobStatus(1), "EXECUTING"); + c.mustEqual(c.limiter.jobStatus(2), "RUNNING"); + c.mustEqual(c.limiter.jobStatus(3), "QUEUED"); - var calledDepleted = 0 - var emptyArguments = [] - c.limiter.on('depleted', function (empty) { - emptyArguments.push(empty) - calledDepleted++ - }) + return c.last(); + }) + .then(function (_results) { + c.checkDuration(400); + c.checkResultsOrder([[1], [2], [3]]); + }); + }); + + it("Should return job statuses, including DONE", function () { + c = makeTest({ maxConcurrent: 2, minTime: 100, trackDoneStatus: true }); + + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 0, + RUNNING: 0, + EXECUTING: 0, + DONE: 0, + }); + + c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1); + c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 2 }, c.slowPromise, 200, null, 2), 2); + c.pNoErrVal(c.limiter.schedule({ weight: 2, id: 3 }, c.slowPromise, 100, null, 3), 3); + c.mustEqual(c.limiter.counts(), { + RECEIVED: 3, + QUEUED: 0, + RUNNING: 0, + EXECUTING: 0, + DONE: 0, + }); + + return c + .wait(50) + .then(function () { + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 1, + RUNNING: 1, + EXECUTING: 1, + DONE: 0, + }); + c.mustEqual(c.limiter.jobStatus(1), "EXECUTING"); + c.mustEqual(c.limiter.jobStatus(2), "RUNNING"); + c.mustEqual(c.limiter.jobStatus(3), "QUEUED"); + + return c.wait(100); + }) + .then(function () { + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 1, + RUNNING: 
0, + EXECUTING: 1, + DONE: 1, + }); + c.mustEqual(c.limiter.jobStatus(1), "DONE"); + c.mustEqual(c.limiter.jobStatus(2), "EXECUTING"); + c.mustEqual(c.limiter.jobStatus(3), "QUEUED"); + + return c.last(); + }) + .then(function (_results) { + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 0, + RUNNING: 0, + EXECUTING: 0, + DONE: 4, + }); + c.checkDuration(400); + c.checkResultsOrder([[1], [2], [3]]); + }); + }); + + it("Should return jobs for a status", function () { + c = makeTest({ maxConcurrent: 2, minTime: 100, trackDoneStatus: true }); + + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 0, + RUNNING: 0, + EXECUTING: 0, + DONE: 0, + }); + + c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1); + c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 2 }, c.slowPromise, 200, null, 2), 2); + c.pNoErrVal(c.limiter.schedule({ weight: 2, id: 3 }, c.slowPromise, 100, null, 3), 3); + c.mustEqual(c.limiter.counts(), { + RECEIVED: 3, + QUEUED: 0, + RUNNING: 0, + EXECUTING: 0, + DONE: 0, + }); + + c.mustEqual(c.limiter.jobs(), ["1", "2", "3"]); + c.mustEqual(c.limiter.jobs("RECEIVED"), ["1", "2", "3"]); + + return c + .wait(50) + .then(function () { + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 1, + RUNNING: 1, + EXECUTING: 1, + DONE: 0, + }); + c.mustEqual(c.limiter.jobs("EXECUTING"), ["1"]); + c.mustEqual(c.limiter.jobs("RUNNING"), ["2"]); + c.mustEqual(c.limiter.jobs("QUEUED"), ["3"]); + + return c.wait(100); + }) + .then(function () { + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 1, + RUNNING: 0, + EXECUTING: 1, + DONE: 1, + }); + c.mustEqual(c.limiter.jobs("DONE"), ["1"]); + c.mustEqual(c.limiter.jobs("EXECUTING"), ["2"]); + c.mustEqual(c.limiter.jobs("QUEUED"), ["3"]); + + return c.last(); + }) + .then(function (_results) { + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 0, + RUNNING: 0, + EXECUTING: 0, + DONE: 4, + }); + c.checkDuration(400); + c.checkResultsOrder([[1], [2], 
[3]]); + }); + }); + + it("Should trigger events on status changes", function () { + c = makeTest({ maxConcurrent: 2, minTime: 100, trackDoneStatus: true }); + var onReceived = 0; + var onQueued = 0; + var onScheduled = 0; + var onExecuting = 0; + var onDone = 0; + c.limiter.on("received", (info) => { + c.mustEqual(Object.keys(info).sort(), ["args", "options"]); + onReceived++; + }); + c.limiter.on("queued", (info) => { + c.mustEqual(Object.keys(info).sort(), ["args", "blocked", "options", "reachedHWM"]); + onQueued++; + }); + c.limiter.on("scheduled", (info) => { + c.mustEqual(Object.keys(info).sort(), ["args", "options"]); + onScheduled++; + }); + c.limiter.on("executing", (info) => { + c.mustEqual(Object.keys(info).sort(), ["args", "options", "retryCount"]); + onExecuting++; + }); + c.limiter.on("done", (info) => { + c.mustEqual(Object.keys(info).sort(), ["args", "options", "retryCount"]); + onDone++; + }); + + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 0, + RUNNING: 0, + EXECUTING: 0, + DONE: 0, + }); + + c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1); + c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 2 }, c.slowPromise, 200, null, 2), 2); + c.pNoErrVal(c.limiter.schedule({ weight: 2, id: 3 }, c.slowPromise, 100, null, 3), 3); + c.mustEqual(c.limiter.counts(), { + RECEIVED: 3, + QUEUED: 0, + RUNNING: 0, + EXECUTING: 0, + DONE: 0, + }); + + c.mustEqual([onReceived, onQueued, onScheduled, onExecuting, onDone], [3, 0, 0, 0, 0]); + + return c + .wait(50) + .then(function () { + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 1, + RUNNING: 1, + EXECUTING: 1, + DONE: 0, + }); + c.mustEqual([onReceived, onQueued, onScheduled, onExecuting, onDone], [3, 3, 2, 1, 0]); + + return c.wait(100); + }) + .then(function () { + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 1, + RUNNING: 0, + EXECUTING: 1, + DONE: 1, + }); + c.mustEqual(c.limiter.jobs("DONE"), ["1"]); + 
c.mustEqual(c.limiter.jobs("EXECUTING"), ["2"]); + c.mustEqual(c.limiter.jobs("QUEUED"), ["3"]); + c.mustEqual([onReceived, onQueued, onScheduled, onExecuting, onDone], [3, 3, 2, 2, 1]); + + return c.last(); + }) + .then(function (_results) { + c.mustEqual(c.limiter.counts(), { + RECEIVED: 0, + QUEUED: 0, + RUNNING: 0, + EXECUTING: 0, + DONE: 4, + }); + c.mustEqual([onReceived, onQueued, onScheduled, onExecuting, onDone], [4, 4, 4, 4, 4]); + c.checkDuration(400); + c.checkResultsOrder([[1], [2], [3]]); + }); + }); + }); + + describe("Events", function () { + it("Should return itself", function () { + c = makeTest({ id: "test-limiter" }); + + var returned = c.limiter.on("ready", function () {}); + c.mustEqual(returned.id, "test-limiter"); + }); + + it("Should fire events on empty queue", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); + var calledEmpty = 0; + var calledIdle = 0; + var calledDepleted = 0; + + c.limiter.on("empty", function () { + calledEmpty++; + }); + c.limiter.on("idle", function () { + calledIdle++; + }); + c.limiter.on("depleted", function () { + calledDepleted++; + }); + + return c + .pNoErrVal(c.limiter.schedule({ id: 1 }, c.slowPromise, 50, null, 1), 1) + .then(function () { + c.mustEqual(calledEmpty, 1); + c.mustEqual(calledIdle, 1); + return Promise.all([ + c.pNoErrVal(c.limiter.schedule({ id: 2 }, c.slowPromise, 50, null, 2), 2), + c.pNoErrVal(c.limiter.schedule({ id: 3 }, c.slowPromise, 50, null, 3), 3), + ]); + }) + .then(function () { + return c.limiter.submit({ id: 4 }, c.slowJob, 50, null, 4, null); + }) + .then(function () { + c.checkDuration(250); + c.checkResultsOrder([[1], [2], [3]]); + c.mustEqual(calledEmpty, 3); + c.mustEqual(calledIdle, 2); + c.mustEqual(calledDepleted, 0); + return c.last(); + }); + }); + + it("Should fire events once", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); + var calledEmptyOnce = 0; + var calledIdleOnce = 0; + var calledEmpty = 0; + var calledIdle = 0; + var 
calledDepleted = 0; + + c.limiter.once("empty", function () { + calledEmptyOnce++; + }); + c.limiter.once("idle", function () { + calledIdleOnce++; + }); + c.limiter.on("empty", function () { + calledEmpty++; + }); + c.limiter.on("idle", function () { + calledIdle++; + }); + c.limiter.on("depleted", function () { + calledDepleted++; + }); + + c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 1), 1); + + return c + .pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2) + .then(function () { + c.mustEqual(calledEmptyOnce, 1); + c.mustEqual(calledIdleOnce, 1); + c.mustEqual(calledEmpty, 1); + c.mustEqual(calledIdle, 1); + return c.pNoErrVal(c.limiter.schedule(c.promise, null, 3), 3); + }) + .then(function () { + c.checkDuration(200); + c.checkResultsOrder([[1], [2], [3]]); + c.mustEqual(calledEmptyOnce, 1); + c.mustEqual(calledIdleOnce, 1); + c.mustEqual(calledEmpty, 2); + c.mustEqual(calledIdle, 2); + c.mustEqual(calledDepleted, 0); + }); + }); + + it("Should support faulty event listeners", function (done) { + c = makeTest({ maxConcurrent: 1, minTime: 100, errorEventsExpected: true }); + var calledError = 0; + + c.limiter.on("error", function (err) { + calledError++; + if (err.message === "Oh noes!" && calledError === 1) { + done(); + } + }); + c.limiter.on("empty", function () { + throw new Error("Oh noes!"); + }); + + c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1); + }); + + it("Should wait for async event listeners", function (done) { + c = makeTest({ maxConcurrent: 1, minTime: 100, errorEventsExpected: true }); + var calledError = 0; + + c.limiter.on("error", function (err) { + calledError++; + if (err.message === "It broke!" 
&& calledError === 1) { + done(); + } + }); + c.limiter.on("empty", function () { + return c.slowPromise(100, null, 1, 2).then(function (x) { + c.mustEqual(x, [1, 2]); + return Promise.reject(new Error("It broke!")); + }); + }); + + c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1); + }); + }); + + describe("High water limit", function () { + it("Should support highWater set to 0", function () { + c = makeTest({ maxConcurrent: 1, minTime: 0, highWater: 0, rejectOnDrop: false }); + + var first = c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 1), 1); + c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 2), 2); + c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 3), 3); + c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 4), 4); - var p1 = c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), 1) - var p2 = c.pNoErrVal(c.limiter.schedule({ weight: 2, id: 2 }, c.slowPromise, 150, null, 2), 2) - var p3 = c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 3 }, c.slowPromise, 100, null, 3), 3) - var p4 = c.pNoErrVal(c.limiter.schedule({ weight: 1, id: 4 }, c.slowPromise, 100, null, 4), 4) + return first + .then(function () { + return c.last({ weight: 0 }); + }) + .then(function (_results) { + c.checkDuration(50); + c.checkResultsOrder([[1]]); + }); + }); - return Promise.all([p1, p2]) - .then(function () { - c.mustEqual(c.limiter.queued(), 2) - return c.limiter.currentReservoir() - }) - .then(function (reservoir) { - c.mustEqual(reservoir, 0) - c.mustEqual(calledDepleted, 1) - return c.limiter.incrementReservoir(1) - }) - .then(function (reservoir) { - c.mustEqual(reservoir, 1) - return c.last({ priority: 1, weight: 0 }) - }) - .then(function (results) { - c.mustEqual(calledDepleted, 3) - c.mustEqual(c.limiter.queued(), 1) - c.checkDuration(250) - c.checkResultsOrder([[1], [2]]) - return c.limiter.currentReservoir() - }) - .then(function (reservoir) { - c.mustEqual(reservoir, 0) - return c.limiter.updateSettings({ 
reservoir: 1 }) - }) - .then(function () { - return Promise.all([p3, p4]) - }) - .then(function () { - return c.limiter.currentReservoir() - }) - .then(function (reservoir) { - c.mustEqual(reservoir, 0) - c.mustEqual(calledDepleted, 4) - c.mustEqual(emptyArguments, [false, false, false, true]) - }) - }) - }) - - describe('Expiration', function () { - it('Should cancel jobs', function () { - c = makeTest({ maxConcurrent: 2 }) - var t0 = Date.now() + it("Should support highWater set to 1", function () { + c = makeTest({ maxConcurrent: 1, minTime: 0, highWater: 1, rejectOnDrop: false }); - return Promise.all([ - c.pNoErrVal(c.limiter.schedule({ id: 'very-slow-no-expiration' }, c.slowPromise, 150, null, 1), 1), + var first = c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 1), 1); + c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 2), 2); + c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 3), 3); + var last = c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 4), 4); - c.limiter.schedule({ expiration: 50, id: 'slow-with-expiration' }, c.slowPromise, 75, null, 2) + return Promise.all([first, last]) .then(function () { - return Promise.reject(new Error("Should have timed out.")) + return c.last({ weight: 0 }); }) + .then(function (_results) { + c.checkDuration(100); + c.checkResultsOrder([[1], [4]]); + }); + }); + }); + + describe("Weight", function () { + it("Should not add jobs with a weight above the maxConcurrent", function () { + c = makeTest({ maxConcurrent: 2 }); + + c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.promise, null, 1), 1); + c.pNoErrVal(c.limiter.schedule({ weight: 2 }, c.promise, null, 2), 2); + + return c.limiter + .schedule({ weight: 3 }, c.promise, null, 3) .catch(function (err) { - c.mustEqual(err.message, 'This job timed out after 50 ms.') - var duration = Date.now() - t0 - assert(duration > 45 && duration < 80) + c.mustEqual( + err.message, + "Impossible to add a job having a weight of 3 to a limiter having a 
maxConcurrent setting of 2", + ); + return c.last(); + }) + .then(function (_results) { + c.checkDuration(0); + c.checkResultsOrder([[1], [2]]); + }); + }); + + it("Should support custom job weights", function () { + c = makeTest({ maxConcurrent: 2 }); + + c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.slowPromise, 100, null, 1), 1); + c.pNoErrVal(c.limiter.schedule({ weight: 2 }, c.slowPromise, 200, null, 2), 2); + c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.slowPromise, 100, null, 3), 3); + c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.slowPromise, 100, null, 4), 4); + c.pNoErrVal(c.limiter.schedule({ weight: 0 }, c.slowPromise, 100, null, 5), 5); + + return c.last().then(function (_results) { + c.checkDuration(400); + c.checkResultsOrder([[1], [2], [3], [4], [5]]); + }); + }); + + it("Should overflow at the correct rate", function () { + c = makeTest({ + maxConcurrent: 2, + reservoir: 3, + }); + + var calledDepleted = 0; + var emptyArguments = []; + c.limiter.on("depleted", function (empty) { + emptyArguments.push(empty); + calledDepleted++; + }); + + var p1 = c.pNoErrVal( + c.limiter.schedule({ weight: 1, id: 1 }, c.slowPromise, 100, null, 1), + 1, + ); + var p2 = c.pNoErrVal( + c.limiter.schedule({ weight: 2, id: 2 }, c.slowPromise, 150, null, 2), + 2, + ); + var p3 = c.pNoErrVal( + c.limiter.schedule({ weight: 1, id: 3 }, c.slowPromise, 100, null, 3), + 3, + ); + var p4 = c.pNoErrVal( + c.limiter.schedule({ weight: 1, id: 4 }, c.slowPromise, 100, null, 4), + 4, + ); - return Promise.all([c.limiter.running(), c.limiter.done()]) + return Promise.all([p1, p2]) + .then(function () { + c.mustEqual(c.limiter.queued(), 2); + return c.limiter.currentReservoir(); }) - .then(function ([running, done]) { - c.mustEqual(running, 1) - c.mustEqual(done, 1) + .then(function (reservoir) { + c.mustEqual(reservoir, 0); + c.mustEqual(calledDepleted, 1); + return c.limiter.incrementReservoir(1); + }) + .then(function (reservoir) { + c.mustEqual(reservoir, 1); + return 
c.last({ priority: 1, weight: 0 }); + }) + .then(function (_results) { + c.mustEqual(calledDepleted, 3); + c.mustEqual(c.limiter.queued(), 1); + c.checkDuration(250); + c.checkResultsOrder([[1], [2]]); + return c.limiter.currentReservoir(); + }) + .then(function (reservoir) { + c.mustEqual(reservoir, 0); + return c.limiter.updateSettings({ reservoir: 1 }); }) + .then(function () { + return Promise.all([p3, p4]); + }) + .then(function () { + return c.limiter.currentReservoir(); + }) + .then(function (reservoir) { + c.mustEqual(reservoir, 0); + c.mustEqual(calledDepleted, 4); + c.mustEqual(emptyArguments, [false, false, false, true]); + }); + }); + }); + + describe("Expiration", function () { + it("Should cancel jobs", function () { + c = makeTest({ maxConcurrent: 2 }); + var t0 = Date.now(); + return Promise.all([ + c.pNoErrVal( + c.limiter.schedule({ id: "very-slow-no-expiration" }, c.slowPromise, 150, null, 1), + 1, + ), + + c.limiter + .schedule({ expiration: 50, id: "slow-with-expiration" }, c.slowPromise, 75, null, 2) + .then(function () { + return Promise.reject(new Error("Should have timed out.")); + }) + .catch(function (err) { + c.mustEqual(err.message, "This job timed out after 50 ms."); + var duration = Date.now() - t0; + c.mustGt(duration, 45); + c.mustLt(duration, 80); + + return Promise.all([c.limiter.running(), c.limiter.done()]); + }) + .then(function ([running, done]) { + c.mustEqual(running, 1); + c.mustEqual(done, 1); + }), ]) - .then(function () { - var duration = Date.now() - t0 - assert(duration > 145 && duration < 180) - return Promise.all([c.limiter.running(), c.limiter.done()]) - }) - .then(function ([running, done]) { - c.mustEqual(running, 0) - c.mustEqual(done, 2) - }) - }) - }) - - describe('Pubsub', function () { - it('Should pass strings', function (done) { - c = makeTest({ maxConcurrent: 2 }) - - c.limiter.on('message', function (msg) { - c.mustEqual(msg, 'hello') - done() - }) - - c.limiter.publish('hello') - }) - - it('Should pass 
objects', function (done) { - c = makeTest({ maxConcurrent: 2 }) + .then(function () { + var duration = Date.now() - t0; + c.mustGt(duration, 145); + c.mustLt(duration, 180); + return Promise.all([c.limiter.running(), c.limiter.done()]); + }) + .then(function ([running, done]) { + c.mustEqual(running, 0); + c.mustEqual(done, 2); + }); + }); + }); + + describe("Pubsub", function () { + it("Should pass strings", function (done) { + c = makeTest({ maxConcurrent: 2 }); + + c.limiter.on("message", function (msg) { + c.mustEqual(msg, "hello"); + done(); + }); + + c.limiter.publish("hello"); + }); + + it("Should pass objects", function (done) { + c = makeTest({ maxConcurrent: 2 }); var obj = { - array: ['abc', true], - num: 235.59 - } + array: ["abc", true], + num: 235.59, + }; - c.limiter.on('message', function (msg) { - c.mustEqual(JSON.parse(msg), obj) - done() - }) + c.limiter.on("message", function (msg) { + c.mustEqual(JSON.parse(msg), obj); + done(); + }); - c.limiter.publish(JSON.stringify(obj)) - }) - }) + c.limiter.publish(JSON.stringify(obj)); + }); + }); - describe('Reservoir Refresh', function () { - it('Should auto-refresh the reservoir', function () { + describe("Reservoir Refresh", function () { + it("Should auto-refresh the reservoir", function () { c = makeTest({ reservoir: 8, reservoirRefreshInterval: 150, reservoirRefreshAmount: 5, - heartbeatInterval: 75 // not for production use - }) - var calledDepleted = 0 + heartbeatInterval: 75, // not for production use + }); + var calledDepleted = 0; - c.limiter.on('depleted', function () { - calledDepleted++ - }) + c.limiter.on("depleted", function () { + calledDepleted++; + }); return Promise.all([ c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.promise, null, 1), 1), c.pNoErrVal(c.limiter.schedule({ weight: 2 }, c.promise, null, 2), 2), c.pNoErrVal(c.limiter.schedule({ weight: 3 }, c.promise, null, 3), 3), c.pNoErrVal(c.limiter.schedule({ weight: 4 }, c.promise, null, 4), 4), - 
c.pNoErrVal(c.limiter.schedule({ weight: 5 }, c.promise, null, 5), 5) + c.pNoErrVal(c.limiter.schedule({ weight: 5 }, c.promise, null, 5), 5), ]) - .then(function () { - return c.limiter.currentReservoir() - }) - .then(function (reservoir) { - c.mustEqual(reservoir, 0) - return c.last({ weight: 0, priority: 9 }) - }) - .then(function (results) { - c.checkResultsOrder([[1], [2], [3], [4], [5]]) - c.mustEqual(calledDepleted, 2) - c.checkDuration(300) - }) - }) - - it('Should allow staggered X by Y type usage', function () { + .then(function () { + return c.limiter.currentReservoir(); + }) + .then(function (reservoir) { + c.mustEqual(reservoir, 0); + return c.last({ weight: 0, priority: 9 }); + }) + .then(function (_results) { + c.checkResultsOrder([[1], [2], [3], [4], [5]]); + c.mustEqual(calledDepleted, 2); + c.checkDuration(300); + }); + }); + + it("Should allow staggered X by Y type usage", function () { c = makeTest({ reservoir: 2, reservoirRefreshInterval: 150, reservoirRefreshAmount: 2, - heartbeatInterval: 75 // not for production use - }) + heartbeatInterval: 75, // not for production use + }); return Promise.all([ c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1), c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2), c.pNoErrVal(c.limiter.schedule(c.promise, null, 3), 3), - c.pNoErrVal(c.limiter.schedule(c.promise, null, 4), 4) + c.pNoErrVal(c.limiter.schedule(c.promise, null, 4), 4), ]) - .then(function () { - return c.limiter.currentReservoir() - }) - .then(function (reservoir) { - c.mustEqual(reservoir, 0) - return c.last({ weight: 0, priority: 9 }) - }) - .then(function (results) { - c.checkResultsOrder([[1], [2], [3], [4]]) - c.checkDuration(150) - }) - }) - - it('Should keep process alive until queue is empty', function (done) { - c = makeTest() + .then(function () { + return c.limiter.currentReservoir(); + }) + .then(function (reservoir) { + c.mustEqual(reservoir, 0); + return c.last({ weight: 0, priority: 9 }); + }) + .then(function (_results) 
{ + c.checkResultsOrder([[1], [2], [3], [4]]); + c.checkDuration(150); + }); + }); + + it("Should keep process alive until queue is empty", function (done) { + c = makeTest(); var options = { - cwd: process.cwd() + '/test/spawn', - timeout: 1000 - } - child_process.exec('node refreshKeepAlive.js', options, function (err, stdout, stderr) { - c.mustEqual(stdout, '[0][0][2][2]') - c.mustEqual(stderr, '') - done(err) - }) - }) - - }) - - describe('Reservoir Increase', function () { - it('Should auto-increase the reservoir', async function () { + cwd: process.cwd() + "/test/spawn", + timeout: 1000, + }; + child_process.exec("node refreshKeepAlive.js", options, function (err, stdout, stderr) { + c.mustEqual(stdout, "[0][0][2][2]"); + c.mustEqual(stderr, ""); + done(err); + }); + }); + }); + + describe("Reservoir Increase", function () { + it("Should auto-increase the reservoir", async function () { c = makeTest({ reservoir: 3, reservoirIncreaseInterval: 150, reservoirIncreaseAmount: 5, - heartbeatInterval: 75 // not for production use - }) - var calledDepleted = 0 + heartbeatInterval: 75, // not for production use + }); + var calledDepleted = 0; - c.limiter.on('depleted', function () { - calledDepleted++ - }) + c.limiter.on("depleted", function () { + calledDepleted++; + }); await Promise.all([ c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.promise, null, 1), 1), c.pNoErrVal(c.limiter.schedule({ weight: 2 }, c.promise, null, 2), 2), c.pNoErrVal(c.limiter.schedule({ weight: 3 }, c.promise, null, 3), 3), c.pNoErrVal(c.limiter.schedule({ weight: 4 }, c.promise, null, 4), 4), - c.pNoErrVal(c.limiter.schedule({ weight: 5 }, c.promise, null, 5), 5) - ]) - const reservoir = await c.limiter.currentReservoir() - c.mustEqual(reservoir, 3) - - const results = await c.last({ weight: 0, priority: 9 }) - c.checkResultsOrder([[1], [2], [3], [4], [5]]) - c.mustEqual(calledDepleted, 1) - c.checkDuration(450) - }) - - it('Should auto-increase the reservoir up to a maximum', async 
function () { + c.pNoErrVal(c.limiter.schedule({ weight: 5 }, c.promise, null, 5), 5), + ]); + const reservoir = await c.limiter.currentReservoir(); + c.mustEqual(reservoir, 3); + + await c.last({ weight: 0, priority: 9 }); + c.checkResultsOrder([[1], [2], [3], [4], [5]]); + c.mustEqual(calledDepleted, 1); + c.checkDuration(450); + }); + + it("Should auto-increase the reservoir up to a maximum", async function () { c = makeTest({ reservoir: 3, reservoirIncreaseInterval: 150, reservoirIncreaseAmount: 5, reservoirIncreaseMaximum: 6, - heartbeatInterval: 75 // not for production use - }) - var calledDepleted = 0 + heartbeatInterval: 75, // not for production use + }); + var calledDepleted = 0; - c.limiter.on('depleted', function () { - calledDepleted++ - }) + c.limiter.on("depleted", function () { + calledDepleted++; + }); await Promise.all([ c.pNoErrVal(c.limiter.schedule({ weight: 1 }, c.promise, null, 1), 1), c.pNoErrVal(c.limiter.schedule({ weight: 2 }, c.promise, null, 2), 2), c.pNoErrVal(c.limiter.schedule({ weight: 3 }, c.promise, null, 3), 3), c.pNoErrVal(c.limiter.schedule({ weight: 4 }, c.promise, null, 4), 4), - c.pNoErrVal(c.limiter.schedule({ weight: 5 }, c.promise, null, 5), 5) - ]) - const reservoir = await c.limiter.currentReservoir() - c.mustEqual(reservoir, 1) - - const results = await c.last({ weight: 0, priority: 9 }) - c.checkResultsOrder([[1], [2], [3], [4], [5]]) - c.mustEqual(calledDepleted, 1) - c.checkDuration(450) - }) - - it('Should allow staggered X by Y type usage', function () { + c.pNoErrVal(c.limiter.schedule({ weight: 5 }, c.promise, null, 5), 5), + ]); + const reservoir = await c.limiter.currentReservoir(); + c.mustEqual(reservoir, 1); + + await c.last({ weight: 0, priority: 9 }); + c.checkResultsOrder([[1], [2], [3], [4], [5]]); + c.mustEqual(calledDepleted, 1); + c.checkDuration(450); + }); + + it("Should allow staggered X by Y type usage", function () { c = makeTest({ reservoir: 2, reservoirIncreaseInterval: 150, 
reservoirIncreaseAmount: 2, - heartbeatInterval: 75 // not for production use - }) + heartbeatInterval: 75, // not for production use + }); return Promise.all([ c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1), c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2), c.pNoErrVal(c.limiter.schedule(c.promise, null, 3), 3), - c.pNoErrVal(c.limiter.schedule(c.promise, null, 4), 4) + c.pNoErrVal(c.limiter.schedule(c.promise, null, 4), 4), ]) - .then(function () { - return c.limiter.currentReservoir() - }) - .then(function (reservoir) { - c.mustEqual(reservoir, 0) - return c.last({ weight: 0, priority: 9 }) - }) - .then(function (results) { - c.checkResultsOrder([[1], [2], [3], [4]]) - c.checkDuration(150) - }) - }) - - it('Should keep process alive until queue is empty', function (done) { - c = makeTest() + .then(function () { + return c.limiter.currentReservoir(); + }) + .then(function (reservoir) { + c.mustEqual(reservoir, 0); + return c.last({ weight: 0, priority: 9 }); + }) + .then(function (_results) { + c.checkResultsOrder([[1], [2], [3], [4]]); + c.checkDuration(150); + }); + }); + + it("Should keep process alive until queue is empty", function (done) { + c = makeTest(); var options = { - cwd: process.cwd() + '/test/spawn', - timeout: 1000 - } - child_process.exec('node increaseKeepAlive.js', options, function (err, stdout, stderr) { - c.mustEqual(stdout, '[0][0][2][2]') - c.mustEqual(stderr, '') - done(err) - }) - }) - }) - -}) + cwd: process.cwd() + "/test/spawn", + timeout: 1000, + }; + child_process.exec("node increaseKeepAlive.js", options, function (err, stdout, stderr) { + c.mustEqual(stdout, "[0][0][2][2]"); + c.mustEqual(stderr, ""); + done(err); + }); + }); + }); +}); diff --git a/test/group.js b/test/group.js index 5c21ab6..2368ac1 100644 --- a/test/group.js +++ b/test/group.js @@ -1,255 +1,266 @@ -var makeTest = require('./context') -var Bottleneck = require('./bottleneck') -var assert = require('assert') +var makeTest = require("./context"); 
+var Bottleneck = require("./bottleneck"); +var assert = require("assert"); +const { describe, it, afterEach } = require("mocha"); -describe('Group', function () { - var c +describe("Group", function () { + var c; afterEach(function () { - return c.limiter.disconnect(false) - }) + return c.limiter.disconnect(false); + }); - it('Should create limiters', function (done) { - c = makeTest() + it("Should create limiters", function (done) { + c = makeTest(); var group = new Bottleneck.Group({ - maxConcurrent: 1, minTime: 100 - }) + maxConcurrent: 1, + minTime: 100, + }); - var results = [] + var results = []; var job = function (...result) { - results.push(result) - return new Promise(function (resolve, reject) { + results.push(result); + return new Promise(function (resolve, _reject) { setTimeout(function () { - return resolve() - }, 50) - }) - } - - group.key('A').schedule(job, 1, 2) - group.key('A').schedule(job, 3) - group.key('A').schedule(job, 4) + return resolve(); + }, 50); + }); + }; + + group.key("A").schedule(job, 1, 2); + group.key("A").schedule(job, 3); + group.key("A").schedule(job, 4); setTimeout(function () { - group.key('B').schedule(job, 5) - }, 20) + group.key("B").schedule(job, 5); + }, 20); setTimeout(function () { - group.key('C').schedule(job, 6) - group.key('C').schedule(job, 7) - }, 40) - - group.key('A').submit(function (cb) { - c.mustEqual(results, [[1,2], [5], [6], [3], [7], [4]]) - cb() - done() - }, null) - }) - - it('Should set up the limiter IDs (default)', function () { - c = makeTest() + group.key("C").schedule(job, 6); + group.key("C").schedule(job, 7); + }, 40); + + group.key("A").submit(function (cb) { + c.mustEqual(results, [[1, 2], [5], [6], [3], [7], [4]]); + cb(); + done(); + }, null); + }); + + it("Should set up the limiter IDs (default)", function () { + c = makeTest(); var group = new Bottleneck.Group({ - maxConcurrent: 1, minTime: 100 - }) + maxConcurrent: 1, + minTime: 100, + }); - c.mustEqual(group.key('A').id, 
'group-key-A') - c.mustEqual(group.key('B').id, 'group-key-B') - c.mustEqual(group.key('XYZ').id, 'group-key-XYZ') + c.mustEqual(group.key("A").id, "group-key-A"); + c.mustEqual(group.key("B").id, "group-key-B"); + c.mustEqual(group.key("XYZ").id, "group-key-XYZ"); var ids = group.keys().map(function (key) { - var limiter = group.key(key) - c.mustEqual(limiter._store.timeout, group.timeout) - return limiter.id - }) - c.mustEqual(ids.sort(), ['group-key-A', 'group-key-B', 'group-key-XYZ']) - }) - - it('Should set up the limiter IDs (custom)', function () { - c = makeTest() + var limiter = group.key(key); + c.mustEqual(limiter._store.timeout, group.timeout); + return limiter.id; + }); + c.mustEqual(ids.sort(), ["group-key-A", "group-key-B", "group-key-XYZ"]); + }); + + it("Should set up the limiter IDs (custom)", function () { + c = makeTest(); var group = new Bottleneck.Group({ - maxConcurrent: 1, minTime: 100, - id: 'custom-id' - }) + maxConcurrent: 1, + minTime: 100, + id: "custom-id", + }); - c.mustEqual(group.key('A').id, 'custom-id-A') - c.mustEqual(group.key('B').id, 'custom-id-B') - c.mustEqual(group.key('XYZ').id, 'custom-id-XYZ') + c.mustEqual(group.key("A").id, "custom-id-A"); + c.mustEqual(group.key("B").id, "custom-id-B"); + c.mustEqual(group.key("XYZ").id, "custom-id-XYZ"); var ids = group.keys().map(function (key) { - var limiter = group.key(key) - c.mustEqual(limiter._store.timeout, group.timeout) - return limiter.id - }) - c.mustEqual(ids.sort(), ['custom-id-A', 'custom-id-B', 'custom-id-XYZ']) - }) - - it('Should pass new limiter to \'created\' event', function () { - c = makeTest() + var limiter = group.key(key); + c.mustEqual(limiter._store.timeout, group.timeout); + return limiter.id; + }); + c.mustEqual(ids.sort(), ["custom-id-A", "custom-id-B", "custom-id-XYZ"]); + }); + + it("Should pass new limiter to 'created' event", function () { + c = makeTest(); var group = new Bottleneck.Group({ - maxConcurrent: 1, minTime: 100 - }) - - var keys = [] - 
var ids = [] - var promises = [] - group.on('created', function (created, key) { - keys.push(key) + maxConcurrent: 1, + minTime: 100, + }); + + var keys = []; + var ids = []; + var promises = []; + group.on("created", function (created, key) { + keys.push(key); promises.push( - created.updateSettings({ id: key }) - .then(function (limiter) { - ids.push(limiter.id) - }) - ) - }) - - group.key('A') - group.key('B') - group.key('A') - group.key('B') - group.key('B') - group.key('BB') - group.key('C') - group.key('A') - - return Promise.all(promises) - .then(function () { - c.mustEqual(keys, ids) - return c.limiter.ready() - }) - - }) - - it('Should pass error on failure', function (done) { - var failureMessage = 'SOMETHING BLEW UP!!' - c = makeTest() + created.updateSettings({ id: key }).then(function (limiter) { + ids.push(limiter.id); + }), + ); + }); + + group.key("A"); + group.key("B"); + group.key("A"); + group.key("B"); + group.key("B"); + group.key("BB"); + group.key("C"); + group.key("A"); + + return Promise.all(promises).then(function () { + c.mustEqual(keys, ids); + return c.limiter.ready(); + }); + }); + + it("Should pass error on failure", function (done) { + var failureMessage = "SOMETHING BLEW UP!!"; + c = makeTest(); var group = new Bottleneck.Group({ - maxConcurrent: 1, minTime: 100 - }) - c.mustEqual(Object.keys(group.limiters), []) + maxConcurrent: 1, + minTime: 100, + }); + c.mustEqual(Object.keys(group.limiters), []); - var results = [] + var results = []; var job = function (...result) { - results.push(result) - return new Promise(function (resolve, reject) { + results.push(result); + return new Promise(function (resolve, _reject) { setTimeout(function () { - return resolve() - }, 50) - }) - } - - group.key('A').schedule(job, 1, 2) - group.key('A').schedule(job, 3) - group.key('A').schedule(job, 4) - group.key('B').schedule(() => Promise.reject(new Error(failureMessage))) - .catch(function (err) { - results.push(['CAUGHT', err.message]) - }) + 
return resolve(); + }, 50); + }); + }; + + group.key("A").schedule(job, 1, 2); + group.key("A").schedule(job, 3); + group.key("A").schedule(job, 4); + group + .key("B") + .schedule(() => Promise.reject(new Error(failureMessage))) + .catch(function (err) { + results.push(["CAUGHT", err.message]); + }); setTimeout(function () { - group.key('C').schedule(job, 6) - group.key('C').schedule(job, 7) - }, 40) - - - group.key('A').submit(function (cb) { - c.mustEqual(results, [[1,2], ['CAUGHT', failureMessage], [6], [3], [7], [4]]) - cb() - done() - }, null) - }) - - it('Should update its timeout', function () { - c = makeTest() + group.key("C").schedule(job, 6); + group.key("C").schedule(job, 7); + }, 40); + + group.key("A").submit(function (cb) { + c.mustEqual(results, [[1, 2], ["CAUGHT", failureMessage], [6], [3], [7], [4]]); + cb(); + done(); + }, null); + }); + + it("Should update its timeout", function () { + c = makeTest(); var group1 = new Bottleneck.Group({ - maxConcurrent: 1, minTime: 100 - }) + maxConcurrent: 1, + minTime: 100, + }); var group2 = new Bottleneck.Group({ - maxConcurrent: 1, minTime: 100, timeout: 5000 - }) - - c.mustEqual(group1.timeout, 300000) - c.mustEqual(group2.timeout, 5000) - - var p1 = group1.updateSettings({ timeout: 123 }) - var p2 = group2.updateSettings({ timeout: 456 }) - return Promise.all([p1, p2]) - .then(function () { - c.mustEqual(group1.timeout, 123) - c.mustEqual(group2.timeout, 456) - }) - }) - - it('Should update its limiter options', function () { - c = makeTest() + maxConcurrent: 1, + minTime: 100, + timeout: 5000, + }); + + c.mustEqual(group1.timeout, 300000); + c.mustEqual(group2.timeout, 5000); + + var p1 = group1.updateSettings({ timeout: 123 }); + var p2 = group2.updateSettings({ timeout: 456 }); + return Promise.all([p1, p2]).then(function () { + c.mustEqual(group1.timeout, 123); + c.mustEqual(group2.timeout, 456); + }); + }); + + it("Should update its limiter options", function () { + c = makeTest(); var group = new 
Bottleneck.Group({ - maxConcurrent: 1, minTime: 100 - }) + maxConcurrent: 1, + minTime: 100, + }); - var limiter1 = group.key('AAA') - c.mustEqual(limiter1._store.storeOptions.minTime, 100) + var limiter1 = group.key("AAA"); + c.mustEqual(limiter1._store.storeOptions.minTime, 100); - group.updateSettings({ minTime: 200 }) - c.mustEqual(limiter1._store.storeOptions.minTime, 100) + group.updateSettings({ minTime: 200 }); + c.mustEqual(limiter1._store.storeOptions.minTime, 100); - var limiter2 = group.key('BBB') - c.mustEqual(limiter2._store.storeOptions.minTime, 200) - }) + var limiter2 = group.key("BBB"); + c.mustEqual(limiter2._store.storeOptions.minTime, 200); + }); - it('Should support keys(), limiters(), deleteKey()', function () { - c = makeTest() + it("Should support keys(), limiters(), deleteKey()", function () { + c = makeTest(); var group1 = new Bottleneck.Group({ - maxConcurrent: 1 - }) - var KEY_A = "AAA" - var KEY_B = "BBB" + maxConcurrent: 1, + }); + var KEY_A = "AAA"; + var KEY_B = "BBB"; return Promise.all([ c.pNoErrVal(group1.key(KEY_A).schedule(c.promise, null, 1), 1), - c.pNoErrVal(group1.key(KEY_B).schedule(c.promise, null, 2), 2) + c.pNoErrVal(group1.key(KEY_B).schedule(c.promise, null, 2), 2), ]) - .then(function () { - var keys = group1.keys() - var limiters = group1.limiters() - c.mustEqual(keys, [KEY_A, KEY_B]) - c.mustEqual(limiters.length, 2) - - limiters.forEach(function (limiter, i) { - c.mustEqual(limiter.key, keys[i]) - assert(limiter.limiter instanceof Bottleneck) + .then(function () { + var keys = group1.keys(); + var limiters = group1.limiters(); + c.mustEqual(keys, [KEY_A, KEY_B]); + c.mustEqual(limiters.length, 2); + + limiters.forEach(function (limiter, i) { + c.mustEqual(limiter.key, keys[i]); + assert( + limiter.limiter instanceof Bottleneck, + "Expected limiter.limiter to be a Bottleneck", + ); + }); + + return group1.deleteKey(KEY_A); }) - - return group1.deleteKey(KEY_A) - }) - .then(function (deleted) { - 
c.mustEqual(deleted, true) - c.mustEqual(group1.keys().length, 1) - return group1.deleteKey(KEY_A) - }) - .then(function (deleted) { - c.mustEqual(deleted, false) - c.mustEqual(group1.keys().length, 1) - }) - }) - - it('Should call autocleanup', function () { - var KEY = 'test-key' + .then(function (deleted) { + c.mustEqual(deleted, true); + c.mustEqual(group1.keys().length, 1); + return group1.deleteKey(KEY_A); + }) + .then(function (deleted) { + c.mustEqual(deleted, false); + c.mustEqual(group1.keys().length, 1); + }); + }); + + it("Should call autocleanup", function () { + var KEY = "test-key"; var group = new Bottleneck.Group({ - maxConcurrent: 1 - }) - group.updateSettings({ timeout: 50 }) - c = makeTest({ id: 'something', timeout: group.timeout }) - - group.instances[KEY] = c.limiter - return group.key(KEY).schedule(function () { - return Promise.resolve() - }) - .then(function () { - assert(group.instances[KEY] != null) - return new Promise(function (resolve, reject) { - setTimeout(resolve, 100) + maxConcurrent: 1, + }); + group.updateSettings({ timeout: 50 }); + c = makeTest({ id: "something", timeout: group.timeout }); + + group.instances[KEY] = c.limiter; + return group + .key(KEY) + .schedule(function () { + return Promise.resolve(); }) - }) - .then(function () { - assert(group.instances[KEY] == null) - }) - }) - -}) + .then(function () { + c.mustExist(group.instances[KEY]); + return new Promise(function (resolve, _reject) { + setTimeout(resolve, 100); + }); + }) + .then(function () { + c.mustNotExist(group.instances[KEY]); + }); + }); +}); diff --git a/test/ioredis.js b/test/ioredis.js index 3a68d84..a2afea7 100644 --- a/test/ioredis.js +++ b/test/ioredis.js @@ -1,135 +1,143 @@ -var makeTest = require('./context') -var Bottleneck = require('./bottleneck') -var assert = require('assert') -var Redis = require('ioredis') +var makeTest = require("./context"); +var Bottleneck = require("./bottleneck"); +var Redis = require("ioredis"); +const { describe, it, 
afterEach } = require("mocha"); -if (process.env.DATASTORE === 'ioredis') { - describe('ioredis-only', function () { - var c +if (process.env.DATASTORE === "ioredis") { + describe("ioredis-only", function () { + var c; afterEach(function () { - return c.limiter.disconnect(false) - }) + return c.limiter.disconnect(false); + }); - it('Should accept ioredis lib override', function () { + it("Should accept ioredis lib override", function () { c = makeTest({ maxConcurrent: 2, Redis, clientOptions: {}, - clusterNodes: [{ - host: process.env.REDIS_HOST, - port: process.env.REDIS_PORT - }] - }) - - c.mustEqual(c.limiter.datastore, 'ioredis') - }) - - it('Should connect in Redis Cluster mode', function () { + clusterNodes: [ + { + host: process.env.REDIS_HOST, + port: process.env.REDIS_PORT, + }, + ], + }); + + c.mustEqual(c.limiter.datastore, "ioredis"); + }); + + it("Should connect in Redis Cluster mode", function () { c = makeTest({ maxConcurrent: 2, clientOptions: {}, - clusterNodes: [{ - host: process.env.REDIS_HOST, - port: process.env.REDIS_PORT - }] - }) - - c.mustEqual(c.limiter.datastore, 'ioredis') - assert(c.limiter._store.connection.client.nodes().length >= 0) - }) - - it('Should connect in Redis Cluster mode with premade client', function () { - var client = new Redis.Cluster('') - var connection = new Bottleneck.IORedisConnection({ client }) + clusterNodes: [ + { + host: process.env.REDIS_HOST, + port: process.env.REDIS_PORT, + }, + ], + }); + + c.mustEqual(c.limiter.datastore, "ioredis"); + c.mustGte(c.limiter._store.connection.client.nodes().length, 0); + }); + + it("Should connect in Redis Cluster mode with premade client", function () { + var client = new Redis.Cluster(""); + var connection = new Bottleneck.IORedisConnection({ client }); c = makeTest({ maxConcurrent: 2, clientOptions: {}, - clusterNodes: [{ - host: process.env.REDIS_HOST, - port: process.env.REDIS_PORT - }] - }) - - c.mustEqual(c.limiter.datastore, 'ioredis') - 
assert(c.limiter._store.connection.client.nodes().length >= 0) - connection.disconnect(false) - }) - - it('Should accept existing connections', function () { - var connection = new Bottleneck.IORedisConnection() - connection.id = 'super-connection' + clusterNodes: [ + { + host: process.env.REDIS_HOST, + port: process.env.REDIS_PORT, + }, + ], + }); + + c.mustEqual(c.limiter.datastore, "ioredis"); + c.mustGte(c.limiter._store.connection.client.nodes().length, 0); + connection.disconnect(false); + }); + + it("Should accept existing connections", function () { + var connection = new Bottleneck.IORedisConnection(); + connection.id = "super-connection"; c = makeTest({ minTime: 50, - connection - }) + connection, + }); - c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1) - c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2) + c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1); + c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2); - return c.last() - .then(function (results) { - c.checkResultsOrder([[1], [2]]) - c.checkDuration(50) - c.mustEqual(c.limiter.connection.id, 'super-connection') - c.mustEqual(c.limiter.datastore, 'ioredis') + return c + .last() + .then(function (_results) { + c.checkResultsOrder([[1], [2]]); + c.checkDuration(50); + c.mustEqual(c.limiter.connection.id, "super-connection"); + c.mustEqual(c.limiter.datastore, "ioredis"); - return c.limiter.disconnect() + return c.limiter.disconnect(); }) .then(function () { - // Shared connections should not be disconnected by the limiter - c.mustEqual(c.limiter.clients().client.status, 'ready') - return connection.disconnect() - }) - }) - - it('Should accept existing redis clients', function () { - var client = new Redis() - client.id = 'super-client' - - var connection = new Bottleneck.IORedisConnection({ client }) - connection.id = 'super-connection' + // Shared connections should not be disconnected by the limiter + c.mustEqual(c.limiter.clients().client.status, "ready"); + return 
connection.disconnect(); + }); + }); + + it("Should accept existing redis clients", function () { + var client = new Redis(); + client.id = "super-client"; + + var connection = new Bottleneck.IORedisConnection({ client }); + connection.id = "super-connection"; c = makeTest({ minTime: 50, - connection - }) - - c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1) - c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2) - - return c.last() - .then(function (results) { - c.checkResultsOrder([[1], [2]]) - c.checkDuration(50) - c.mustEqual(c.limiter.clients().client.id, 'super-client') - c.mustEqual(c.limiter.connection.id, 'super-connection') - c.mustEqual(c.limiter.datastore, 'ioredis') - - return c.limiter.disconnect() + connection, + }); + + c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1); + c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2); + + return c + .last() + .then(function (_results) { + c.checkResultsOrder([[1], [2]]); + c.checkDuration(50); + c.mustEqual(c.limiter.clients().client.id, "super-client"); + c.mustEqual(c.limiter.connection.id, "super-connection"); + c.mustEqual(c.limiter.datastore, "ioredis"); + + return c.limiter.disconnect(); }) .then(function () { - // Shared connections should not be disconnected by the limiter - c.mustEqual(c.limiter.clients().client.status, 'ready') - return connection.disconnect() - }) - }) + // Shared connections should not be disconnected by the limiter + c.mustEqual(c.limiter.clients().client.status, "ready"); + return connection.disconnect(); + }); + }); - it('Should trigger error events on the shared connection', function (done) { + it("Should trigger error events on the shared connection", function (done) { var connection = new Bottleneck.IORedisConnection({ clientOptions: { - port: 1 - } - }) - connection.on('error', function (err) { - c.mustEqual(c.limiter.datastore, 'ioredis') - connection.disconnect() - done() - }) - - c = makeTest({ connection }) - c.limiter.on('error', function (err) { - 
done(err) - }) - }) - }) + port: 1, + }, + }); + connection.on("error", function (_err) { + c.mustEqual(c.limiter.datastore, "ioredis"); + connection.disconnect(); + done(); + }); + + c = makeTest({ connection }); + c.limiter.on("error", function (err) { + done(err); + }); + }); + }); } diff --git a/test/node_redis.js b/test/node_redis.js index cd204d3..cea122f 100644 --- a/test/node_redis.js +++ b/test/node_redis.js @@ -1,100 +1,102 @@ -var makeTest = require('./context') -var Bottleneck = require('./bottleneck') -var assert = require('assert') -var Redis = require('redis') +var makeTest = require("./context"); +var Bottleneck = require("./bottleneck"); +const { describe, it, afterEach } = require("mocha"); +var Redis = require("redis"); -if (process.env.DATASTORE === 'redis') { - describe('node_redis-only', function () { - var c +if (process.env.DATASTORE === "redis") { + describe("node_redis-only", function () { + var c; afterEach(function () { - return c.limiter.disconnect(false) - }) + return c.limiter.disconnect(false); + }); - it('Should accept node_redis lib override', function () { + it("Should accept node_redis lib override", function () { c = makeTest({ maxConcurrent: 2, Redis, - clientOptions: {} - }) + clientOptions: {}, + }); - c.mustEqual(c.limiter.datastore, 'redis') - }) + c.mustEqual(c.limiter.datastore, "redis"); + }); - it('Should accept existing connections', function () { - var connection = new Bottleneck.RedisConnection() - connection.id = 'super-connection' + it("Should accept existing connections", function () { + var connection = new Bottleneck.RedisConnection(); + connection.id = "super-connection"; c = makeTest({ minTime: 50, - connection - }) + connection, + }); - c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1) - c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2) + c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1); + c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2); - return c.last() - .then(function (results) { - 
c.checkResultsOrder([[1], [2]]) - c.checkDuration(50) - c.mustEqual(c.limiter.connection.id, 'super-connection') - c.mustEqual(c.limiter.datastore, 'redis') + return c + .last() + .then(function (_results) { + c.checkResultsOrder([[1], [2]]); + c.checkDuration(50); + c.mustEqual(c.limiter.connection.id, "super-connection"); + c.mustEqual(c.limiter.datastore, "redis"); - return c.limiter.disconnect() + return c.limiter.disconnect(); }) .then(function () { - // Shared connections should not be disconnected by the limiter - c.mustEqual(c.limiter.clients().client.ready, true) - return connection.disconnect() - }) - }) + // Shared connections should not be disconnected by the limiter + c.mustEqual(c.limiter.clients().client.ready, true); + return connection.disconnect(); + }); + }); - it('Should accept existing redis clients', function () { - var client = Redis.createClient() - client.id = 'super-client' + it("Should accept existing redis clients", function () { + var client = Redis.createClient(); + client.id = "super-client"; - var connection = new Bottleneck.RedisConnection({ client }) - connection.id = 'super-connection' + var connection = new Bottleneck.RedisConnection({ client }); + connection.id = "super-connection"; c = makeTest({ minTime: 50, - connection - }) + connection, + }); - c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1) - c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2) + c.pNoErrVal(c.limiter.schedule(c.promise, null, 1), 1); + c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2); - return c.last() - .then(function (results) { - c.checkResultsOrder([[1], [2]]) - c.checkDuration(50) - c.mustEqual(c.limiter.clients().client.id, 'super-client') - c.mustEqual(c.limiter.connection.id, 'super-connection') - c.mustEqual(c.limiter.datastore, 'redis') + return c + .last() + .then(function (_results) { + c.checkResultsOrder([[1], [2]]); + c.checkDuration(50); + c.mustEqual(c.limiter.clients().client.id, "super-client"); + 
c.mustEqual(c.limiter.connection.id, "super-connection"); + c.mustEqual(c.limiter.datastore, "redis"); - return c.limiter.disconnect() + return c.limiter.disconnect(); }) .then(function () { - // Shared connections should not be disconnected by the limiter - c.mustEqual(c.limiter.clients().client.ready, true) - return connection.disconnect() - }) - }) + // Shared connections should not be disconnected by the limiter + c.mustEqual(c.limiter.clients().client.ready, true); + return connection.disconnect(); + }); + }); - it('Should trigger error events on the shared connection', function (done) { + it("Should trigger error events on the shared connection", function (done) { var connection = new Bottleneck.RedisConnection({ clientOptions: { - port: 1 - } - }) - connection.on('error', function (err) { - c.mustEqual(c.limiter.datastore, 'redis') - connection.disconnect() - done() - }) + port: 1, + }, + }); + connection.on("error", function (_err) { + c.mustEqual(c.limiter.datastore, "redis"); + connection.disconnect(); + done(); + }); - c = makeTest({ connection }) - c.limiter.on('error', function (err) { - done(err) - }) - }) - }) + c = makeTest({ connection }); + c.limiter.on("error", function (err) { + done(err); + }); + }); + }); } diff --git a/test/priority.js b/test/priority.js index f89b85f..e447442 100644 --- a/test/priority.js +++ b/test/priority.js @@ -1,184 +1,187 @@ -var makeTest = require('./context') -var Bottleneck = require('./bottleneck') -var assert = require('assert') - -describe('Priority', function () { - var c +var makeTest = require("./context"); +var Bottleneck = require("./bottleneck"); +var assert = require("assert"); +const { describe, it, afterEach } = require("mocha"); +describe("Priority", function () { + var c; afterEach(function () { - return c.limiter.disconnect(false) - }) + return c.limiter.disconnect(false); + }); - it('Should do basic ordering', function () { - c = makeTest({maxConcurrent: 1, minTime: 100, rejectOnDrop: false}) + 
it("Should do basic ordering", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100, rejectOnDrop: false }); return Promise.all([ c.pNoErrVal(c.limiter.schedule(c.slowPromise, 50, null, 1), 1), c.pNoErrVal(c.limiter.schedule(c.promise, null, 2), 2), - c.pNoErrVal(c.limiter.schedule({priority: 1}, c.promise, null, 5, 6), 5, 6), + c.pNoErrVal(c.limiter.schedule({ priority: 1 }, c.promise, null, 5, 6), 5, 6), c.pNoErrVal(c.limiter.schedule(c.promise, null, 3), 3), - c.pNoErrVal(c.limiter.schedule(c.promise, null, 4), 4) + c.pNoErrVal(c.limiter.schedule(c.promise, null, 4), 4), ]) - .then(function () { - return c.last() - }) - .then(function (results) { - c.checkResultsOrder([[1], [5,6], [2] ,[3], [4]]) - c.checkDuration(400) - }) - }) - - it('Should support LEAK', function () { + .then(function () { + return c.last(); + }) + .then(function (_results) { + c.checkResultsOrder([[1], [5, 6], [2], [3], [4]]); + c.checkDuration(400); + }); + }); + + it("Should support LEAK", function () { c = makeTest({ maxConcurrent: 1, minTime: 100, highWater: 3, strategy: Bottleneck.strategy.LEAK, - rejectOnDrop: false - }) - - var called = false - c.limiter.on('dropped', function (dropped) { - c.mustExist(dropped.task) - c.mustExist(dropped.args) - c.mustExist(dropped.promise) - called = true - }) - - c.limiter.submit(c.slowJob, 50, null, 1, c.noErrVal(1)) - c.limiter.submit(c.job, null, 2, c.noErrVal(2)) - c.limiter.submit(c.job, null, 3, c.noErrVal(3)) - c.limiter.submit(c.job, null, 4, c.noErrVal(4)) - c.limiter.submit({priority: 2}, c.job, null, 5, c.noErrVal(5)) - c.limiter.submit({priority: 1}, c.job, null, 6, c.noErrVal(6)) - c.limiter.submit({priority: 9}, c.job, null, 7, c.noErrVal(7)) - - return c.last({ weight: 0 }) - .then(function (results) { - c.checkDuration(200) - c.checkResultsOrder([[1], [6], [5]]) - c.mustEqual(called, true) - }) - }) - - it('Should support OVERFLOW', function () { + rejectOnDrop: false, + }); + + var called = false; + c.limiter.on("dropped", 
function (dropped) { + c.mustExist(dropped.task); + c.mustExist(dropped.args); + c.mustExist(dropped.promise); + called = true; + }); + + c.limiter.submit(c.slowJob, 50, null, 1, c.noErrVal(1)); + c.limiter.submit(c.job, null, 2, c.noErrVal(2)); + c.limiter.submit(c.job, null, 3, c.noErrVal(3)); + c.limiter.submit(c.job, null, 4, c.noErrVal(4)); + c.limiter.submit({ priority: 2 }, c.job, null, 5, c.noErrVal(5)); + c.limiter.submit({ priority: 1 }, c.job, null, 6, c.noErrVal(6)); + c.limiter.submit({ priority: 9 }, c.job, null, 7, c.noErrVal(7)); + + return c.last({ weight: 0 }).then(function (_results) { + c.checkDuration(200); + c.checkResultsOrder([[1], [6], [5]]); + c.mustEqual(called, true); + }); + }); + + it("Should support OVERFLOW", function () { c = makeTest({ maxConcurrent: 1, minTime: 100, highWater: 2, strategy: Bottleneck.strategy.OVERFLOW, - rejectOnDrop: false - }) - var called = false - c.limiter.on('dropped', function (dropped) { - c.mustExist(dropped.task) - c.mustExist(dropped.args) - c.mustExist(dropped.promise) - called = true - }) - - c.limiter.submit(c.slowJob, 50, null, 1, c.noErrVal(1)) - c.limiter.submit(c.job, null, 2, c.noErrVal(2)) - c.limiter.submit(c.job, null, 3, c.noErrVal(3)) - c.limiter.submit(c.job, null, 4, c.noErrVal(4)) - c.limiter.submit({priority: 2}, c.job, null, 5, c.noErrVal(5)) - c.limiter.submit({priority: 1}, c.job, null, 6, c.noErrVal(6)) - - return c.limiter.submit({priority: 9}, c.job, null, 7, c.noErrVal(7)) - .then(function () { - return c.limiter.updateSettings({ highWater: null }) - }) - .then(c.last) - .then(function (results) { - c.checkDuration(200) - c.checkResultsOrder([[1], [2], [3]]) - c.mustEqual(called, true) - }) - }) - - it('Should support OVERFLOW_PRIORITY', function () { + rejectOnDrop: false, + }); + var called = false; + c.limiter.on("dropped", function (dropped) { + c.mustExist(dropped.task); + c.mustExist(dropped.args); + c.mustExist(dropped.promise); + called = true; + }); + + 
c.limiter.submit(c.slowJob, 50, null, 1, c.noErrVal(1)); + c.limiter.submit(c.job, null, 2, c.noErrVal(2)); + c.limiter.submit(c.job, null, 3, c.noErrVal(3)); + c.limiter.submit(c.job, null, 4, c.noErrVal(4)); + c.limiter.submit({ priority: 2 }, c.job, null, 5, c.noErrVal(5)); + c.limiter.submit({ priority: 1 }, c.job, null, 6, c.noErrVal(6)); + + return c.limiter + .submit({ priority: 9 }, c.job, null, 7, c.noErrVal(7)) + .then(function () { + return c.limiter.updateSettings({ highWater: null }); + }) + .then(c.last) + .then(function (_results) { + c.checkDuration(200); + c.checkResultsOrder([[1], [2], [3]]); + c.mustEqual(called, true); + }); + }); + + it("Should support OVERFLOW_PRIORITY", function () { c = makeTest({ maxConcurrent: 1, minTime: 100, highWater: 2, strategy: Bottleneck.strategy.OVERFLOW_PRIORITY, - rejectOnDrop: false - }) - var called = false - c.limiter.on('dropped', function (dropped) { - c.mustExist(dropped.task) - c.mustExist(dropped.args) - c.mustExist(dropped.promise) - called = true - }) - - c.limiter.submit(c.slowJob, 50, null, 1, c.noErrVal(1)) - c.limiter.submit(c.job, null, 2, c.noErrVal(2)) - c.limiter.submit(c.job, null, 3, c.noErrVal(3)) - c.limiter.submit(c.job, null, 4, c.noErrVal(4)) - c.limiter.submit({priority: 2}, c.job, null, 5, c.noErrVal(5)) - c.limiter.submit({priority: 2}, c.job, null, 6, c.noErrVal(6)) - - return c.limiter.submit({priority: 2}, c.job, null, 7, c.noErrVal(7)) - .then(function () { - return c.limiter.updateSettings({highWater: null}) - }) - .then(c.last) - .then(function (results) { - c.checkDuration(200) - c.checkResultsOrder([[1], [5], [6]]) - c.mustEqual(called, true) - }) - }) - - it('Should support BLOCK', function (done) { + rejectOnDrop: false, + }); + var called = false; + c.limiter.on("dropped", function (dropped) { + c.mustExist(dropped.task); + c.mustExist(dropped.args); + c.mustExist(dropped.promise); + called = true; + }); + + c.limiter.submit(c.slowJob, 50, null, 1, c.noErrVal(1)); + 
c.limiter.submit(c.job, null, 2, c.noErrVal(2)); + c.limiter.submit(c.job, null, 3, c.noErrVal(3)); + c.limiter.submit(c.job, null, 4, c.noErrVal(4)); + c.limiter.submit({ priority: 2 }, c.job, null, 5, c.noErrVal(5)); + c.limiter.submit({ priority: 2 }, c.job, null, 6, c.noErrVal(6)); + + return c.limiter + .submit({ priority: 2 }, c.job, null, 7, c.noErrVal(7)) + .then(function () { + return c.limiter.updateSettings({ highWater: null }); + }) + .then(c.last) + .then(function (_results) { + c.checkDuration(200); + c.checkResultsOrder([[1], [5], [6]]); + c.mustEqual(called, true); + }); + }); + + it("Should support BLOCK", function (done) { c = makeTest({ maxConcurrent: 1, minTime: 100, highWater: 2, trackDoneStatus: true, - strategy: Bottleneck.strategy.BLOCK - }) - var called = 0 - - c.limiter.on('dropped', function (dropped) { - c.mustExist(dropped.task) - c.mustExist(dropped.args) - c.mustExist(dropped.promise) - called++ + strategy: Bottleneck.strategy.BLOCK, + }); + var called = 0; + + c.limiter.on("dropped", function (dropped) { + c.mustExist(dropped.task); + c.mustExist(dropped.args); + c.mustExist(dropped.promise); + called++; if (called === 3) { - c.limiter.updateSettings({ highWater: null }) - .then(function () { - return c.limiter.schedule(c.job, null, 8) - }) - .catch(function (err) { - assert(err instanceof Bottleneck.BottleneckError) - c.mustEqual(err.message, 'This job has been dropped by Bottleneck') - c.limiter.removeAllListeners('error') - done() - }) + c.limiter + .updateSettings({ highWater: null }) + .then(function () { + return c.limiter.schedule(c.job, null, 8); + }) + .catch(function (err) { + assert( + err instanceof Bottleneck.BottleneckError, + "Expected err to be a BottleneckError", + ); + c.mustEqual(err.message, "This job has been dropped by Bottleneck"); + c.limiter.removeAllListeners("error"); + done(); + }); } - }) - - c.limiter.submit(c.slowJob, 20, null, 1, c.noErrVal(1)) - c.limiter.submit(c.slowJob, 20, null, 2, (err) => 
c.mustExist(err)) - c.limiter.submit(c.slowJob, 20, null, 3, (err) => c.mustExist(err)) - c.limiter.submit(c.slowJob, 20, null, 4, (err) => c.mustExist(err)) - }) - - it('Should have the right priority', function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) - - c.pNoErrVal(c.limiter.schedule({priority: 6}, c.slowPromise, 50, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({priority: 5}, c.promise, null, 2), 2) - c.pNoErrVal(c.limiter.schedule({priority: 4}, c.promise, null, 3), 3) - c.pNoErrVal(c.limiter.schedule({priority: 3}, c.promise, null, 4), 4) - - return c.last() - .then(function (results) { - c.checkDuration(300) - c.checkResultsOrder([[1], [4], [3], [2]]) - }) - }) - -}) + }); + + c.limiter.submit(c.slowJob, 20, null, 1, c.noErrVal(1)); + c.limiter.submit(c.slowJob, 20, null, 2, (err) => c.mustExist(err)); + c.limiter.submit(c.slowJob, 20, null, 3, (err) => c.mustExist(err)); + c.limiter.submit(c.slowJob, 20, null, 4, (err) => c.mustExist(err)); + }); + + it("Should have the right priority", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); + + c.pNoErrVal(c.limiter.schedule({ priority: 6 }, c.slowPromise, 50, null, 1), 1); + c.pNoErrVal(c.limiter.schedule({ priority: 5 }, c.promise, null, 2), 2); + c.pNoErrVal(c.limiter.schedule({ priority: 4 }, c.promise, null, 3), 3); + c.pNoErrVal(c.limiter.schedule({ priority: 3 }, c.promise, null, 4), 4); + + return c.last().then(function (_results) { + c.checkDuration(300); + c.checkResultsOrder([[1], [4], [3], [2]]); + }); + }); +}); diff --git a/test/promises.js b/test/promises.js index b20022f..0f0e2f5 100644 --- a/test/promises.js +++ b/test/promises.js @@ -1,202 +1,220 @@ -var makeTest = require('./context') -var Bottleneck = require('./bottleneck') -var assert = require('assert') +var makeTest = require("./context"); +var Bottleneck = require("./bottleneck"); +var assert = require("assert"); +const { describe, it, afterEach } = require("mocha"); -describe('Promises', function () { - var 
c +describe("Promises", function () { + var c; afterEach(function () { - return c.limiter.disconnect(false) - }) - - it('Should support promises', function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) - - c.limiter.submit(c.job, null, 1, 9, c.noErrVal(1, 9)) - c.limiter.submit(c.job, null, 2, c.noErrVal(2)) - c.limiter.submit(c.job, null, 3, c.noErrVal(3)) - c.pNoErrVal(c.limiter.schedule(c.promise, null, 4, 5), 4, 5) - - return c.last() - .then(function (results) { - c.checkResultsOrder([[1,9], [2], [3], [4,5]]) - c.checkDuration(300) - }) - }) - - it('Should pass error on failure', function () { - var failureMessage = 'failed' - c = makeTest({maxConcurrent: 1, minTime: 100}) - - return c.limiter.schedule(c.promise, new Error(failureMessage)) - .catch(function (err) { - c.mustEqual(err.message, failureMessage) - }) - }) - - it('Should allow non-Promise returns', function () { - c = makeTest() - var str = 'This is a string' - - return c.limiter.schedule(() => str) - .then(function (x) { - c.mustEqual(x, str) - }) - }) - - it('Should get rejected when rejectOnDrop is true', function () { + return c.limiter.disconnect(false); + }); + + it("Should support promises", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); + + c.limiter.submit(c.job, null, 1, 9, c.noErrVal(1, 9)); + c.limiter.submit(c.job, null, 2, c.noErrVal(2)); + c.limiter.submit(c.job, null, 3, c.noErrVal(3)); + c.pNoErrVal(c.limiter.schedule(c.promise, null, 4, 5), 4, 5); + + return c.last().then(function (_results) { + c.checkResultsOrder([[1, 9], [2], [3], [4, 5]]); + c.checkDuration(300); + }); + }); + + it("Should pass error on failure", function () { + var failureMessage = "failed"; + c = makeTest({ maxConcurrent: 1, minTime: 100 }); + + return c.limiter.schedule(c.promise, new Error(failureMessage)).catch(function (err) { + c.mustEqual(err.message, failureMessage); + }); + }); + + it("Should allow non-Promise returns", function () { + c = makeTest(); + var str = "This is a 
string"; + + return c.limiter + .schedule(() => str) + .then(function (x) { + c.mustEqual(x, str); + }); + }); + + it("Should get rejected when rejectOnDrop is true", function () { c = makeTest({ maxConcurrent: 1, minTime: 0, highWater: 1, strategy: Bottleneck.strategy.OVERFLOW, - rejectOnDrop: true - }) - var dropped = 0 - var caught = 0 - var p1 - var p2 - - c.limiter.on('dropped', function () { - dropped++ - }) - - p1 = c.pNoErrVal(c.limiter.schedule({id: 1}, c.slowPromise, 50, null, 1), 1) - p2 = c.pNoErrVal(c.limiter.schedule({id: 2}, c.slowPromise, 50, null, 2), 2) - - return c.limiter.schedule({id: 3}, c.slowPromise, 50, null, 3) - .catch(function (err) { - c.mustEqual(err.message, 'This job has been dropped by Bottleneck') - assert(err instanceof Bottleneck.BottleneckError) - caught++ - return Promise.all([p1, p2]) - }) - .then(c.last) - .then(function (results) { - c.checkResultsOrder([[1], [2]]) - c.checkDuration(100) - c.mustEqual(dropped, 1) - c.mustEqual(caught, 1) - }) - }) - - it('Should automatically wrap an exception in a rejected promise - schedule()', function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) - - return c.limiter.schedule(() => { - throw new Error('I will reject') - }) - .then(() => assert(false)) - .catch(err => { - assert(err.message === 'I will reject'); - }) - }) - - describe('Wrap', function () { - it('Should wrap', function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) - - c.limiter.submit(c.job, null, 1, c.noErrVal(1)) - c.limiter.submit(c.job, null, 2, c.noErrVal(2)) - c.limiter.submit(c.job, null, 3, c.noErrVal(3)) - - var wrapped = c.limiter.wrap(c.promise) - c.pNoErrVal(wrapped(null, 4), 4) - - return c.last() - .then(function (results) { - c.checkResultsOrder([[1], [2], [3], [4]]) - c.checkDuration(300) + rejectOnDrop: true, + }); + var dropped = 0; + var caught = 0; + var p1; + var p2; + + c.limiter.on("dropped", function () { + dropped++; + }); + + p1 = c.pNoErrVal(c.limiter.schedule({ id: 1 }, 
c.slowPromise, 50, null, 1), 1); + p2 = c.pNoErrVal(c.limiter.schedule({ id: 2 }, c.slowPromise, 50, null, 2), 2); + + return c.limiter + .schedule({ id: 3 }, c.slowPromise, 50, null, 3) + .catch(function (err) { + c.mustEqual(err.message, "This job has been dropped by Bottleneck"); + assert( + err instanceof Bottleneck.BottleneckError, + `Expected err to be a BottleneckError but was actually a ${err.constructor.name}`, + ); + caught++; + return Promise.all([p1, p2]); }) - }) - - it('Should automatically wrap a returned value in a resolved promise', function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) - - fn = c.limiter.wrap(() => { return 7 }); - - return fn().then(result => { - assert(result === 7); + .then(c.last) + .then(function (_results) { + c.checkResultsOrder([[1], [2]]); + c.checkDuration(100); + c.mustEqual(dropped, 1); + c.mustEqual(caught, 1); + }); + }); + + it("Should automatically wrap an exception in a rejected promise - schedule()", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); + + return c.limiter + .schedule(() => { + throw new Error("I will reject"); }) - }) - - it('Should automatically wrap an exception in a rejected promise', function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) - - fn = c.limiter.wrap(() => { throw new Error('I will reject') }); - - return fn().then(() => assert(false)).catch(error => { - assert(error.message === 'I will reject'); - }) - }) - - it('Should inherit the original target for wrapped methods', function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) + .then(() => assert(false)) + .catch((err) => { + assert(err.message === "I will reject"); + }); + }); + + describe("Wrap", function () { + let fn; + it("Should wrap", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); + + c.limiter.submit(c.job, null, 1, c.noErrVal(1)); + c.limiter.submit(c.job, null, 2, c.noErrVal(2)); + c.limiter.submit(c.job, null, 3, c.noErrVal(3)); + + var wrapped = 
c.limiter.wrap(c.promise); + c.pNoErrVal(wrapped(null, 4), 4); + + return c.last().then(function (_results) { + c.checkResultsOrder([[1], [2], [3], [4]]); + c.checkDuration(300); + }); + }); + + it("Should automatically wrap a returned value in a resolved promise", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); + + fn = c.limiter.wrap(() => { + return 7; + }); + + return fn().then((result) => { + c.mustEqual(result, 7); + }); + }); + + it("Should automatically wrap an exception in a rejected promise", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); + + fn = c.limiter.wrap(() => { + throw new Error("I will reject"); + }); + + return fn() + .then(() => assert(false)) + .catch((error) => { + assert(error.message === "I will reject"); + }); + }); + + it("Should inherit the original target for wrapped methods", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); var object = { - fn: c.limiter.wrap(function () { return this }) - } + fn: c.limiter.wrap(function () { + return this; + }), + }; - return object.fn().then(result => { - assert(result === object) - }) - }) + return object.fn().then((result) => { + c.mustEqual(result, object); + }); + }); - it('Should inherit the original target on prototype methods', function () { - c = makeTest({maxConcurrent: 1, minTime: 100}) + it("Should inherit the original target on prototype methods", function () { + c = makeTest({ maxConcurrent: 1, minTime: 100 }); class Animal { - constructor(name) { this.name = name } - getName() { return this.name } + constructor(name) { + this.name = name; + } + getName() { + return this.name; + } } - Animal.prototype.getName = c.limiter.wrap(Animal.prototype.getName) - let elephant = new Animal('Dumbo') + Animal.prototype.getName = c.limiter.wrap(Animal.prototype.getName); + let elephant = new Animal("Dumbo"); - return elephant.getName().then(result => { - assert(result === 'Dumbo') - }) - }) + return elephant.getName().then((result) => { + 
c.mustEqual(result, "Dumbo"); + }); + }); - it('Should pass errors back', function () { - var failureMessage = 'BLEW UP!!!' - c = makeTest({maxConcurrent: 1, minTime: 100}) + it("Should pass errors back", function () { + var failureMessage = "BLEW UP!!!"; + c = makeTest({ maxConcurrent: 1, minTime: 100 }); - var wrapped = c.limiter.wrap(c.promise) - c.pNoErrVal(wrapped(null, 1), 1) - c.pNoErrVal(wrapped(null, 2), 2) + var wrapped = c.limiter.wrap(c.promise); + c.pNoErrVal(wrapped(null, 1), 1); + c.pNoErrVal(wrapped(null, 2), 2); return wrapped(new Error(failureMessage), 3) - .catch(function (err) { - c.mustEqual(err.message, failureMessage) - return c.last() - }) - .then(function (results) { - c.checkResultsOrder([[1], [2], [3]]) - c.checkDuration(200) - }) - }) - - it('Should allow passing options', function () { - var failureMessage = 'BLEW UP!!!' - c = makeTest({maxConcurrent: 1, minTime: 50}) - - var wrapped = c.limiter.wrap(c.promise) - c.pNoErrVal(wrapped(null, 1), 1) - c.pNoErrVal(wrapped(null, 2), 2) - c.pNoErrVal(wrapped(null, 3), 3) - c.pNoErrVal(wrapped(null, 4), 4) - c.pNoErrVal(wrapped.withOptions({ priority: 1 }, null, 5), 5) - - return wrapped.withOptions({ priority: 1 }, new Error(failureMessage), 6) - .catch(function (err) { - c.mustEqual(err.message, failureMessage) - return c.last() - }) - .then(function (results) { - c.checkResultsOrder([[1], [2], [5], [6], [3], [4]]) - c.checkDuration(250) - }) - }) - }) -}) + .catch(function (err) { + c.mustEqual(err.message, failureMessage); + return c.last(); + }) + .then(function (_results) { + c.checkResultsOrder([[1], [2], [3]]); + c.checkDuration(200); + }); + }); + + it("Should allow passing options", function () { + var failureMessage = "BLEW UP!!!"; + c = makeTest({ maxConcurrent: 1, minTime: 50 }); + + var wrapped = c.limiter.wrap(c.promise); + c.pNoErrVal(wrapped(null, 1), 1); + c.pNoErrVal(wrapped(null, 2), 2); + c.pNoErrVal(wrapped(null, 3), 3); + c.pNoErrVal(wrapped(null, 4), 4); + 
c.pNoErrVal(wrapped.withOptions({ priority: 1 }, null, 5), 5); + + return wrapped + .withOptions({ priority: 1 }, new Error(failureMessage), 6) + .catch(function (err) { + c.mustEqual(err.message, failureMessage); + return c.last(); + }) + .then(function (_results) { + c.checkResultsOrder([[1], [2], [5], [6], [3], [4]]); + c.checkDuration(250); + }); + }); + }); +}); diff --git a/test/retries.js b/test/retries.js index 7570516..19a9648 100644 --- a/test/retries.js +++ b/test/retries.js @@ -1,237 +1,236 @@ -var makeTest = require('./context') -var Bottleneck = require('./bottleneck') -var assert = require('assert') -var child_process = require('child_process') +const { describe, it, afterEach } = require("mocha"); +var makeTest = require("./context"); -describe('Retries', function () { - var c +describe("Retries", function () { + var c; afterEach(function () { - return c.limiter.disconnect(false) - }) - - it('Should retry when requested by the user (sync)', async function () { - c = makeTest({ trackDoneStatus: true }) - var failedEvents = 0 - var retryEvents = 0 - - c.limiter.on('failed', function (error, info) { - c.mustEqual(c.limiter.counts().EXECUTING, 1) - c.mustEqual(info.retryCount, failedEvents) - failedEvents++ - return 50 - }) - - c.limiter.on('retry', function (error, info) { - c.mustEqual(c.limiter.counts().EXECUTING, 1) - retryEvents++ - }) - - var times = 0 + return c.limiter.disconnect(false); + }); + + it("Should retry when requested by the user (sync)", async function () { + c = makeTest({ trackDoneStatus: true }); + var failedEvents = 0; + var retryEvents = 0; + + c.limiter.on("failed", function (error, info) { + c.mustEqual(c.limiter.counts().EXECUTING, 1); + c.mustEqual(info.retryCount, failedEvents); + failedEvents++; + return 50; + }); + + c.limiter.on("retry", function (_error, _info) { + c.mustEqual(c.limiter.counts().EXECUTING, 1); + retryEvents++; + }); + + var times = 0; const job = function () { - times++ + times++; if (times <= 2) { - 
return Promise.reject(new Error('boom')) + return Promise.reject(new Error("boom")); } - return Promise.resolve('Success!') - } - - c.mustEqual(await c.limiter.schedule(job), 'Success!') - const results = await c.results() - assert(results.elapsed > 90 && results.elapsed < 130) - c.mustEqual(failedEvents, 2) - c.mustEqual(retryEvents, 2) - c.mustEqual(c.limiter.counts().EXECUTING, 0) - c.mustEqual(c.limiter.counts().DONE, 1) - }) - - it('Should retry when requested by the user (async)', async function () { - c = makeTest({ trackDoneStatus: true }) - var failedEvents = 0 - var retryEvents = 0 - - c.limiter.on('failed', function (error, info) { - c.mustEqual(c.limiter.counts().EXECUTING, 1) - c.mustEqual(info.retryCount, failedEvents) - failedEvents++ - return Promise.resolve(50) - }) - - c.limiter.on('retry', function (error, info) { - c.mustEqual(c.limiter.counts().EXECUTING, 1) - retryEvents++ - }) - - var times = 0 + return Promise.resolve("Success!"); + }; + + c.mustEqual(await c.limiter.schedule(job), "Success!"); + const results = await c.results(); + c.mustGt(results.elapsed, 90); + c.mustLt(results.elapsed, 130); + c.mustEqual(failedEvents, 2); + c.mustEqual(retryEvents, 2); + c.mustEqual(c.limiter.counts().EXECUTING, 0); + c.mustEqual(c.limiter.counts().DONE, 1); + }); + + it("Should retry when requested by the user (async)", async function () { + c = makeTest({ trackDoneStatus: true }); + var failedEvents = 0; + var retryEvents = 0; + + c.limiter.on("failed", function (error, info) { + c.mustEqual(c.limiter.counts().EXECUTING, 1); + c.mustEqual(info.retryCount, failedEvents); + failedEvents++; + return Promise.resolve(50); + }); + + c.limiter.on("retry", function (_error, _info) { + c.mustEqual(c.limiter.counts().EXECUTING, 1); + retryEvents++; + }); + + var times = 0; const job = function () { - times++ + times++; if (times <= 2) { - return Promise.reject(new Error('boom')) + return Promise.reject(new Error("boom")); } - return Promise.resolve('Success!') 
- } - - c.mustEqual(await c.limiter.schedule(job), 'Success!') - const results = await c.results() - assert(results.elapsed > 90 && results.elapsed < 130) - c.mustEqual(failedEvents, 2) - c.mustEqual(retryEvents, 2) - c.mustEqual(c.limiter.counts().EXECUTING, 0) - c.mustEqual(c.limiter.counts().DONE, 1) - }) - - it('Should not retry when user returns an error (sync)', async function () { - c = makeTest({ errorEventsExpected: true, trackDoneStatus: true }) - var failedEvents = 0 - var retryEvents = 0 - var errorEvents = 0 - var caught = false - - c.limiter.on('failed', function (error, info) { - c.mustEqual(c.limiter.counts().EXECUTING, 1) - c.mustEqual(info.retryCount, failedEvents) - failedEvents++ - throw new Error('Nope') - }) - - c.limiter.on('retry', function (error, info) { - retryEvents++ - }) - - c.limiter.on('error', function (error, info) { - c.mustEqual(error.message, 'Nope') - errorEvents++ - }) + return Promise.resolve("Success!"); + }; + + c.mustEqual(await c.limiter.schedule(job), "Success!"); + const results = await c.results(); + c.mustGt(results.elapsed, 90); + c.mustLt(results.elapsed, 130); + c.mustEqual(failedEvents, 2); + c.mustEqual(retryEvents, 2); + c.mustEqual(c.limiter.counts().EXECUTING, 0); + c.mustEqual(c.limiter.counts().DONE, 1); + }); + + it("Should not retry when user returns an error (sync)", async function () { + c = makeTest({ errorEventsExpected: true, trackDoneStatus: true }); + var failedEvents = 0; + var retryEvents = 0; + var errorEvents = 0; + var caught = false; + + c.limiter.on("failed", function (error, info) { + c.mustEqual(c.limiter.counts().EXECUTING, 1); + c.mustEqual(info.retryCount, failedEvents); + failedEvents++; + throw new Error("Nope"); + }); + + c.limiter.on("retry", function (_error, _info) { + retryEvents++; + }); + + c.limiter.on("error", function (error, _info) { + c.mustEqual(error.message, "Nope"); + errorEvents++; + }); const job = function () { - return Promise.reject(new Error('boom')) - } + return 
Promise.reject(new Error("boom")); + }; try { - await c.limiter.schedule(job) - throw new Error('Should not reach') + await c.limiter.schedule(job); + throw new Error("Should not reach"); } catch (error) { - c.mustEqual(error.message, 'boom') - caught = true + c.mustEqual(error.message, "boom"); + caught = true; } - c.mustEqual(failedEvents, 1) - c.mustEqual(retryEvents, 0) - c.mustEqual(errorEvents, 1) - c.mustEqual(caught, true) - c.mustEqual(c.limiter.counts().EXECUTING, 0) - c.mustEqual(c.limiter.counts().DONE, 1) - }) - - it('Should not retry when user returns an error (async)', async function () { - c = makeTest({ errorEventsExpected: true, trackDoneStatus: true }) - var failedEvents = 0 - var retryEvents = 0 - var errorEvents = 0 - var caught = false - - c.limiter.on('failed', function (error, info) { - c.mustEqual(c.limiter.counts().EXECUTING, 1) - c.mustEqual(info.retryCount, failedEvents) - failedEvents++ - return Promise.reject(new Error('Nope')) - }) - - c.limiter.on('retry', function (error, info) { - retryEvents++ - }) - - c.limiter.on('error', function (error, info) { - c.mustEqual(error.message, 'Nope') - errorEvents++ - }) + c.mustEqual(failedEvents, 1); + c.mustEqual(retryEvents, 0); + c.mustEqual(errorEvents, 1); + c.mustEqual(caught, true); + c.mustEqual(c.limiter.counts().EXECUTING, 0); + c.mustEqual(c.limiter.counts().DONE, 1); + }); + + it("Should not retry when user returns an error (async)", async function () { + c = makeTest({ errorEventsExpected: true, trackDoneStatus: true }); + var failedEvents = 0; + var retryEvents = 0; + var errorEvents = 0; + var caught = false; + + c.limiter.on("failed", function (error, info) { + c.mustEqual(c.limiter.counts().EXECUTING, 1); + c.mustEqual(info.retryCount, failedEvents); + failedEvents++; + return Promise.reject(new Error("Nope")); + }); + + c.limiter.on("retry", function (_error, _info) { + retryEvents++; + }); + + c.limiter.on("error", function (error, _info) { + c.mustEqual(error.message, 
"Nope"); + errorEvents++; + }); const job = function () { - return Promise.reject(new Error('boom')) - } + return Promise.reject(new Error("boom")); + }; try { - await c.limiter.schedule(job) - throw new Error('Should not reach') + await c.limiter.schedule(job); + throw new Error("Should not reach"); } catch (error) { - c.mustEqual(error.message, 'boom') - caught = true + c.mustEqual(error.message, "boom"); + caught = true; } - c.mustEqual(failedEvents, 1) - c.mustEqual(retryEvents, 0) - c.mustEqual(errorEvents, 1) - c.mustEqual(caught, true) - c.mustEqual(c.limiter.counts().EXECUTING, 0) - c.mustEqual(c.limiter.counts().DONE, 1) - }) - - it('Should not retry when user returns null (sync)', async function () { - c = makeTest({ trackDoneStatus: true }) - var failedEvents = 0 - var retryEvents = 0 - var caught = false - - c.limiter.on('failed', function (error, info) { - c.mustEqual(c.limiter.counts().EXECUTING, 1) - c.mustEqual(info.retryCount, failedEvents) - failedEvents++ - return null - }) - - c.limiter.on('retry', function (error, info) { - retryEvents++ - }) + c.mustEqual(failedEvents, 1); + c.mustEqual(retryEvents, 0); + c.mustEqual(errorEvents, 1); + c.mustEqual(caught, true); + c.mustEqual(c.limiter.counts().EXECUTING, 0); + c.mustEqual(c.limiter.counts().DONE, 1); + }); + + it("Should not retry when user returns null (sync)", async function () { + c = makeTest({ trackDoneStatus: true }); + var failedEvents = 0; + var retryEvents = 0; + var caught = false; + + c.limiter.on("failed", function (error, info) { + c.mustEqual(c.limiter.counts().EXECUTING, 1); + c.mustEqual(info.retryCount, failedEvents); + failedEvents++; + return null; + }); + + c.limiter.on("retry", function (_error, _info) { + retryEvents++; + }); const job = function () { - return Promise.reject(new Error('boom')) - } + return Promise.reject(new Error("boom")); + }; try { - await c.limiter.schedule(job) - throw new Error('Should not reach') + await c.limiter.schedule(job); + throw new 
Error("Should not reach"); } catch (error) { - c.mustEqual(error.message, 'boom') - caught = true + c.mustEqual(error.message, "boom"); + caught = true; } - c.mustEqual(failedEvents, 1) - c.mustEqual(retryEvents, 0) - c.mustEqual(caught, true) - c.mustEqual(c.limiter.counts().EXECUTING, 0) - c.mustEqual(c.limiter.counts().DONE, 1) - }) - - it('Should not retry when user returns null (async)', async function () { - c = makeTest({ trackDoneStatus: true }) - var failedEvents = 0 - var retryEvents = 0 - var caught = false - - c.limiter.on('failed', function (error, info) { - c.mustEqual(c.limiter.counts().EXECUTING, 1) - c.mustEqual(info.retryCount, failedEvents) - failedEvents++ - return Promise.resolve(null) - }) - - c.limiter.on('retry', function (error, info) { - retryEvents++ - }) + c.mustEqual(failedEvents, 1); + c.mustEqual(retryEvents, 0); + c.mustEqual(caught, true); + c.mustEqual(c.limiter.counts().EXECUTING, 0); + c.mustEqual(c.limiter.counts().DONE, 1); + }); + + it("Should not retry when user returns null (async)", async function () { + c = makeTest({ trackDoneStatus: true }); + var failedEvents = 0; + var retryEvents = 0; + var caught = false; + + c.limiter.on("failed", function (error, info) { + c.mustEqual(c.limiter.counts().EXECUTING, 1); + c.mustEqual(info.retryCount, failedEvents); + failedEvents++; + return Promise.resolve(null); + }); + + c.limiter.on("retry", function (_error, _info) { + retryEvents++; + }); const job = function () { - return Promise.reject(new Error('boom')) - } + return Promise.reject(new Error("boom")); + }; try { - await c.limiter.schedule(job) - throw new Error('Should not reach') + await c.limiter.schedule(job); + throw new Error("Should not reach"); } catch (error) { - c.mustEqual(error.message, 'boom') - caught = true + c.mustEqual(error.message, "boom"); + caught = true; } - c.mustEqual(failedEvents, 1) - c.mustEqual(retryEvents, 0) - c.mustEqual(caught, true) - c.mustEqual(c.limiter.counts().EXECUTING, 0) - 
c.mustEqual(c.limiter.counts().DONE, 1) - }) - -}) + c.mustEqual(failedEvents, 1); + c.mustEqual(retryEvents, 0); + c.mustEqual(caught, true); + c.mustEqual(c.limiter.counts().EXECUTING, 0); + c.mustEqual(c.limiter.counts().DONE, 1); + }); +}); diff --git a/test/spawn/increaseKeepAlive.js b/test/spawn/increaseKeepAlive.js index 4bea612..f3821f9 100644 --- a/test/spawn/increaseKeepAlive.js +++ b/test/spawn/increaseKeepAlive.js @@ -1,17 +1,17 @@ -var Bottleneck = require('../bottleneck.js') -var now = Date.now() +var Bottleneck = require("../bottleneck.js"); +var now = Date.now(); var limiter = new Bottleneck({ reservoir: 2, reservoirIncreaseAmount: 2, - reservoirIncreaseInterval: 200 -}) + reservoirIncreaseInterval: 200, +}); var f1 = () => { - var secDiff = Math.floor((Date.now() - now) / 100) - return Promise.resolve(`[${secDiff}]`) -} + var secDiff = Math.floor((Date.now() - now) / 100); + return Promise.resolve(`[${secDiff}]`); +}; -limiter.schedule(f1).then((x) => process.stdout.write(x)) -limiter.schedule(f1).then((x) => process.stdout.write(x)) -limiter.schedule(f1).then((x) => process.stdout.write(x)) -limiter.schedule(f1).then((x) => process.stdout.write(x)) +limiter.schedule(f1).then((x) => process.stdout.write(x)); +limiter.schedule(f1).then((x) => process.stdout.write(x)); +limiter.schedule(f1).then((x) => process.stdout.write(x)); +limiter.schedule(f1).then((x) => process.stdout.write(x)); diff --git a/test/spawn/refreshKeepAlive.js b/test/spawn/refreshKeepAlive.js index deb0992..17d4420 100644 --- a/test/spawn/refreshKeepAlive.js +++ b/test/spawn/refreshKeepAlive.js @@ -1,17 +1,17 @@ -var Bottleneck = require('../bottleneck.js') -var now = Date.now() +var Bottleneck = require("../bottleneck.js"); +var now = Date.now(); var limiter = new Bottleneck({ reservoir: 2, reservoirRefreshAmount: 2, - reservoirRefreshInterval: 200 -}) + reservoirRefreshInterval: 200, +}); var f1 = () => { - var secDiff = Math.floor((Date.now() - now) / 100) - return 
Promise.resolve(`[${secDiff}]`) -} + var secDiff = Math.floor((Date.now() - now) / 100); + return Promise.resolve(`[${secDiff}]`); +}; -limiter.schedule(f1).then((x) => process.stdout.write(x)) -limiter.schedule(f1).then((x) => process.stdout.write(x)) -limiter.schedule(f1).then((x) => process.stdout.write(x)) -limiter.schedule(f1).then((x) => process.stdout.write(x)) +limiter.schedule(f1).then((x) => process.stdout.write(x)); +limiter.schedule(f1).then((x) => process.stdout.write(x)); +limiter.schedule(f1).then((x) => process.stdout.write(x)); +limiter.schedule(f1).then((x) => process.stdout.write(x)); diff --git a/test/states.js b/test/states.js index c65ed77..bc1b65e 100644 --- a/test/states.js +++ b/test/states.js @@ -1,103 +1,106 @@ -var States = require('../lib/States') -var assert = require('assert') -var c = require('./context')({datastore: 'local'}) -var Bottleneck = require('./bottleneck') - -describe('States', function () { - - it('Should be created and be empty', function () { - var states = new States(["A", "B", "C"]) - c.mustEqual(states.statusCounts(), { A: 0, B: 0, C: 0 }) - }) - - it('Should start new series', function () { - var states = new States(["A", "B", "C"]) - - states.start('x') - states.start('y') - - c.mustEqual(states.statusCounts(), { A: 2, B: 0, C: 0 }) - }) - - it('Should increment', function () { - var states = new States(["A", "B", "C"]) - - states.start('x') - states.start('y') - states.next('x') - states.next('y') - states.next('x') - c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 1 }) - - states.next('z') - c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 1 }) - - states.next('x') - c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 0 }) - - states.next('x') - c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 0 }) - - states.next('y') - states.next('y') - c.mustEqual(states.statusCounts(), { A: 0, B: 0, C: 0 }) - }) - - it('Should remove', function () { - var states = new States(["A", "B", "C"]) - - states.start('x') 
- states.start('y') - states.next('x') - states.next('y') - states.next('x') - c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 1 }) - - states.remove('x') - c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 0 }) - - states.remove('y') - c.mustEqual(states.statusCounts(), { A: 0, B: 0, C: 0 }) - }) - - it('Should return current status', function () { - var states = new States(["A", "B", "C"]) - - states.start('x') - states.start('y') - states.next('x') - states.next('y') - states.next('x') - c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 1 }) - - c.mustEqual(states.jobStatus('x'), 'C') - c.mustEqual(states.jobStatus('y'), 'B') - c.mustEqual(states.jobStatus('z'), null) - }) - - it('Should return job ids for a status', function (done) { - var states = new States(["A", "B", "C"]) - - states.start('x') - states.start('y') - states.start('z') - states.next('x') - states.next('y') - states.next('x') - states.next('z') - c.mustEqual(states.statusCounts(), { A: 0, B: 2, C: 1 }) - - c.mustEqual(states.statusJobs().sort(), ['x', 'y', 'z']) - c.mustEqual(states.statusJobs('A'), []) - c.mustEqual(states.statusJobs('B').sort(), ['y', 'z']) - c.mustEqual(states.statusJobs('C'), ['x']) +var States = require("../src/States"); +var assert = require("assert"); +var c = require("./context")({ datastore: "local" }); +var Bottleneck = require("./bottleneck"); +const { describe, it } = require("mocha"); + +describe("States", function () { + it("Should be created and be empty", function () { + var states = new States(["A", "B", "C"]); + c.mustEqual(states.statusCounts(), { A: 0, B: 0, C: 0 }); + }); + + it("Should start new series", function () { + var states = new States(["A", "B", "C"]); + + states.start("x"); + states.start("y"); + + c.mustEqual(states.statusCounts(), { A: 2, B: 0, C: 0 }); + }); + + it("Should increment", function () { + var states = new States(["A", "B", "C"]); + + states.start("x"); + states.start("y"); + states.next("x"); + states.next("y"); + 
states.next("x"); + c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 1 }); + + states.next("z"); + c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 1 }); + + states.next("x"); + c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 0 }); + + states.next("x"); + c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 0 }); + + states.next("y"); + states.next("y"); + c.mustEqual(states.statusCounts(), { A: 0, B: 0, C: 0 }); + }); + + it("Should remove", function () { + var states = new States(["A", "B", "C"]); + + states.start("x"); + states.start("y"); + states.next("x"); + states.next("y"); + states.next("x"); + c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 1 }); + + states.remove("x"); + c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 0 }); + + states.remove("y"); + c.mustEqual(states.statusCounts(), { A: 0, B: 0, C: 0 }); + }); + + it("Should return current status", function () { + var states = new States(["A", "B", "C"]); + + states.start("x"); + states.start("y"); + states.next("x"); + states.next("y"); + states.next("x"); + c.mustEqual(states.statusCounts(), { A: 0, B: 1, C: 1 }); + + c.mustEqual(states.jobStatus("x"), "C"); + c.mustEqual(states.jobStatus("y"), "B"); + c.mustEqual(states.jobStatus("z"), null); + }); + + it("Should return job ids for a status", function (done) { + var states = new States(["A", "B", "C"]); + + states.start("x"); + states.start("y"); + states.start("z"); + states.next("x"); + states.next("y"); + states.next("x"); + states.next("z"); + c.mustEqual(states.statusCounts(), { A: 0, B: 2, C: 1 }); + + c.mustEqual(states.statusJobs().sort(), ["x", "y", "z"]); + c.mustEqual(states.statusJobs("A"), []); + c.mustEqual(states.statusJobs("B").sort(), ["y", "z"]); + c.mustEqual(states.statusJobs("C"), ["x"]); try { - states.statusJobs('Z') + states.statusJobs("Z"); } catch (err) { - if (process.env.BUILD !== 'es5' && process.env.BUILD !== 'light') { - assert(err instanceof Bottleneck.BottleneckError) + if (process.env.BUILD !== 
"light") { + assert( + err instanceof Bottleneck.BottleneckError, + `Expected err to be a BottleneckError but was actually a ${err.constructor.name}`, + ); } - done() + done(); } - }) -}) + }); +}); diff --git a/test/stop.js b/test/stop.js index 2300e4f..06f255a 100644 --- a/test/stop.js +++ b/test/stop.js @@ -1,208 +1,209 @@ -var makeTest = require('./context') -var Bottleneck = require('./bottleneck') -var assert = require('assert') +const { describe, it, afterEach } = require("mocha"); +const makeTest = require("./context"); -describe('Stop', function () { - var c +describe("Stop", function () { + var c; afterEach(function () { - return c.limiter.disconnect(false) - }) + return c.limiter.disconnect(false); + }); - it('Should stop and drop the queue', function (done) { + it("Should stop and drop the queue", function (done) { c = makeTest({ maxConcurrent: 2, minTime: 100, - trackDoneStatus: true - }) - var submitFailed = false - var queuedDropped = false - var scheduledDropped = false - var dropped = 0 + trackDoneStatus: true, + }); + var submitFailed = false; + var queuedDropped = false; + var scheduledDropped = false; + var dropped = 0; - c.limiter.on('dropped', function () { - dropped++ - }) + c.limiter.on("dropped", function () { + dropped++; + }); - c.pNoErrVal(c.limiter.schedule({id: '0'}, c.promise, null, 0), 0) + c.pNoErrVal(c.limiter.schedule({ id: "0" }, c.promise, null, 0), 0); - c.pNoErrVal(c.limiter.schedule({id: '1'}, c.slowPromise, 100, null, 1), 1) + c.pNoErrVal(c.limiter.schedule({ id: "1" }, c.slowPromise, 100, null, 1), 1); - c.limiter.schedule({id: '2'}, c.promise, null, 2) - .catch(function (err) { - c.mustEqual(err.message, 'Dropped!') - scheduledDropped = true - }) + c.limiter.schedule({ id: "2" }, c.promise, null, 2).catch(function (err) { + c.mustEqual(err.message, "Dropped!"); + scheduledDropped = true; + }); - c.limiter.schedule({id: '3'}, c.promise, null, 3) - .catch(function (err) { - c.mustEqual(err.message, 'Dropped!') - 
queuedDropped = true - }) + c.limiter.schedule({ id: "3" }, c.promise, null, 3).catch(function (err) { + c.mustEqual(err.message, "Dropped!"); + queuedDropped = true; + }); setTimeout(function () { - var counts = c.limiter.counts() - c.mustEqual(counts.RECEIVED, 0) - c.mustEqual(counts.QUEUED, 1) - c.mustEqual(counts.RUNNING, 1) - c.mustEqual(counts.EXECUTING, 1) - c.mustEqual(counts.DONE, 1) - - c.limiter.stop({ - enqueueErrorMessage: 'Stopped!', - dropErrorMessage: 'Dropped!' - }) - .then(function () { - counts = c.limiter.counts() - c.mustEqual(submitFailed, true) - c.mustEqual(scheduledDropped, true) - c.mustEqual(queuedDropped, true) - c.mustEqual(dropped, 2) - c.mustEqual(counts.RECEIVED, 0) - c.mustEqual(counts.QUEUED, 0) - c.mustEqual(counts.RUNNING, 0) - c.mustEqual(counts.EXECUTING, 0) - c.mustEqual(counts.DONE, 2) - - c.checkResultsOrder([[0], [1]]) - done() - }) + var counts = c.limiter.counts(); + c.mustEqual(counts.RECEIVED, 0); + c.mustEqual(counts.QUEUED, 1); + c.mustEqual(counts.RUNNING, 1); + c.mustEqual(counts.EXECUTING, 1); + c.mustEqual(counts.DONE, 1); + + c.limiter + .stop({ + enqueueErrorMessage: "Stopped!", + dropErrorMessage: "Dropped!", + }) + .then(function () { + counts = c.limiter.counts(); + c.mustEqual(submitFailed, true); + c.mustEqual(scheduledDropped, true); + c.mustEqual(queuedDropped, true); + c.mustEqual(dropped, 2); + c.mustEqual(counts.RECEIVED, 0); + c.mustEqual(counts.QUEUED, 0); + c.mustEqual(counts.RUNNING, 0); + c.mustEqual(counts.EXECUTING, 0); + c.mustEqual(counts.DONE, 2); + + c.checkResultsOrder([[0], [1]]); + done(); + }); + + c.limiter + .schedule(() => Promise.resolve(true)) + .catch(function (err) { + c.mustEqual(err.message, "Stopped!"); + submitFailed = true; + }); + }, 125); + }); + + it("Should stop and let the queue finish", function (done) { + c = makeTest({ + maxConcurrent: 1, + minTime: 100, + trackDoneStatus: true, + }); + var submitFailed = false; + var dropped = 0; - c.limiter.schedule(() => 
Promise.resolve(true)) - .catch(function (err) { - c.mustEqual(err.message, 'Stopped!') - submitFailed = true - }) + c.limiter.on("dropped", function () { + dropped++; + }); - }, 125) - }) + c.pNoErrVal(c.limiter.schedule({ id: "1" }, c.promise, null, 1), 1); + c.pNoErrVal(c.limiter.schedule({ id: "2" }, c.promise, null, 2), 2); + c.pNoErrVal(c.limiter.schedule({ id: "3" }, c.slowPromise, 100, null, 3), 3); - it('Should stop and let the queue finish', function (done) { + setTimeout(function () { + var counts = c.limiter.counts(); + c.mustEqual(counts.RECEIVED, 0); + c.mustEqual(counts.QUEUED, 1); + c.mustEqual(counts.RUNNING, 1); + c.mustEqual(counts.EXECUTING, 0); + c.mustEqual(counts.DONE, 1); + + c.limiter + .stop({ + enqueueErrorMessage: "Stopped!", + dropWaitingJobs: false, + }) + .then(function () { + counts = c.limiter.counts(); + c.mustEqual(submitFailed, true); + c.mustEqual(dropped, 0); + c.mustEqual(counts.RECEIVED, 0); + c.mustEqual(counts.QUEUED, 0); + c.mustEqual(counts.RUNNING, 0); + c.mustEqual(counts.EXECUTING, 0); + c.mustEqual(counts.DONE, 4); + + c.checkResultsOrder([[1], [2], [3]]); + done(); + }); + + c.limiter + .schedule(() => Promise.resolve(true)) + .catch(function (err) { + c.mustEqual(err.message, "Stopped!"); + submitFailed = true; + }); + }, 75); + }); + + it("Should still resolve when rejectOnDrop is false", function (done) { c = makeTest({ maxConcurrent: 1, minTime: 100, - trackDoneStatus: true - }) - var submitFailed = false - var dropped = 0 + rejectOnDrop: false, + }); - c.limiter.on('dropped', function () { - dropped++ - }) + c.pNoErrVal(c.limiter.schedule({ id: "1" }, c.promise, null, 1), 1); + c.pNoErrVal(c.limiter.schedule({ id: "2" }, c.promise, null, 2), 2); + c.pNoErrVal(c.limiter.schedule({ id: "3" }, c.slowPromise, 100, null, 3), 3); - c.pNoErrVal(c.limiter.schedule({id: '1'}, c.promise, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({id: '2'}, c.promise, null, 2), 2) - c.pNoErrVal(c.limiter.schedule({id: '3'}, 
c.slowPromise, 100, null, 3), 3) - - setTimeout(function () { - var counts = c.limiter.counts() - c.mustEqual(counts.RECEIVED, 0) - c.mustEqual(counts.QUEUED, 1) - c.mustEqual(counts.RUNNING, 1) - c.mustEqual(counts.EXECUTING, 0) - c.mustEqual(counts.DONE, 1) - - c.limiter.stop({ - enqueueErrorMessage: 'Stopped!', - dropWaitingJobs: false + c.limiter + .stop() + .then(function () { + return c.limiter.stop(); }) .then(function () { - counts = c.limiter.counts() - c.mustEqual(submitFailed, true) - c.mustEqual(dropped, 0) - c.mustEqual(counts.RECEIVED, 0) - c.mustEqual(counts.QUEUED, 0) - c.mustEqual(counts.RUNNING, 0) - c.mustEqual(counts.EXECUTING, 0) - c.mustEqual(counts.DONE, 4) - - c.checkResultsOrder([[1], [2], [3]]) - done() + done(new Error("Should not be here")); }) - - c.limiter.schedule(() => Promise.resolve(true)) .catch(function (err) { - c.mustEqual(err.message, 'Stopped!') - submitFailed = true - }) - - }, 75) - }) + c.mustEqual(err.message, "stop() has already been called"); + done(); + }); + }); - it('Should still resolve when rejectOnDrop is false', function (done) { + it("Should not allow calling stop() twice when dropWaitingJobs=true", function (done) { c = makeTest({ maxConcurrent: 1, minTime: 100, - rejectOnDrop: false - }) - - c.pNoErrVal(c.limiter.schedule({id: '1'}, c.promise, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({id: '2'}, c.promise, null, 2), 2) - c.pNoErrVal(c.limiter.schedule({id: '3'}, c.slowPromise, 100, null, 3), 3) - - c.limiter.stop() - .then(function () { - return c.limiter.stop() - }) - .then(function () { - done(new Error("Should not be here")) - }) - .catch(function (err) { - c.mustEqual(err.message, "stop() has already been called") - done() - }) - }) - - it('Should not allow calling stop() twice when dropWaitingJobs=true', function (done) { - c = makeTest({ - maxConcurrent: 1, - minTime: 100 - }) - var failed = 0 + }); + var failed = 0; var handler = function (err) { - c.mustEqual(err.message, "This limiter has been 
stopped.") - failed++ - } - - c.pNoErrVal(c.limiter.schedule({id: '1'}, c.promise, null, 1), 1).catch(handler) - c.pNoErrVal(c.limiter.schedule({id: '2'}, c.promise, null, 2), 2).catch(handler) - c.pNoErrVal(c.limiter.schedule({id: '3'}, c.slowPromise, 100, null, 3), 3).catch(handler) - - c.limiter.stop({ dropWaitingJobs: true }) - .then(function () { - return c.limiter.stop({ dropWaitingJobs: true }) - }) - .then(function () { - done(new Error("Should not be here")) - }) - .catch(function (err) { - c.mustEqual(err.message, "stop() has already been called") - c.mustEqual(failed, 3) - done() - }) - }) - - it('Should not allow calling stop() twice when dropWaitingJobs=false', function (done) { + c.mustEqual(err.message, "This limiter has been stopped."); + failed++; + }; + + c.pNoErrVal(c.limiter.schedule({ id: "1" }, c.promise, null, 1), 1).catch(handler); + c.pNoErrVal(c.limiter.schedule({ id: "2" }, c.promise, null, 2), 2).catch(handler); + c.pNoErrVal(c.limiter.schedule({ id: "3" }, c.slowPromise, 100, null, 3), 3).catch(handler); + + c.limiter + .stop({ dropWaitingJobs: true }) + .then(function () { + return c.limiter.stop({ dropWaitingJobs: true }); + }) + .then(function () { + done(new Error("Should not be here")); + }) + .catch(function (err) { + c.mustEqual(err.message, "stop() has already been called"); + c.mustEqual(failed, 3); + done(); + }); + }); + + it("Should not allow calling stop() twice when dropWaitingJobs=false", function (done) { c = makeTest({ maxConcurrent: 1, - minTime: 100 - }) - - c.pNoErrVal(c.limiter.schedule({id: '1'}, c.promise, null, 1), 1) - c.pNoErrVal(c.limiter.schedule({id: '2'}, c.promise, null, 2), 2) - c.pNoErrVal(c.limiter.schedule({id: '3'}, c.slowPromise, 100, null, 3), 3) - - c.limiter.stop({ dropWaitingJobs: false }) - .then(function () { - return c.limiter.stop({ dropWaitingJobs: false }) - }) - .then(function () { - done(new Error("Should not be here")) - }) - .catch(function (err) { - c.mustEqual(err.message, "stop() has 
already been called") - done() - }) - }) - -}) + minTime: 100, + }); + + c.pNoErrVal(c.limiter.schedule({ id: "1" }, c.promise, null, 1), 1); + c.pNoErrVal(c.limiter.schedule({ id: "2" }, c.promise, null, 2), 2); + c.pNoErrVal(c.limiter.schedule({ id: "3" }, c.slowPromise, 100, null, 3), 3); + + c.limiter + .stop({ dropWaitingJobs: false }) + .then(function () { + return c.limiter.stop({ dropWaitingJobs: false }); + }) + .then(function () { + done(new Error("Should not be here")); + }) + .catch(function (err) { + c.mustEqual(err.message, "stop() has already been called"); + done(); + }); + }); +}); diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..365ba67 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,24 @@ +{ + "include": ["src/*.js"], + "compilerOptions": { + /* Base Options: */ + "esModuleInterop": true, + "skipLibCheck": true, + "target": "es2015", + "allowJs": true, + "resolveJsonModule": false, + "moduleDetection": "force", + "isolatedModules": true, + "verbatimModuleSyntax": true, + + /* Strictness */ + "strict": true, + "noUncheckedIndexedAccess": true, + "noImplicitOverride": true, + + /* Transpilation */ + "module": "preserve", + "noEmit": false, + "outDir": "lib" + } +} diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 0000000..de4dabb --- /dev/null +++ b/yarn.lock @@ -0,0 +1,2842 @@ +# This file is generated by running "yarn install" inside your project. +# Manual changes might be lost - proceed with caution! 
+ +__metadata: + version: 8 + cacheKey: 10c0 + +"@airbnb/node-memwatch@npm:^2.0.0": + version: 2.0.0 + resolution: "@airbnb/node-memwatch@npm:2.0.0" + dependencies: + bindings: "npm:^1.5.0" + nan: "npm:^2.14.1" + node-gyp: "npm:latest" + checksum: 10c0/87f71ef4bfc757c73b69e2591badddcf6174aea1845e315bca3a47ea85d4e23a40a7bfa6376544b4e06532ac6ecc9d531fdd770cc27eee390dc0454187a015b2 + languageName: node + linkType: hard + +"@eslint-community/eslint-utils@npm:^4.2.0": + version: 4.4.1 + resolution: "@eslint-community/eslint-utils@npm:4.4.1" + dependencies: + eslint-visitor-keys: "npm:^3.4.3" + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + checksum: 10c0/2aa0ac2fc50ff3f234408b10900ed4f1a0b19352f21346ad4cc3d83a1271481bdda11097baa45d484dd564c895e0762a27a8240be7a256b3ad47129e96528252 + languageName: node + linkType: hard + +"@eslint-community/regexpp@npm:^4.12.1": + version: 4.12.1 + resolution: "@eslint-community/regexpp@npm:4.12.1" + checksum: 10c0/a03d98c246bcb9109aec2c08e4d10c8d010256538dcb3f56610191607214523d4fb1b00aa81df830b6dffb74c5fa0be03642513a289c567949d3e550ca11cdf6 + languageName: node + linkType: hard + +"@eslint/config-array@npm:^0.21.0": + version: 0.21.0 + resolution: "@eslint/config-array@npm:0.21.0" + dependencies: + "@eslint/object-schema": "npm:^2.1.6" + debug: "npm:^4.3.1" + minimatch: "npm:^3.1.2" + checksum: 10c0/0ea801139166c4aa56465b309af512ef9b2d3c68f9198751bbc3e21894fe70f25fbf26e1b0e9fffff41857bc21bfddeee58649ae6d79aadcd747db0c5dca771f + languageName: node + linkType: hard + +"@eslint/config-helpers@npm:^0.3.1": + version: 0.3.1 + resolution: "@eslint/config-helpers@npm:0.3.1" + checksum: 10c0/f6c5b3a0b76a0d7d84cc93e310c259e6c3e0792ddd0a62c5fc0027796ffae44183432cb74b2c2b1162801ee1b1b34a6beb5d90a151632b4df7349f994146a856 + languageName: node + linkType: hard + +"@eslint/core@npm:^0.15.2": + version: 0.15.2 + resolution: "@eslint/core@npm:0.15.2" + dependencies: + "@types/json-schema": "npm:^7.0.15" + checksum: 
10c0/c17a6dc4f5a6006ecb60165cc38bcd21fefb4a10c7a2578a0cfe5813bbd442531a87ed741da5adab5eb678e8e693fda2e2b14555b035355537e32bcec367ea17 + languageName: node + linkType: hard + +"@eslint/eslintrc@npm:^3.3.1": + version: 3.3.1 + resolution: "@eslint/eslintrc@npm:3.3.1" + dependencies: + ajv: "npm:^6.12.4" + debug: "npm:^4.3.2" + espree: "npm:^10.0.1" + globals: "npm:^14.0.0" + ignore: "npm:^5.2.0" + import-fresh: "npm:^3.2.1" + js-yaml: "npm:^4.1.0" + minimatch: "npm:^3.1.2" + strip-json-comments: "npm:^3.1.1" + checksum: 10c0/b0e63f3bc5cce4555f791a4e487bf999173fcf27c65e1ab6e7d63634d8a43b33c3693e79f192cbff486d7df1be8ebb2bd2edc6e70ddd486cbfa84a359a3e3b41 + languageName: node + linkType: hard + +"@eslint/js@npm:9.34.0, @eslint/js@npm:^9.34.0": + version: 9.34.0 + resolution: "@eslint/js@npm:9.34.0" + checksum: 10c0/53f1bfd2a374683d9382a6850354555f6e89a88416c34a5d34e9fbbaf717e97c2b06300e8f93e5eddba8bda8951ccab7f93a680e56ded1a3d21d526019e69bab + languageName: node + linkType: hard + +"@eslint/object-schema@npm:^2.1.6": + version: 2.1.6 + resolution: "@eslint/object-schema@npm:2.1.6" + checksum: 10c0/b8cdb7edea5bc5f6a96173f8d768d3554a628327af536da2fc6967a93b040f2557114d98dbcdbf389d5a7b290985ad6a9ce5babc547f36fc1fde42e674d11a56 + languageName: node + linkType: hard + +"@eslint/plugin-kit@npm:^0.3.5": + version: 0.3.5 + resolution: "@eslint/plugin-kit@npm:0.3.5" + dependencies: + "@eslint/core": "npm:^0.15.2" + levn: "npm:^0.4.1" + checksum: 10c0/c178c1b58c574200c0fd125af3e4bc775daba7ce434ba6d1eeaf9bcb64b2e9fea75efabffb3ed3ab28858e55a016a5efa95f509994ee4341b341199ca630b89e + languageName: node + linkType: hard + +"@humanfs/core@npm:^0.19.1": + version: 0.19.1 + resolution: "@humanfs/core@npm:0.19.1" + checksum: 10c0/aa4e0152171c07879b458d0e8a704b8c3a89a8c0541726c6b65b81e84fd8b7564b5d6c633feadc6598307d34564bd53294b533491424e8e313d7ab6c7bc5dc67 + languageName: node + linkType: hard + +"@humanfs/node@npm:^0.16.6": + version: 0.16.6 + resolution: "@humanfs/node@npm:0.16.6" + 
dependencies: + "@humanfs/core": "npm:^0.19.1" + "@humanwhocodes/retry": "npm:^0.3.0" + checksum: 10c0/8356359c9f60108ec204cbd249ecd0356667359b2524886b357617c4a7c3b6aace0fd5a369f63747b926a762a88f8a25bc066fa1778508d110195ce7686243e1 + languageName: node + linkType: hard + +"@humanwhocodes/module-importer@npm:^1.0.1": + version: 1.0.1 + resolution: "@humanwhocodes/module-importer@npm:1.0.1" + checksum: 10c0/909b69c3b86d482c26b3359db16e46a32e0fb30bd306a3c176b8313b9e7313dba0f37f519de6aa8b0a1921349e505f259d19475e123182416a506d7f87e7f529 + languageName: node + linkType: hard + +"@humanwhocodes/retry@npm:^0.3.0": + version: 0.3.1 + resolution: "@humanwhocodes/retry@npm:0.3.1" + checksum: 10c0/f0da1282dfb45e8120480b9e2e275e2ac9bbe1cf016d046fdad8e27cc1285c45bb9e711681237944445157b430093412b4446c1ab3fc4bb037861b5904101d3b + languageName: node + linkType: hard + +"@humanwhocodes/retry@npm:^0.4.2": + version: 0.4.2 + resolution: "@humanwhocodes/retry@npm:0.4.2" + checksum: 10c0/0235525d38f243bee3bf8b25ed395fbf957fb51c08adae52787e1325673071abe856c7e18e530922ed2dd3ce12ed82ba01b8cee0279ac52a3315fcdc3a69ef0c + languageName: node + linkType: hard + +"@ioredis/commands@npm:^1.3.0": + version: 1.3.0 + resolution: "@ioredis/commands@npm:1.3.0" + checksum: 10c0/5ab990a8f69c20daf3d7d64307aa9f13ee727c92ab4c7664a6943bb500227667a0c368892e9c4913f06416377db47dba78d58627fe723da476d25f2c04a6d5aa + languageName: node + linkType: hard + +"@isaacs/cliui@npm:^8.0.2": + version: 8.0.2 + resolution: "@isaacs/cliui@npm:8.0.2" + dependencies: + string-width: "npm:^5.1.2" + string-width-cjs: "npm:string-width@^4.2.0" + strip-ansi: "npm:^7.0.1" + strip-ansi-cjs: "npm:strip-ansi@^6.0.1" + wrap-ansi: "npm:^8.1.0" + wrap-ansi-cjs: "npm:wrap-ansi@^7.0.0" + checksum: 10c0/b1bf42535d49f11dc137f18d5e4e63a28c5569de438a221c369483731e9dac9fb797af554e8bf02b6192d1e5eba6e6402cf93900c3d0ac86391d00d04876789e + languageName: node + linkType: hard + +"@isaacs/fs-minipass@npm:^4.0.0": + version: 4.0.1 + resolution: 
"@isaacs/fs-minipass@npm:4.0.1" + dependencies: + minipass: "npm:^7.0.4" + checksum: 10c0/c25b6dc1598790d5b55c0947a9b7d111cfa92594db5296c3b907e2f533c033666f692a3939eadac17b1c7c40d362d0b0635dc874cbfe3e70db7c2b07cc97a5d2 + languageName: node + linkType: hard + +"@jridgewell/sourcemap-codec@npm:^1.5.0": + version: 1.5.0 + resolution: "@jridgewell/sourcemap-codec@npm:1.5.0" + checksum: 10c0/2eb864f276eb1096c3c11da3e9bb518f6d9fc0023c78344cdc037abadc725172c70314bdb360f2d4b7bffec7f5d657ce006816bc5d4ecb35e61b66132db00c18 + languageName: node + linkType: hard + +"@npmcli/agent@npm:^3.0.0": + version: 3.0.0 + resolution: "@npmcli/agent@npm:3.0.0" + dependencies: + agent-base: "npm:^7.1.0" + http-proxy-agent: "npm:^7.0.0" + https-proxy-agent: "npm:^7.0.1" + lru-cache: "npm:^10.0.1" + socks-proxy-agent: "npm:^8.0.3" + checksum: 10c0/efe37b982f30740ee77696a80c196912c274ecd2cb243bc6ae7053a50c733ce0f6c09fda085145f33ecf453be19654acca74b69e81eaad4c90f00ccffe2f9271 + languageName: node + linkType: hard + +"@npmcli/fs@npm:^4.0.0": + version: 4.0.0 + resolution: "@npmcli/fs@npm:4.0.0" + dependencies: + semver: "npm:^7.3.5" + checksum: 10c0/c90935d5ce670c87b6b14fab04a965a3b8137e585f8b2a6257263bd7f97756dd736cb165bb470e5156a9e718ecd99413dccc54b1138c1a46d6ec7cf325982fe5 + languageName: node + linkType: hard + +"@pkgjs/parseargs@npm:^0.11.0": + version: 0.11.0 + resolution: "@pkgjs/parseargs@npm:0.11.0" + checksum: 10c0/5bd7576bb1b38a47a7fc7b51ac9f38748e772beebc56200450c4a817d712232b8f1d3ef70532c80840243c657d491cf6a6be1e3a214cff907645819fdc34aadd + languageName: node + linkType: hard + +"@reteps/dockerfmt@npm:^0.3.6": + version: 0.3.6 + resolution: "@reteps/dockerfmt@npm:0.3.6" + checksum: 10c0/b6ca467ba97ea49071c44d0fbecf131fc8045165e950d0d01372c1834000c58d53f62bff42f09b851f7a9d91899047f071cd8fe57e1fc88fc27e2a3d2bdb214d + languageName: node + linkType: hard + +"@rollup/plugin-commonjs@npm:^28.0.6": + version: 28.0.6 + resolution: "@rollup/plugin-commonjs@npm:28.0.6" + dependencies: + 
"@rollup/pluginutils": "npm:^5.0.1" + commondir: "npm:^1.0.1" + estree-walker: "npm:^2.0.2" + fdir: "npm:^6.2.0" + is-reference: "npm:1.2.1" + magic-string: "npm:^0.30.3" + picomatch: "npm:^4.0.2" + peerDependencies: + rollup: ^2.68.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 10c0/67fa297384c2494c8f85df102c030e7f8ed8f600cfccdd1143266112ee4037d37faa1bda44a571dab35b48297342024551e995ad2f8a4d86da0aa1f33ec61868 + languageName: node + linkType: hard + +"@rollup/plugin-json@npm:^6.1.0": + version: 6.1.0 + resolution: "@rollup/plugin-json@npm:6.1.0" + dependencies: + "@rollup/pluginutils": "npm:^5.1.0" + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 10c0/9400c431b5e0cf3088ba2eb2d038809a2b0fb2a84ed004997da85582f48cd64958ed3168893c4f2c8109e38652400ed68282d0c92bf8ec07a3b2ef2e1ceab0b7 + languageName: node + linkType: hard + +"@rollup/plugin-node-resolve@npm:^16.0.1": + version: 16.0.1 + resolution: "@rollup/plugin-node-resolve@npm:16.0.1" + dependencies: + "@rollup/pluginutils": "npm:^5.0.1" + "@types/resolve": "npm:1.20.2" + deepmerge: "npm:^4.2.2" + is-module: "npm:^1.0.0" + resolve: "npm:^1.22.1" + peerDependencies: + rollup: ^2.78.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 10c0/54d33282321492fafec29b49c66dd1efd90c72a24f9d1569dcb57a72ab8de8a782810f39fdb917b96ec6a598c18f3416588b419bf7af331793a010de1fe28c60 + languageName: node + linkType: hard + +"@rollup/pluginutils@npm:^5.0.1, @rollup/pluginutils@npm:^5.1.0": + version: 5.1.4 + resolution: "@rollup/pluginutils@npm:5.1.4" + dependencies: + "@types/estree": "npm:^1.0.0" + estree-walker: "npm:^2.0.2" + picomatch: "npm:^4.0.2" + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 
10c0/6d58fbc6f1024eb4b087bc9bf59a1d655a8056a60c0b4021d3beaeec3f0743503f52467fd89d2cf0e7eccf2831feb40a05ad541a17637ea21ba10b21c2004deb + languageName: node + linkType: hard + +"@rollup/rollup-android-arm-eabi@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-android-arm-eabi@npm:4.48.1" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@rollup/rollup-android-arm64@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-android-arm64@npm:4.48.1" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-darwin-arm64@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-darwin-arm64@npm:4.48.1" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-darwin-x64@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-darwin-x64@npm:4.48.1" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-freebsd-arm64@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-freebsd-arm64@npm:4.48.1" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-freebsd-x64@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-freebsd-x64@npm:4.48.1" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm-gnueabihf@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.48.1" + conditions: os=linux & cpu=arm & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm-musleabihf@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.48.1" + conditions: os=linux & cpu=arm & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm64-gnu@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.48.1" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + 
+"@rollup/rollup-linux-arm64-musl@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-arm64-musl@npm:4.48.1" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-loongarch64-gnu@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-loongarch64-gnu@npm:4.48.1" + conditions: os=linux & cpu=loong64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-ppc64-gnu@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-ppc64-gnu@npm:4.48.1" + conditions: os=linux & cpu=ppc64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-riscv64-gnu@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.48.1" + conditions: os=linux & cpu=riscv64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-riscv64-musl@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.48.1" + conditions: os=linux & cpu=riscv64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-s390x-gnu@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.48.1" + conditions: os=linux & cpu=s390x & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-gnu@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.48.1" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-musl@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.48.1" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-win32-arm64-msvc@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.48.1" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-ia32-msvc@npm:4.48.1": + version: 4.48.1 + resolution: 
"@rollup/rollup-win32-ia32-msvc@npm:4.48.1" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@rollup/rollup-win32-x64-msvc@npm:4.48.1": + version: 4.48.1 + resolution: "@rollup/rollup-win32-x64-msvc@npm:4.48.1" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@sderrow/bottleneck@workspace:.": + version: 0.0.0-use.local + resolution: "@sderrow/bottleneck@workspace:." + dependencies: + "@eslint/js": "npm:^9.34.0" + "@rollup/plugin-commonjs": "npm:^28.0.6" + "@rollup/plugin-json": "npm:^6.1.0" + "@rollup/plugin-node-resolve": "npm:^16.0.1" + "@token-cjg/leakage": "npm:^0.6.0" + "@types/node": "npm:^22.18.0" + eslint: "npm:^9.34.0" + eslint-config-prettier: "npm:^10.1.8" + globals: "npm:^16.3.0" + husky: "npm:^9.1.7" + ioredis: "npm:^5.7.0" + lint-staged: "npm:^16.1.5" + mocha: "npm:^11.7.1" + prettier: "npm:^3.6.2" + prettier-plugin-organize-imports: "npm:^4.2.0" + prettier-plugin-pkg: "npm:^0.21.2" + prettier-plugin-sh: "npm:^0.18.0" + redis: "npm:^2.8.0" + rollup: "npm:^4.48.1" + typescript: "npm:^5.9.2" + languageName: unknown + linkType: soft + +"@token-cjg/leakage@npm:^0.6.0": + version: 0.6.0 + resolution: "@token-cjg/leakage@npm:0.6.0" + dependencies: + "@airbnb/node-memwatch": "npm:^2.0.0" + es6-error: "npm:^4.0.2" + minimist: "npm:^1.2.0" + pretty-bytes: "npm:^4.0.2" + checksum: 10c0/63e8cc55ff25a99a424a31a107b3dd487f659f9911429f361a20ee19f986dbca74a66104f10f84fbc97e91ce47fbb3f2e1141b7f37ded1e72d2468b4884b398c + languageName: node + linkType: hard + +"@types/estree@npm:*, @types/estree@npm:^1.0.0": + version: 1.0.7 + resolution: "@types/estree@npm:1.0.7" + checksum: 10c0/be815254316882f7c40847336cd484c3bc1c3e34f710d197160d455dc9d6d050ffbf4c3bc76585dba86f737f020ab20bdb137ebe0e9116b0c86c7c0342221b8c + languageName: node + linkType: hard + +"@types/estree@npm:1.0.8": + version: 1.0.8 + resolution: "@types/estree@npm:1.0.8" + checksum: 
10c0/39d34d1afaa338ab9763f37ad6066e3f349444f9052b9676a7cc0252ef9485a41c6d81c9c4e0d26e9077993354edf25efc853f3224dd4b447175ef62bdcc86a5 + languageName: node + linkType: hard + +"@types/estree@npm:^1.0.6": + version: 1.0.6 + resolution: "@types/estree@npm:1.0.6" + checksum: 10c0/cdfd751f6f9065442cd40957c07fd80361c962869aa853c1c2fd03e101af8b9389d8ff4955a43a6fcfa223dd387a089937f95be0f3eec21ca527039fd2d9859a + languageName: node + linkType: hard + +"@types/json-schema@npm:^7.0.15": + version: 7.0.15 + resolution: "@types/json-schema@npm:7.0.15" + checksum: 10c0/a996a745e6c5d60292f36731dd41341339d4eeed8180bb09226e5c8d23759067692b1d88e5d91d72ee83dfc00d3aca8e7bd43ea120516c17922cbcb7c3e252db + languageName: node + linkType: hard + +"@types/node@npm:^22.18.0": + version: 22.18.0 + resolution: "@types/node@npm:22.18.0" + dependencies: + undici-types: "npm:~6.21.0" + checksum: 10c0/02cce4493eee8408e66e76fcad164f33c0600ed0854ad08e5519a76a06402da5b589b278cf71bc975c9e014f2668bdf758bc3be7fed63bdbfd0900495372797c + languageName: node + linkType: hard + +"@types/resolve@npm:1.20.2": + version: 1.20.2 + resolution: "@types/resolve@npm:1.20.2" + checksum: 10c0/c5b7e1770feb5ccfb6802f6ad82a7b0d50874c99331e0c9b259e415e55a38d7a86ad0901c57665d93f75938be2a6a0bc9aa06c9749192cadb2e4512800bbc6e6 + languageName: node + linkType: hard + +"abbrev@npm:^3.0.0": + version: 3.0.1 + resolution: "abbrev@npm:3.0.1" + checksum: 10c0/21ba8f574ea57a3106d6d35623f2c4a9111d9ee3e9a5be47baed46ec2457d2eac46e07a5c4a60186f88cb98abbe3e24f2d4cca70bc2b12f1692523e2209a9ccf + languageName: node + linkType: hard + +"acorn-jsx@npm:^5.3.2": + version: 5.3.2 + resolution: "acorn-jsx@npm:5.3.2" + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + checksum: 10c0/4c54868fbef3b8d58927d5e33f0a4de35f59012fe7b12cf9dfbb345fb8f46607709e1c4431be869a23fb63c151033d84c4198fa9f79385cec34fcb1dd53974c1 + languageName: node + linkType: hard + +"acorn@npm:^8.14.0": + version: 8.14.0 + resolution: "acorn@npm:8.14.0" + bin: + acorn: 
bin/acorn + checksum: 10c0/6d4ee461a7734b2f48836ee0fbb752903606e576cc100eb49340295129ca0b452f3ba91ddd4424a1d4406a98adfb2ebb6bd0ff4c49d7a0930c10e462719bbfd7 + languageName: node + linkType: hard + +"acorn@npm:^8.15.0": + version: 8.15.0 + resolution: "acorn@npm:8.15.0" + bin: + acorn: bin/acorn + checksum: 10c0/dec73ff59b7d6628a01eebaece7f2bdb8bb62b9b5926dcad0f8931f2b8b79c2be21f6c68ac095592adb5adb15831a3635d9343e6a91d028bbe85d564875ec3ec + languageName: node + linkType: hard + +"agent-base@npm:^7.1.0, agent-base@npm:^7.1.2": + version: 7.1.3 + resolution: "agent-base@npm:7.1.3" + checksum: 10c0/6192b580c5b1d8fb399b9c62bf8343d76654c2dd62afcb9a52b2cf44a8b6ace1e3b704d3fe3547d91555c857d3df02603341ff2cb961b9cfe2b12f9f3c38ee11 + languageName: node + linkType: hard + +"ajv@npm:^6.12.4": + version: 6.12.6 + resolution: "ajv@npm:6.12.6" + dependencies: + fast-deep-equal: "npm:^3.1.1" + fast-json-stable-stringify: "npm:^2.0.0" + json-schema-traverse: "npm:^0.4.1" + uri-js: "npm:^4.2.2" + checksum: 10c0/41e23642cbe545889245b9d2a45854ebba51cda6c778ebced9649420d9205f2efb39cb43dbc41e358409223b1ea43303ae4839db682c848b891e4811da1a5a71 + languageName: node + linkType: hard + +"ansi-escapes@npm:^7.0.0": + version: 7.0.0 + resolution: "ansi-escapes@npm:7.0.0" + dependencies: + environment: "npm:^1.0.0" + checksum: 10c0/86e51e36fabef18c9c004af0a280573e828900641cea35134a124d2715e0c5a473494ab4ce396614505da77638ae290ff72dd8002d9747d2ee53f5d6bbe336be + languageName: node + linkType: hard + +"ansi-regex@npm:^5.0.1": + version: 5.0.1 + resolution: "ansi-regex@npm:5.0.1" + checksum: 10c0/9a64bb8627b434ba9327b60c027742e5d17ac69277960d041898596271d992d4d52ba7267a63ca10232e29f6107fc8a835f6ce8d719b88c5f8493f8254813737 + languageName: node + linkType: hard + +"ansi-regex@npm:^6.0.1": + version: 6.1.0 + resolution: "ansi-regex@npm:6.1.0" + checksum: 10c0/a91daeddd54746338478eef88af3439a7edf30f8e23196e2d6ed182da9add559c601266dbef01c2efa46a958ad6f1f8b176799657616c702b5b02e799e7fd8dc + languageName: 
node + linkType: hard + +"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": + version: 4.3.0 + resolution: "ansi-styles@npm:4.3.0" + dependencies: + color-convert: "npm:^2.0.1" + checksum: 10c0/895a23929da416f2bd3de7e9cb4eabd340949328ab85ddd6e484a637d8f6820d485f53933446f5291c3b760cbc488beb8e88573dd0f9c7daf83dccc8fe81b041 + languageName: node + linkType: hard + +"ansi-styles@npm:^6.0.0, ansi-styles@npm:^6.1.0, ansi-styles@npm:^6.2.1": + version: 6.2.1 + resolution: "ansi-styles@npm:6.2.1" + checksum: 10c0/5d1ec38c123984bcedd996eac680d548f31828bd679a66db2bdf11844634dde55fec3efa9c6bb1d89056a5e79c1ac540c4c784d592ea1d25028a92227d2f2d5c + languageName: node + linkType: hard + +"argparse@npm:^2.0.1": + version: 2.0.1 + resolution: "argparse@npm:2.0.1" + checksum: 10c0/c5640c2d89045371c7cedd6a70212a04e360fd34d6edeae32f6952c63949e3525ea77dbec0289d8213a99bbaeab5abfa860b5c12cf88a2e6cf8106e90dd27a7e + languageName: node + linkType: hard + +"balanced-match@npm:^1.0.0": + version: 1.0.2 + resolution: "balanced-match@npm:1.0.2" + checksum: 10c0/9308baf0a7e4838a82bbfd11e01b1cb0f0cf2893bc1676c27c2a8c0e70cbae1c59120c3268517a8ae7fb6376b4639ef81ca22582611dbee4ed28df945134aaee + languageName: node + linkType: hard + +"bindings@npm:^1.5.0": + version: 1.5.0 + resolution: "bindings@npm:1.5.0" + dependencies: + file-uri-to-path: "npm:1.0.0" + checksum: 10c0/3dab2491b4bb24124252a91e656803eac24292473e56554e35bbfe3cc1875332cfa77600c3bac7564049dc95075bf6fcc63a4609920ff2d64d0fe405fcf0d4ba + languageName: node + linkType: hard + +"brace-expansion@npm:^1.1.7": + version: 1.1.11 + resolution: "brace-expansion@npm:1.1.11" + dependencies: + balanced-match: "npm:^1.0.0" + concat-map: "npm:0.0.1" + checksum: 10c0/695a56cd058096a7cb71fb09d9d6a7070113c7be516699ed361317aca2ec169f618e28b8af352e02ab4233fb54eb0168460a40dc320bab0034b36ab59aaad668 + languageName: node + linkType: hard + +"brace-expansion@npm:^2.0.1": + version: 2.0.1 + resolution: "brace-expansion@npm:2.0.1" + dependencies: + balanced-match: 
"npm:^1.0.0" + checksum: 10c0/b358f2fe060e2d7a87aa015979ecea07f3c37d4018f8d6deb5bd4c229ad3a0384fe6029bb76cd8be63c81e516ee52d1a0673edbe2023d53a5191732ae3c3e49f + languageName: node + linkType: hard + +"braces@npm:^3.0.3": + version: 3.0.3 + resolution: "braces@npm:3.0.3" + dependencies: + fill-range: "npm:^7.1.1" + checksum: 10c0/7c6dfd30c338d2997ba77500539227b9d1f85e388a5f43220865201e407e076783d0881f2d297b9f80951b4c957fcf0b51c1d2d24227631643c3f7c284b0aa04 + languageName: node + linkType: hard + +"browser-stdout@npm:^1.3.1": + version: 1.3.1 + resolution: "browser-stdout@npm:1.3.1" + checksum: 10c0/c40e482fd82be872b6ea7b9f7591beafbf6f5ba522fe3dade98ba1573a1c29a11101564993e4eb44e5488be8f44510af072df9a9637c739217eb155ceb639205 + languageName: node + linkType: hard + +"cacache@npm:^19.0.1": + version: 19.0.1 + resolution: "cacache@npm:19.0.1" + dependencies: + "@npmcli/fs": "npm:^4.0.0" + fs-minipass: "npm:^3.0.0" + glob: "npm:^10.2.2" + lru-cache: "npm:^10.0.1" + minipass: "npm:^7.0.3" + minipass-collect: "npm:^2.0.1" + minipass-flush: "npm:^1.0.5" + minipass-pipeline: "npm:^1.2.4" + p-map: "npm:^7.0.2" + ssri: "npm:^12.0.0" + tar: "npm:^7.4.3" + unique-filename: "npm:^4.0.0" + checksum: 10c0/01f2134e1bd7d3ab68be851df96c8d63b492b1853b67f2eecb2c37bb682d37cb70bb858a16f2f0554d3c0071be6dfe21456a1ff6fa4b7eed996570d6a25ffe9c + languageName: node + linkType: hard + +"callsites@npm:^3.0.0": + version: 3.1.0 + resolution: "callsites@npm:3.1.0" + checksum: 10c0/fff92277400eb06c3079f9e74f3af120db9f8ea03bad0e84d9aede54bbe2d44a56cccb5f6cf12211f93f52306df87077ecec5b712794c5a9b5dac6d615a3f301 + languageName: node + linkType: hard + +"camelcase@npm:^6.0.0": + version: 6.3.0 + resolution: "camelcase@npm:6.3.0" + checksum: 10c0/0d701658219bd3116d12da3eab31acddb3f9440790c0792e0d398f0a520a6a4058018e546862b6fba89d7ae990efaeb97da71e1913e9ebf5a8b5621a3d55c710 + languageName: node + linkType: hard + +"chalk@npm:^4.0.0, chalk@npm:^4.1.0": + version: 4.1.2 + resolution: "chalk@npm:4.1.2" + 
dependencies: + ansi-styles: "npm:^4.1.0" + supports-color: "npm:^7.1.0" + checksum: 10c0/4a3fef5cc34975c898ffe77141450f679721df9dde00f6c304353fa9c8b571929123b26a0e4617bde5018977eb655b31970c297b91b63ee83bb82aeb04666880 + languageName: node + linkType: hard + +"chalk@npm:^5.5.0": + version: 5.6.0 + resolution: "chalk@npm:5.6.0" + checksum: 10c0/f8558fc12fd9805f167611803b325b0098bbccdc9f1d3bafead41c9bac61f263357f3c0df0cbe28bc2fd5fca3edcf618b01d6771a5a776b4c15d061482a72b23 + languageName: node + linkType: hard + +"chokidar@npm:^4.0.1": + version: 4.0.3 + resolution: "chokidar@npm:4.0.3" + dependencies: + readdirp: "npm:^4.0.1" + checksum: 10c0/a58b9df05bb452f7d105d9e7229ac82fa873741c0c40ddcc7bb82f8a909fbe3f7814c9ebe9bc9a2bef9b737c0ec6e2d699d179048ef06ad3ec46315df0ebe6ad + languageName: node + linkType: hard + +"chownr@npm:^3.0.0": + version: 3.0.0 + resolution: "chownr@npm:3.0.0" + checksum: 10c0/43925b87700f7e3893296c8e9c56cc58f926411cce3a6e5898136daaf08f08b9a8eb76d37d3267e707d0dcc17aed2e2ebdf5848c0c3ce95cf910a919935c1b10 + languageName: node + linkType: hard + +"cli-cursor@npm:^5.0.0": + version: 5.0.0 + resolution: "cli-cursor@npm:5.0.0" + dependencies: + restore-cursor: "npm:^5.0.0" + checksum: 10c0/7ec62f69b79f6734ab209a3e4dbdc8af7422d44d360a7cb1efa8a0887bbe466a6e625650c466fe4359aee44dbe2dc0b6994b583d40a05d0808a5cb193641d220 + languageName: node + linkType: hard + +"cli-truncate@npm:^4.0.0": + version: 4.0.0 + resolution: "cli-truncate@npm:4.0.0" + dependencies: + slice-ansi: "npm:^5.0.0" + string-width: "npm:^7.0.0" + checksum: 10c0/d7f0b73e3d9b88cb496e6c086df7410b541b56a43d18ade6a573c9c18bd001b1c3fba1ad578f741a4218fdc794d042385f8ac02c25e1c295a2d8b9f3cb86eb4c + languageName: node + linkType: hard + +"cliui@npm:^8.0.1": + version: 8.0.1 + resolution: "cliui@npm:8.0.1" + dependencies: + string-width: "npm:^4.2.0" + strip-ansi: "npm:^6.0.1" + wrap-ansi: "npm:^7.0.0" + checksum: 
10c0/4bda0f09c340cbb6dfdc1ed508b3ca080f12992c18d68c6be4d9cf51756033d5266e61ec57529e610dacbf4da1c634423b0c1b11037709cc6b09045cbd815df5 + languageName: node + linkType: hard + +"cluster-key-slot@npm:^1.1.0": + version: 1.1.2 + resolution: "cluster-key-slot@npm:1.1.2" + checksum: 10c0/d7d39ca28a8786e9e801eeb8c770e3c3236a566625d7299a47bb71113fb2298ce1039596acb82590e598c52dbc9b1f088c8f587803e697cb58e1867a95ff94d3 + languageName: node + linkType: hard + +"color-convert@npm:^2.0.1": + version: 2.0.1 + resolution: "color-convert@npm:2.0.1" + dependencies: + color-name: "npm:~1.1.4" + checksum: 10c0/37e1150172f2e311fe1b2df62c6293a342ee7380da7b9cfdba67ea539909afbd74da27033208d01d6d5cfc65ee7868a22e18d7e7648e004425441c0f8a15a7d7 + languageName: node + linkType: hard + +"color-name@npm:~1.1.4": + version: 1.1.4 + resolution: "color-name@npm:1.1.4" + checksum: 10c0/a1a3f914156960902f46f7f56bc62effc6c94e84b2cae157a526b1c1f74b677a47ec602bf68a61abfa2b42d15b7c5651c6dbe72a43af720bc588dff885b10f95 + languageName: node + linkType: hard + +"colorette@npm:^2.0.20": + version: 2.0.20 + resolution: "colorette@npm:2.0.20" + checksum: 10c0/e94116ff33b0ff56f3b83b9ace895e5bf87c2a7a47b3401b8c3f3226e050d5ef76cf4072fb3325f9dc24d1698f9b730baf4e05eeaf861d74a1883073f4c98a40 + languageName: node + linkType: hard + +"commander@npm:^14.0.0": + version: 14.0.0 + resolution: "commander@npm:14.0.0" + checksum: 10c0/73c4babfa558077868d84522b11ef56834165d472b9e86a634cd4c3ae7fc72d59af6377d8878e06bd570fe8f3161eced3cbe383c38f7093272bb65bd242b595b + languageName: node + linkType: hard + +"commondir@npm:^1.0.1": + version: 1.0.1 + resolution: "commondir@npm:1.0.1" + checksum: 10c0/33a124960e471c25ee19280c9ce31ccc19574b566dc514fe4f4ca4c34fa8b0b57cf437671f5de380e11353ea9426213fca17687dd2ef03134fea2dbc53809fd6 + languageName: node + linkType: hard + +"concat-map@npm:0.0.1": + version: 0.0.1 + resolution: "concat-map@npm:0.0.1" + checksum: 
10c0/c996b1cfdf95b6c90fee4dae37e332c8b6eb7d106430c17d538034c0ad9a1630cb194d2ab37293b1bdd4d779494beee7786d586a50bd9376fd6f7bcc2bd4c98f + languageName: node + linkType: hard + +"cross-spawn@npm:^7.0.6": + version: 7.0.6 + resolution: "cross-spawn@npm:7.0.6" + dependencies: + path-key: "npm:^3.1.0" + shebang-command: "npm:^2.0.0" + which: "npm:^2.0.1" + checksum: 10c0/053ea8b2135caff68a9e81470e845613e374e7309a47731e81639de3eaeb90c3d01af0e0b44d2ab9d50b43467223b88567dfeb3262db942dc063b9976718ffc1 + languageName: node + linkType: hard + +"debug@npm:4, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.4, debug@npm:^4.3.5": + version: 4.4.0 + resolution: "debug@npm:4.4.0" + dependencies: + ms: "npm:^2.1.3" + peerDependenciesMeta: + supports-color: + optional: true + checksum: 10c0/db94f1a182bf886f57b4755f85b3a74c39b5114b9377b7ab375dc2cfa3454f09490cc6c30f829df3fc8042bc8b8995f6567ce5cd96f3bc3688bd24027197d9de + languageName: node + linkType: hard + +"debug@npm:^4.4.1": + version: 4.4.1 + resolution: "debug@npm:4.4.1" + dependencies: + ms: "npm:^2.1.3" + peerDependenciesMeta: + supports-color: + optional: true + checksum: 10c0/d2b44bc1afd912b49bb7ebb0d50a860dc93a4dd7d946e8de94abc957bb63726b7dd5aa48c18c2386c379ec024c46692e15ed3ed97d481729f929201e671fcd55 + languageName: node + linkType: hard + +"decamelize@npm:^4.0.0": + version: 4.0.0 + resolution: "decamelize@npm:4.0.0" + checksum: 10c0/e06da03fc05333e8cd2778c1487da67ffbea5b84e03ca80449519b8fa61f888714bbc6f459ea963d5641b4aa98832130eb5cd193d90ae9f0a27eee14be8e278d + languageName: node + linkType: hard + +"deep-is@npm:^0.1.3": + version: 0.1.4 + resolution: "deep-is@npm:0.1.4" + checksum: 10c0/7f0ee496e0dff14a573dc6127f14c95061b448b87b995fc96c017ce0a1e66af1675e73f1d6064407975bc4ea6ab679497a29fff7b5b9c4e99cb10797c1ad0b4c + languageName: node + linkType: hard + +"deepmerge@npm:^4.2.2": + version: 4.3.1 + resolution: "deepmerge@npm:4.3.1" + checksum: 
10c0/e53481aaf1aa2c4082b5342be6b6d8ad9dfe387bc92ce197a66dea08bd4265904a087e75e464f14d1347cf2ac8afe1e4c16b266e0561cc5df29382d3c5f80044 + languageName: node + linkType: hard + +"denque@npm:^2.1.0": + version: 2.1.0 + resolution: "denque@npm:2.1.0" + checksum: 10c0/f9ef81aa0af9c6c614a727cb3bd13c5d7db2af1abf9e6352045b86e85873e629690f6222f4edd49d10e4ccf8f078bbeec0794fafaf61b659c0589d0c511ec363 + languageName: node + linkType: hard + +"diff@npm:^7.0.0": + version: 7.0.0 + resolution: "diff@npm:7.0.0" + checksum: 10c0/251fd15f85ffdf814cfc35a728d526b8d2ad3de338dcbd011ac6e57c461417090766b28995f8ff733135b5fbc3699c392db1d5e27711ac4e00244768cd1d577b + languageName: node + linkType: hard + +"double-ended-queue@npm:^2.1.0-0": + version: 2.1.0-0 + resolution: "double-ended-queue@npm:2.1.0-0" + checksum: 10c0/9a412a556b3646e1a4667eaf201b1b8c210926a5ca943c819df7dfa7622cb1636c7ec4de388b48248d44cca799071ea05ca2bd76048d3f7671324ce84e58722f + languageName: node + linkType: hard + +"eastasianwidth@npm:^0.2.0": + version: 0.2.0 + resolution: "eastasianwidth@npm:0.2.0" + checksum: 10c0/26f364ebcdb6395f95124fda411f63137a4bfb5d3a06453f7f23dfe52502905bd84e0488172e0f9ec295fdc45f05c23d5d91baf16bd26f0fe9acd777a188dc39 + languageName: node + linkType: hard + +"emoji-regex@npm:^10.3.0": + version: 10.4.0 + resolution: "emoji-regex@npm:10.4.0" + checksum: 10c0/a3fcedfc58bfcce21a05a5f36a529d81e88d602100145fcca3dc6f795e3c8acc4fc18fe773fbf9b6d6e9371205edb3afa2668ec3473fa2aa7fd47d2a9d46482d + languageName: node + linkType: hard + +"emoji-regex@npm:^8.0.0": + version: 8.0.0 + resolution: "emoji-regex@npm:8.0.0" + checksum: 10c0/b6053ad39951c4cf338f9092d7bfba448cdfd46fe6a2a034700b149ac9ffbc137e361cbd3c442297f86bed2e5f7576c1b54cc0a6bf8ef5106cc62f496af35010 + languageName: node + linkType: hard + +"emoji-regex@npm:^9.2.2": + version: 9.2.2 + resolution: "emoji-regex@npm:9.2.2" + checksum: 
10c0/af014e759a72064cf66e6e694a7fc6b0ed3d8db680427b021a89727689671cefe9d04151b2cad51dbaf85d5ba790d061cd167f1cf32eb7b281f6368b3c181639 + languageName: node + linkType: hard + +"encoding@npm:^0.1.13": + version: 0.1.13 + resolution: "encoding@npm:0.1.13" + dependencies: + iconv-lite: "npm:^0.6.2" + checksum: 10c0/36d938712ff00fe1f4bac88b43bcffb5930c1efa57bbcdca9d67e1d9d6c57cfb1200fb01efe0f3109b2ce99b231f90779532814a81370a1bd3274a0f58585039 + languageName: node + linkType: hard + +"env-paths@npm:^2.2.0": + version: 2.2.1 + resolution: "env-paths@npm:2.2.1" + checksum: 10c0/285325677bf00e30845e330eec32894f5105529db97496ee3f598478e50f008c5352a41a30e5e72ec9de8a542b5a570b85699cd63bd2bc646dbcb9f311d83bc4 + languageName: node + linkType: hard + +"environment@npm:^1.0.0": + version: 1.1.0 + resolution: "environment@npm:1.1.0" + checksum: 10c0/fb26434b0b581ab397039e51ff3c92b34924a98b2039dcb47e41b7bca577b9dbf134a8eadb364415c74464b682e2d3afe1a4c0eb9873dc44ea814c5d3103331d + languageName: node + linkType: hard + +"err-code@npm:^2.0.2": + version: 2.0.3 + resolution: "err-code@npm:2.0.3" + checksum: 10c0/b642f7b4dd4a376e954947550a3065a9ece6733ab8e51ad80db727aaae0817c2e99b02a97a3d6cecc648a97848305e728289cf312d09af395403a90c9d4d8a66 + languageName: node + linkType: hard + +"es6-error@npm:^4.0.2": + version: 4.1.1 + resolution: "es6-error@npm:4.1.1" + checksum: 10c0/357663fb1e845c047d548c3d30f86e005db71e122678f4184ced0693f634688c3f3ef2d7de7d4af732f734de01f528b05954e270f06aa7d133679fb9fe6600ef + languageName: node + linkType: hard + +"escalade@npm:^3.1.1": + version: 3.2.0 + resolution: "escalade@npm:3.2.0" + checksum: 10c0/ced4dd3a78e15897ed3be74e635110bbf3b08877b0a41be50dcb325ee0e0b5f65fc2d50e9845194d7c4633f327e2e1c6cce00a71b617c5673df0374201d67f65 + languageName: node + linkType: hard + +"escape-string-regexp@npm:^4.0.0": + version: 4.0.0 + resolution: "escape-string-regexp@npm:4.0.0" + checksum: 
10c0/9497d4dd307d845bd7f75180d8188bb17ea8c151c1edbf6b6717c100e104d629dc2dfb687686181b0f4b7d732c7dfdc4d5e7a8ff72de1b0ca283a75bbb3a9cd9 + languageName: node + linkType: hard + +"eslint-config-prettier@npm:^10.1.8": + version: 10.1.8 + resolution: "eslint-config-prettier@npm:10.1.8" + peerDependencies: + eslint: ">=7.0.0" + bin: + eslint-config-prettier: bin/cli.js + checksum: 10c0/e1bcfadc9eccd526c240056b1e59c5cd26544fe59feb85f38f4f1f116caed96aea0b3b87868e68b3099e55caaac3f2e5b9f58110f85db893e83a332751192682 + languageName: node + linkType: hard + +"eslint-scope@npm:^8.4.0": + version: 8.4.0 + resolution: "eslint-scope@npm:8.4.0" + dependencies: + esrecurse: "npm:^4.3.0" + estraverse: "npm:^5.2.0" + checksum: 10c0/407f6c600204d0f3705bd557f81bd0189e69cd7996f408f8971ab5779c0af733d1af2f1412066b40ee1588b085874fc37a2333986c6521669cdbdd36ca5058e0 + languageName: node + linkType: hard + +"eslint-visitor-keys@npm:^3.4.3": + version: 3.4.3 + resolution: "eslint-visitor-keys@npm:3.4.3" + checksum: 10c0/92708e882c0a5ffd88c23c0b404ac1628cf20104a108c745f240a13c332a11aac54f49a22d5762efbffc18ecbc9a580d1b7ad034bf5f3cc3307e5cbff2ec9820 + languageName: node + linkType: hard + +"eslint-visitor-keys@npm:^4.2.0": + version: 4.2.0 + resolution: "eslint-visitor-keys@npm:4.2.0" + checksum: 10c0/2ed81c663b147ca6f578312919483eb040295bbab759e5a371953456c636c5b49a559883e2677112453728d66293c0a4c90ab11cab3428cf02a0236d2e738269 + languageName: node + linkType: hard + +"eslint-visitor-keys@npm:^4.2.1": + version: 4.2.1 + resolution: "eslint-visitor-keys@npm:4.2.1" + checksum: 10c0/fcd43999199d6740db26c58dbe0c2594623e31ca307e616ac05153c9272f12f1364f5a0b1917a8e962268fdecc6f3622c1c2908b4fcc2e047a106fe6de69dc43 + languageName: node + linkType: hard + +"eslint@npm:^9.34.0": + version: 9.34.0 + resolution: "eslint@npm:9.34.0" + dependencies: + "@eslint-community/eslint-utils": "npm:^4.2.0" + "@eslint-community/regexpp": "npm:^4.12.1" + "@eslint/config-array": "npm:^0.21.0" + "@eslint/config-helpers": 
"npm:^0.3.1" + "@eslint/core": "npm:^0.15.2" + "@eslint/eslintrc": "npm:^3.3.1" + "@eslint/js": "npm:9.34.0" + "@eslint/plugin-kit": "npm:^0.3.5" + "@humanfs/node": "npm:^0.16.6" + "@humanwhocodes/module-importer": "npm:^1.0.1" + "@humanwhocodes/retry": "npm:^0.4.2" + "@types/estree": "npm:^1.0.6" + "@types/json-schema": "npm:^7.0.15" + ajv: "npm:^6.12.4" + chalk: "npm:^4.0.0" + cross-spawn: "npm:^7.0.6" + debug: "npm:^4.3.2" + escape-string-regexp: "npm:^4.0.0" + eslint-scope: "npm:^8.4.0" + eslint-visitor-keys: "npm:^4.2.1" + espree: "npm:^10.4.0" + esquery: "npm:^1.5.0" + esutils: "npm:^2.0.2" + fast-deep-equal: "npm:^3.1.3" + file-entry-cache: "npm:^8.0.0" + find-up: "npm:^5.0.0" + glob-parent: "npm:^6.0.2" + ignore: "npm:^5.2.0" + imurmurhash: "npm:^0.1.4" + is-glob: "npm:^4.0.0" + json-stable-stringify-without-jsonify: "npm:^1.0.1" + lodash.merge: "npm:^4.6.2" + minimatch: "npm:^3.1.2" + natural-compare: "npm:^1.4.0" + optionator: "npm:^0.9.3" + peerDependencies: + jiti: "*" + peerDependenciesMeta: + jiti: + optional: true + bin: + eslint: bin/eslint.js + checksum: 10c0/ba3e54fa0c8ed23d062f91519afaae77fed922a6c4d76130b6cd32154bcb406aaea4b3c5ed88e0be40828c1d5b6921592f3947dbdc5e2043de6bd7aa341fe5ea + languageName: node + linkType: hard + +"espree@npm:^10.0.1": + version: 10.3.0 + resolution: "espree@npm:10.3.0" + dependencies: + acorn: "npm:^8.14.0" + acorn-jsx: "npm:^5.3.2" + eslint-visitor-keys: "npm:^4.2.0" + checksum: 10c0/272beeaca70d0a1a047d61baff64db04664a33d7cfb5d144f84bc8a5c6194c6c8ebe9cc594093ca53add88baa23e59b01e69e8a0160ab32eac570482e165c462 + languageName: node + linkType: hard + +"espree@npm:^10.4.0": + version: 10.4.0 + resolution: "espree@npm:10.4.0" + dependencies: + acorn: "npm:^8.15.0" + acorn-jsx: "npm:^5.3.2" + eslint-visitor-keys: "npm:^4.2.1" + checksum: 10c0/c63fe06131c26c8157b4083313cb02a9a54720a08e21543300e55288c40e06c3fc284bdecf108d3a1372c5934a0a88644c98714f38b6ae8ed272b40d9ea08d6b + languageName: node + linkType: hard + 
+"esquery@npm:^1.5.0": + version: 1.6.0 + resolution: "esquery@npm:1.6.0" + dependencies: + estraverse: "npm:^5.1.0" + checksum: 10c0/cb9065ec605f9da7a76ca6dadb0619dfb611e37a81e318732977d90fab50a256b95fee2d925fba7c2f3f0523aa16f91587246693bc09bc34d5a59575fe6e93d2 + languageName: node + linkType: hard + +"esrecurse@npm:^4.3.0": + version: 4.3.0 + resolution: "esrecurse@npm:4.3.0" + dependencies: + estraverse: "npm:^5.2.0" + checksum: 10c0/81a37116d1408ded88ada45b9fb16dbd26fba3aadc369ce50fcaf82a0bac12772ebd7b24cd7b91fc66786bf2c1ac7b5f196bc990a473efff972f5cb338877cf5 + languageName: node + linkType: hard + +"estraverse@npm:^5.1.0, estraverse@npm:^5.2.0": + version: 5.3.0 + resolution: "estraverse@npm:5.3.0" + checksum: 10c0/1ff9447b96263dec95d6d67431c5e0771eb9776427421260a3e2f0fdd5d6bd4f8e37a7338f5ad2880c9f143450c9b1e4fc2069060724570a49cf9cf0312bd107 + languageName: node + linkType: hard + +"estree-walker@npm:^2.0.2": + version: 2.0.2 + resolution: "estree-walker@npm:2.0.2" + checksum: 10c0/53a6c54e2019b8c914dc395890153ffdc2322781acf4bd7d1a32d7aedc1710807bdcd866ac133903d5629ec601fbb50abe8c2e5553c7f5a0afdd9b6af6c945af + languageName: node + linkType: hard + +"esutils@npm:^2.0.2": + version: 2.0.3 + resolution: "esutils@npm:2.0.3" + checksum: 10c0/9a2fe69a41bfdade834ba7c42de4723c97ec776e40656919c62cbd13607c45e127a003f05f724a1ea55e5029a4cf2de444b13009f2af71271e42d93a637137c7 + languageName: node + linkType: hard + +"eventemitter3@npm:^5.0.1": + version: 5.0.1 + resolution: "eventemitter3@npm:5.0.1" + checksum: 10c0/4ba5c00c506e6c786b4d6262cfbce90ddc14c10d4667e5c83ae993c9de88aa856033994dd2b35b83e8dc1170e224e66a319fa80adc4c32adcd2379bbc75da814 + languageName: node + linkType: hard + +"exponential-backoff@npm:^3.1.1": + version: 3.1.2 + resolution: "exponential-backoff@npm:3.1.2" + checksum: 10c0/d9d3e1eafa21b78464297df91f1776f7fbaa3d5e3f7f0995648ca5b89c069d17055033817348d9f4a43d1c20b0eab84f75af6991751e839df53e4dfd6f22e844 + languageName: node + linkType: hard + 
+"fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": + version: 3.1.3 + resolution: "fast-deep-equal@npm:3.1.3" + checksum: 10c0/40dedc862eb8992c54579c66d914635afbec43350afbbe991235fdcb4e3a8d5af1b23ae7e79bef7d4882d0ecee06c3197488026998fb19f72dc95acff1d1b1d0 + languageName: node + linkType: hard + +"fast-json-stable-stringify@npm:^2.0.0": + version: 2.1.0 + resolution: "fast-json-stable-stringify@npm:2.1.0" + checksum: 10c0/7f081eb0b8a64e0057b3bb03f974b3ef00135fbf36c1c710895cd9300f13c94ba809bb3a81cf4e1b03f6e5285610a61abbd7602d0652de423144dfee5a389c9b + languageName: node + linkType: hard + +"fast-levenshtein@npm:^2.0.6": + version: 2.0.6 + resolution: "fast-levenshtein@npm:2.0.6" + checksum: 10c0/111972b37338bcb88f7d9e2c5907862c280ebf4234433b95bc611e518d192ccb2d38119c4ac86e26b668d75f7f3894f4ff5c4982899afced7ca78633b08287c4 + languageName: node + linkType: hard + +"fdir@npm:^6.2.0, fdir@npm:^6.4.4": + version: 6.4.4 + resolution: "fdir@npm:6.4.4" + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + checksum: 10c0/6ccc33be16945ee7bc841e1b4178c0b4cf18d3804894cb482aa514651c962a162f96da7ffc6ebfaf0df311689fb70091b04dd6caffe28d56b9ebdc0e7ccadfdd + languageName: node + linkType: hard + +"file-entry-cache@npm:^8.0.0": + version: 8.0.0 + resolution: "file-entry-cache@npm:8.0.0" + dependencies: + flat-cache: "npm:^4.0.0" + checksum: 10c0/9e2b5938b1cd9b6d7e3612bdc533afd4ac17b2fc646569e9a8abbf2eb48e5eb8e316bc38815a3ef6a1b456f4107f0d0f055a614ca613e75db6bf9ff4d72c1638 + languageName: node + linkType: hard + +"file-uri-to-path@npm:1.0.0": + version: 1.0.0 + resolution: "file-uri-to-path@npm:1.0.0" + checksum: 10c0/3b545e3a341d322d368e880e1c204ef55f1d45cdea65f7efc6c6ce9e0c4d22d802d5629320eb779d006fe59624ac17b0e848d83cc5af7cd101f206cb704f5519 + languageName: node + linkType: hard + +"fill-range@npm:^7.1.1": + version: 7.1.1 + resolution: "fill-range@npm:7.1.1" + dependencies: + to-regex-range: "npm:^5.0.1" + checksum: 
10c0/b75b691bbe065472f38824f694c2f7449d7f5004aa950426a2c28f0306c60db9b880c0b0e4ed819997ffb882d1da02cfcfc819bddc94d71627f5269682edf018 + languageName: node + linkType: hard + +"find-up@npm:^5.0.0": + version: 5.0.0 + resolution: "find-up@npm:5.0.0" + dependencies: + locate-path: "npm:^6.0.0" + path-exists: "npm:^4.0.0" + checksum: 10c0/062c5a83a9c02f53cdd6d175a37ecf8f87ea5bbff1fdfb828f04bfa021441bc7583e8ebc0872a4c1baab96221fb8a8a275a19809fb93fbc40bd69ec35634069a + languageName: node + linkType: hard + +"flat-cache@npm:^4.0.0": + version: 4.0.1 + resolution: "flat-cache@npm:4.0.1" + dependencies: + flatted: "npm:^3.2.9" + keyv: "npm:^4.5.4" + checksum: 10c0/2c59d93e9faa2523e4fda6b4ada749bed432cfa28c8e251f33b25795e426a1c6dbada777afb1f74fcfff33934fdbdea921ee738fcc33e71adc9d6eca984a1cfc + languageName: node + linkType: hard + +"flat@npm:^5.0.2": + version: 5.0.2 + resolution: "flat@npm:5.0.2" + bin: + flat: cli.js + checksum: 10c0/f178b13482f0cd80c7fede05f4d10585b1f2fdebf26e12edc138e32d3150c6ea6482b7f12813a1091143bad52bb6d3596bca51a162257a21163c0ff438baa5fe + languageName: node + linkType: hard + +"flatted@npm:^3.2.9": + version: 3.3.2 + resolution: "flatted@npm:3.3.2" + checksum: 10c0/24cc735e74d593b6c767fe04f2ef369abe15b62f6906158079b9874bdb3ee5ae7110bb75042e70cd3f99d409d766f357caf78d5ecee9780206f5fdc5edbad334 + languageName: node + linkType: hard + +"foreground-child@npm:^3.1.0": + version: 3.3.1 + resolution: "foreground-child@npm:3.3.1" + dependencies: + cross-spawn: "npm:^7.0.6" + signal-exit: "npm:^4.0.1" + checksum: 10c0/8986e4af2430896e65bc2788d6679067294d6aee9545daefc84923a0a4b399ad9c7a3ea7bd8c0b2b80fdf4a92de4c69df3f628233ff3224260e9c1541a9e9ed3 + languageName: node + linkType: hard + +"fs-minipass@npm:^3.0.0": + version: 3.0.3 + resolution: "fs-minipass@npm:3.0.3" + dependencies: + minipass: "npm:^7.0.3" + checksum: 10c0/63e80da2ff9b621e2cb1596abcb9207f1cf82b968b116ccd7b959e3323144cce7fb141462200971c38bbf2ecca51695069db45265705bed09a7cd93ae5b89f94 + 
languageName: node + linkType: hard + +"fsevents@npm:~2.3.2": + version: 2.3.3 + resolution: "fsevents@npm:2.3.3" + dependencies: + node-gyp: "npm:latest" + checksum: 10c0/a1f0c44595123ed717febbc478aa952e47adfc28e2092be66b8ab1635147254ca6cfe1df792a8997f22716d4cbafc73309899ff7bfac2ac3ad8cf2e4ecc3ec60 + conditions: os=darwin + languageName: node + linkType: hard + +"fsevents@patch:fsevents@npm%3A~2.3.2#optional!builtin": + version: 2.3.3 + resolution: "fsevents@patch:fsevents@npm%3A2.3.3#optional!builtin::version=2.3.3&hash=df0bf1" + dependencies: + node-gyp: "npm:latest" + conditions: os=darwin + languageName: node + linkType: hard + +"function-bind@npm:^1.1.2": + version: 1.1.2 + resolution: "function-bind@npm:1.1.2" + checksum: 10c0/d8680ee1e5fcd4c197e4ac33b2b4dce03c71f4d91717292785703db200f5c21f977c568d28061226f9b5900cbcd2c84463646134fd5337e7925e0942bc3f46d5 + languageName: node + linkType: hard + +"get-caller-file@npm:^2.0.5": + version: 2.0.5 + resolution: "get-caller-file@npm:2.0.5" + checksum: 10c0/c6c7b60271931fa752aeb92f2b47e355eac1af3a2673f47c9589e8f8a41adc74d45551c1bc57b5e66a80609f10ffb72b6f575e4370d61cc3f7f3aaff01757cde + languageName: node + linkType: hard + +"get-east-asian-width@npm:^1.0.0": + version: 1.3.0 + resolution: "get-east-asian-width@npm:1.3.0" + checksum: 10c0/1a049ba697e0f9a4d5514c4623781c5246982bdb61082da6b5ae6c33d838e52ce6726407df285cdbb27ec1908b333cf2820989bd3e986e37bb20979437fdf34b + languageName: node + linkType: hard + +"glob-parent@npm:^6.0.2": + version: 6.0.2 + resolution: "glob-parent@npm:6.0.2" + dependencies: + is-glob: "npm:^4.0.3" + checksum: 10c0/317034d88654730230b3f43bb7ad4f7c90257a426e872ea0bf157473ac61c99bf5d205fad8f0185f989be8d2fa6d3c7dce1645d99d545b6ea9089c39f838e7f8 + languageName: node + linkType: hard + +"glob@npm:^10.2.2, glob@npm:^10.4.5": + version: 10.4.5 + resolution: "glob@npm:10.4.5" + dependencies: + foreground-child: "npm:^3.1.0" + jackspeak: "npm:^3.1.2" + minimatch: "npm:^9.0.4" + minipass: "npm:^7.1.2" + 
package-json-from-dist: "npm:^1.0.0" + path-scurry: "npm:^1.11.1" + bin: + glob: dist/esm/bin.mjs + checksum: 10c0/19a9759ea77b8e3ca0a43c2f07ecddc2ad46216b786bb8f993c445aee80d345925a21e5280c7b7c6c59e860a0154b84e4b2b60321fea92cd3c56b4a7489f160e + languageName: node + linkType: hard + +"globals@npm:^14.0.0": + version: 14.0.0 + resolution: "globals@npm:14.0.0" + checksum: 10c0/b96ff42620c9231ad468d4c58ff42afee7777ee1c963013ff8aabe095a451d0ceeb8dcd8ef4cbd64d2538cef45f787a78ba3a9574f4a634438963e334471302d + languageName: node + linkType: hard + +"globals@npm:^16.3.0": + version: 16.3.0 + resolution: "globals@npm:16.3.0" + checksum: 10c0/c62dc20357d1c0bf2be4545d6c4141265d1a229bf1c3294955efb5b5ef611145391895e3f2729f8603809e81b30b516c33e6c2597573844449978606aad6eb38 + languageName: node + linkType: hard + +"graceful-fs@npm:^4.2.6": + version: 4.2.11 + resolution: "graceful-fs@npm:4.2.11" + checksum: 10c0/386d011a553e02bc594ac2ca0bd6d9e4c22d7fa8cfbfc448a6d148c59ea881b092db9dbe3547ae4b88e55f1b01f7c4a2ecc53b310c042793e63aa44cf6c257f2 + languageName: node + linkType: hard + +"has-flag@npm:^4.0.0": + version: 4.0.0 + resolution: "has-flag@npm:4.0.0" + checksum: 10c0/2e789c61b7888d66993e14e8331449e525ef42aac53c627cc53d1c3334e768bcb6abdc4f5f0de1478a25beec6f0bd62c7549058b7ac53e924040d4f301f02fd1 + languageName: node + linkType: hard + +"hasown@npm:^2.0.2": + version: 2.0.2 + resolution: "hasown@npm:2.0.2" + dependencies: + function-bind: "npm:^1.1.2" + checksum: 10c0/3769d434703b8ac66b209a4cca0737519925bbdb61dd887f93a16372b14694c63ff4e797686d87c90f08168e81082248b9b028bad60d4da9e0d1148766f56eb9 + languageName: node + linkType: hard + +"he@npm:^1.2.0": + version: 1.2.0 + resolution: "he@npm:1.2.0" + bin: + he: bin/he + checksum: 10c0/a27d478befe3c8192f006cdd0639a66798979dfa6e2125c6ac582a19a5ebfec62ad83e8382e6036170d873f46e4536a7e795bf8b95bf7c247f4cc0825ccc8c17 + languageName: node + linkType: hard + +"http-cache-semantics@npm:^4.1.1": + version: 4.1.1 + resolution: 
"http-cache-semantics@npm:4.1.1" + checksum: 10c0/ce1319b8a382eb3cbb4a37c19f6bfe14e5bb5be3d09079e885e8c513ab2d3cd9214902f8a31c9dc4e37022633ceabfc2d697405deeaf1b8f3552bb4ed996fdfc + languageName: node + linkType: hard + +"http-proxy-agent@npm:^7.0.0": + version: 7.0.2 + resolution: "http-proxy-agent@npm:7.0.2" + dependencies: + agent-base: "npm:^7.1.0" + debug: "npm:^4.3.4" + checksum: 10c0/4207b06a4580fb85dd6dff521f0abf6db517489e70863dca1a0291daa7f2d3d2d6015a57bd702af068ea5cf9f1f6ff72314f5f5b4228d299c0904135d2aef921 + languageName: node + linkType: hard + +"https-proxy-agent@npm:^7.0.1": + version: 7.0.6 + resolution: "https-proxy-agent@npm:7.0.6" + dependencies: + agent-base: "npm:^7.1.2" + debug: "npm:4" + checksum: 10c0/f729219bc735edb621fa30e6e84e60ee5d00802b8247aac0d7b79b0bd6d4b3294737a337b93b86a0bd9e68099d031858a39260c976dc14cdbba238ba1f8779ac + languageName: node + linkType: hard + +"husky@npm:^9.1.7": + version: 9.1.7 + resolution: "husky@npm:9.1.7" + bin: + husky: bin.js + checksum: 10c0/35bb110a71086c48906aa7cd3ed4913fb913823715359d65e32e0b964cb1e255593b0ae8014a5005c66a68e6fa66c38dcfa8056dbbdfb8b0187c0ffe7ee3a58f + languageName: node + linkType: hard + +"iconv-lite@npm:^0.6.2": + version: 0.6.3 + resolution: "iconv-lite@npm:0.6.3" + dependencies: + safer-buffer: "npm:>= 2.1.2 < 3.0.0" + checksum: 10c0/98102bc66b33fcf5ac044099d1257ba0b7ad5e3ccd3221f34dd508ab4070edff183276221684e1e0555b145fce0850c9f7d2b60a9fcac50fbb4ea0d6e845a3b1 + languageName: node + linkType: hard + +"ignore@npm:^5.2.0": + version: 5.3.2 + resolution: "ignore@npm:5.3.2" + checksum: 10c0/f9f652c957983634ded1e7f02da3b559a0d4cc210fca3792cb67f1b153623c9c42efdc1c4121af171e295444459fc4a9201101fb041b1104a3c000bccb188337 + languageName: node + linkType: hard + +"import-fresh@npm:^3.2.1": + version: 3.3.0 + resolution: "import-fresh@npm:3.3.0" + dependencies: + parent-module: "npm:^1.0.0" + resolve-from: "npm:^4.0.0" + checksum: 
10c0/7f882953aa6b740d1f0e384d0547158bc86efbf2eea0f1483b8900a6f65c5a5123c2cf09b0d542cc419d0b98a759ecaeb394237e97ea427f2da221dc3cd80cc3 + languageName: node + linkType: hard + +"imurmurhash@npm:^0.1.4": + version: 0.1.4 + resolution: "imurmurhash@npm:0.1.4" + checksum: 10c0/8b51313850dd33605c6c9d3fd9638b714f4c4c40250cff658209f30d40da60f78992fb2df5dabee4acf589a6a82bbc79ad5486550754bd9ec4e3fc0d4a57d6a6 + languageName: node + linkType: hard + +"ioredis@npm:^5.7.0": + version: 5.7.0 + resolution: "ioredis@npm:5.7.0" + dependencies: + "@ioredis/commands": "npm:^1.3.0" + cluster-key-slot: "npm:^1.1.0" + debug: "npm:^4.3.4" + denque: "npm:^2.1.0" + lodash.defaults: "npm:^4.2.0" + lodash.isarguments: "npm:^3.1.0" + redis-errors: "npm:^1.2.0" + redis-parser: "npm:^3.0.0" + standard-as-callback: "npm:^2.1.0" + checksum: 10c0/c63c521a953bfaf29f8c8871b122af38e439328336fa238f83bfbb066556f64daf69ed7a4ec01fc7b9ee1f0862059dd188b8c684150125d362d36642399b30ee + languageName: node + linkType: hard + +"ip-address@npm:^9.0.5": + version: 9.0.5 + resolution: "ip-address@npm:9.0.5" + dependencies: + jsbn: "npm:1.1.0" + sprintf-js: "npm:^1.1.3" + checksum: 10c0/331cd07fafcb3b24100613e4b53e1a2b4feab11e671e655d46dc09ee233da5011284d09ca40c4ecbdfe1d0004f462958675c224a804259f2f78d2465a87824bc + languageName: node + linkType: hard + +"is-core-module@npm:^2.16.0": + version: 2.16.0 + resolution: "is-core-module@npm:2.16.0" + dependencies: + hasown: "npm:^2.0.2" + checksum: 10c0/57e3b4bf3503a5ace3e61ef030a2eefa03d27827647b22968456e3e4befffed7c7aa849eea2e029f4f74a119a2d53cc391d5bad59c9352ecc9b79be3fd2acf79 + languageName: node + linkType: hard + +"is-extglob@npm:^2.1.1": + version: 2.1.1 + resolution: "is-extglob@npm:2.1.1" + checksum: 10c0/5487da35691fbc339700bbb2730430b07777a3c21b9ebaecb3072512dfd7b4ba78ac2381a87e8d78d20ea08affb3f1971b4af629173a6bf435ff8a4c47747912 + languageName: node + linkType: hard + +"is-fullwidth-code-point@npm:^3.0.0": + version: 3.0.0 + resolution: 
"is-fullwidth-code-point@npm:3.0.0" + checksum: 10c0/bb11d825e049f38e04c06373a8d72782eee0205bda9d908cc550ccb3c59b99d750ff9537982e01733c1c94a58e35400661f57042158ff5e8f3e90cf936daf0fc + languageName: node + linkType: hard + +"is-fullwidth-code-point@npm:^4.0.0": + version: 4.0.0 + resolution: "is-fullwidth-code-point@npm:4.0.0" + checksum: 10c0/df2a717e813567db0f659c306d61f2f804d480752526886954a2a3e2246c7745fd07a52b5fecf2b68caf0a6c79dcdace6166fdf29cc76ed9975cc334f0a018b8 + languageName: node + linkType: hard + +"is-fullwidth-code-point@npm:^5.0.0": + version: 5.0.0 + resolution: "is-fullwidth-code-point@npm:5.0.0" + dependencies: + get-east-asian-width: "npm:^1.0.0" + checksum: 10c0/cd591b27d43d76b05fa65ed03eddce57a16e1eca0b7797ff7255de97019bcaf0219acfc0c4f7af13319e13541f2a53c0ace476f442b13267b9a6a7568f2b65c8 + languageName: node + linkType: hard + +"is-glob@npm:^4.0.0, is-glob@npm:^4.0.3": + version: 4.0.3 + resolution: "is-glob@npm:4.0.3" + dependencies: + is-extglob: "npm:^2.1.1" + checksum: 10c0/17fb4014e22be3bbecea9b2e3a76e9e34ff645466be702f1693e8f1ee1adac84710d0be0bd9f967d6354036fd51ab7c2741d954d6e91dae6bb69714de92c197a + languageName: node + linkType: hard + +"is-module@npm:^1.0.0": + version: 1.0.0 + resolution: "is-module@npm:1.0.0" + checksum: 10c0/795a3914bcae7c26a1c23a1e5574c42eac13429625045737bf3e324ce865c0601d61aee7a5afbca1bee8cb300c7d9647e7dc98860c9bdbc3b7fdc51d8ac0bffc + languageName: node + linkType: hard + +"is-number@npm:^7.0.0": + version: 7.0.0 + resolution: "is-number@npm:7.0.0" + checksum: 10c0/b4686d0d3053146095ccd45346461bc8e53b80aeb7671cc52a4de02dbbf7dc0d1d2a986e2fe4ae206984b4d34ef37e8b795ebc4f4295c978373e6575e295d811 + languageName: node + linkType: hard + +"is-plain-obj@npm:^2.1.0": + version: 2.1.0 + resolution: "is-plain-obj@npm:2.1.0" + checksum: 10c0/e5c9814cdaa627a9ad0a0964ded0e0491bfd9ace405c49a5d63c88b30a162f1512c069d5b80997893c4d0181eadc3fed02b4ab4b81059aba5620bfcdfdeb9c53 + languageName: node + linkType: hard + 
+"is-reference@npm:1.2.1": + version: 1.2.1 + resolution: "is-reference@npm:1.2.1" + dependencies: + "@types/estree": "npm:*" + checksum: 10c0/7dc819fc8de7790264a0a5d531164f9f5b9ef5aa1cd05f35322d14db39c8a2ec78fd5d4bf57f9789f3ddd2b3abeea7728432b759636157a42db12a9e8c3b549b + languageName: node + linkType: hard + +"is-unicode-supported@npm:^0.1.0": + version: 0.1.0 + resolution: "is-unicode-supported@npm:0.1.0" + checksum: 10c0/00cbe3455c3756be68d2542c416cab888aebd5012781d6819749fefb15162ff23e38501fe681b3d751c73e8ff561ac09a5293eba6f58fdf0178462ce6dcb3453 + languageName: node + linkType: hard + +"isexe@npm:^2.0.0": + version: 2.0.0 + resolution: "isexe@npm:2.0.0" + checksum: 10c0/228cfa503fadc2c31596ab06ed6aa82c9976eec2bfd83397e7eaf06d0ccf42cd1dfd6743bf9aeb01aebd4156d009994c5f76ea898d2832c1fe342da923ca457d + languageName: node + linkType: hard + +"isexe@npm:^3.1.1": + version: 3.1.1 + resolution: "isexe@npm:3.1.1" + checksum: 10c0/9ec257654093443eb0a528a9c8cbba9c0ca7616ccb40abd6dde7202734d96bb86e4ac0d764f0f8cd965856aacbff2f4ce23e730dc19dfb41e3b0d865ca6fdcc7 + languageName: node + linkType: hard + +"jackspeak@npm:^3.1.2": + version: 3.4.3 + resolution: "jackspeak@npm:3.4.3" + dependencies: + "@isaacs/cliui": "npm:^8.0.2" + "@pkgjs/parseargs": "npm:^0.11.0" + dependenciesMeta: + "@pkgjs/parseargs": + optional: true + checksum: 10c0/6acc10d139eaefdbe04d2f679e6191b3abf073f111edf10b1de5302c97ec93fffeb2fdd8681ed17f16268aa9dd4f8c588ed9d1d3bffbbfa6e8bf897cbb3149b9 + languageName: node + linkType: hard + +"js-yaml@npm:^4.1.0": + version: 4.1.0 + resolution: "js-yaml@npm:4.1.0" + dependencies: + argparse: "npm:^2.0.1" + bin: + js-yaml: bin/js-yaml.js + checksum: 10c0/184a24b4eaacfce40ad9074c64fd42ac83cf74d8c8cd137718d456ced75051229e5061b8633c3366b8aada17945a7a356b337828c19da92b51ae62126575018f + languageName: node + linkType: hard + +"jsbn@npm:1.1.0": + version: 1.1.0 + resolution: "jsbn@npm:1.1.0" + checksum: 
10c0/4f907fb78d7b712e11dea8c165fe0921f81a657d3443dde75359ed52eb2b5d33ce6773d97985a089f09a65edd80b11cb75c767b57ba47391fee4c969f7215c96 + languageName: node + linkType: hard + +"json-buffer@npm:3.0.1": + version: 3.0.1 + resolution: "json-buffer@npm:3.0.1" + checksum: 10c0/0d1c91569d9588e7eef2b49b59851f297f3ab93c7b35c7c221e288099322be6b562767d11e4821da500f3219542b9afd2e54c5dc573107c1126ed1080f8e96d7 + languageName: node + linkType: hard + +"json-schema-traverse@npm:^0.4.1": + version: 0.4.1 + resolution: "json-schema-traverse@npm:0.4.1" + checksum: 10c0/108fa90d4cc6f08243aedc6da16c408daf81793bf903e9fd5ab21983cda433d5d2da49e40711da016289465ec2e62e0324dcdfbc06275a607fe3233fde4942ce + languageName: node + linkType: hard + +"json-stable-stringify-without-jsonify@npm:^1.0.1": + version: 1.0.1 + resolution: "json-stable-stringify-without-jsonify@npm:1.0.1" + checksum: 10c0/cb168b61fd4de83e58d09aaa6425ef71001bae30d260e2c57e7d09a5fd82223e2f22a042dedaab8db23b7d9ae46854b08bb1f91675a8be11c5cffebef5fb66a5 + languageName: node + linkType: hard + +"keyv@npm:^4.5.4": + version: 4.5.4 + resolution: "keyv@npm:4.5.4" + dependencies: + json-buffer: "npm:3.0.1" + checksum: 10c0/aa52f3c5e18e16bb6324876bb8b59dd02acf782a4b789c7b2ae21107fab95fab3890ed448d4f8dba80ce05391eeac4bfabb4f02a20221342982f806fa2cf271e + languageName: node + linkType: hard + +"levn@npm:^0.4.1": + version: 0.4.1 + resolution: "levn@npm:0.4.1" + dependencies: + prelude-ls: "npm:^1.2.1" + type-check: "npm:~0.4.0" + checksum: 10c0/effb03cad7c89dfa5bd4f6989364bfc79994c2042ec5966cb9b95990e2edee5cd8969ddf42616a0373ac49fac1403437deaf6e9050fbbaa3546093a59b9ac94e + languageName: node + linkType: hard + +"lilconfig@npm:^3.1.3": + version: 3.1.3 + resolution: "lilconfig@npm:3.1.3" + checksum: 10c0/f5604e7240c5c275743561442fbc5abf2a84ad94da0f5adc71d25e31fa8483048de3dcedcb7a44112a942fed305fd75841cdf6c9681c7f640c63f1049e9a5dcc + languageName: node + linkType: hard + +"lint-staged@npm:^16.1.5": + version: 16.1.5 + resolution: 
"lint-staged@npm:16.1.5" + dependencies: + chalk: "npm:^5.5.0" + commander: "npm:^14.0.0" + debug: "npm:^4.4.1" + lilconfig: "npm:^3.1.3" + listr2: "npm:^9.0.1" + micromatch: "npm:^4.0.8" + nano-spawn: "npm:^1.0.2" + pidtree: "npm:^0.6.0" + string-argv: "npm:^0.3.2" + yaml: "npm:^2.8.1" + bin: + lint-staged: bin/lint-staged.js + checksum: 10c0/771e7be871f1d74ed09ef4e4eae5f835ed962965db7709be26cccf71bef8fed34f8d5d92f193b2a6fad32c12d955850aa74008e6180fabea8a7a6666cba2ac39 + languageName: node + linkType: hard + +"listr2@npm:^9.0.1": + version: 9.0.2 + resolution: "listr2@npm:9.0.2" + dependencies: + cli-truncate: "npm:^4.0.0" + colorette: "npm:^2.0.20" + eventemitter3: "npm:^5.0.1" + log-update: "npm:^6.1.0" + rfdc: "npm:^1.4.1" + wrap-ansi: "npm:^9.0.0" + checksum: 10c0/f256ae893d5798d41b50b388b9bb77aa5a50f53e46ab043435956556e10f933931413e130b6c0e36c3e1f03d8becacb42beeec874fe5b10f8eb4e1be0fef229f + languageName: node + linkType: hard + +"locate-path@npm:^6.0.0": + version: 6.0.0 + resolution: "locate-path@npm:6.0.0" + dependencies: + p-locate: "npm:^5.0.0" + checksum: 10c0/d3972ab70dfe58ce620e64265f90162d247e87159b6126b01314dd67be43d50e96a50b517bce2d9452a79409c7614054c277b5232377de50416564a77ac7aad3 + languageName: node + linkType: hard + +"lodash.defaults@npm:^4.2.0": + version: 4.2.0 + resolution: "lodash.defaults@npm:4.2.0" + checksum: 10c0/d5b77aeb702caa69b17be1358faece33a84497bcca814897383c58b28a2f8dfc381b1d9edbec239f8b425126a3bbe4916223da2a576bb0411c2cefd67df80707 + languageName: node + linkType: hard + +"lodash.isarguments@npm:^3.1.0": + version: 3.1.0 + resolution: "lodash.isarguments@npm:3.1.0" + checksum: 10c0/5e8f95ba10975900a3920fb039a3f89a5a79359a1b5565e4e5b4310ed6ebe64011e31d402e34f577eca983a1fc01ff86c926e3cbe602e1ddfc858fdd353e62d8 + languageName: node + linkType: hard + +"lodash.merge@npm:^4.6.2": + version: 4.6.2 + resolution: "lodash.merge@npm:4.6.2" + checksum: 
10c0/402fa16a1edd7538de5b5903a90228aa48eb5533986ba7fa26606a49db2572bf414ff73a2c9f5d5fd36b31c46a5d5c7e1527749c07cbcf965ccff5fbdf32c506 + languageName: node + linkType: hard + +"log-symbols@npm:^4.1.0": + version: 4.1.0 + resolution: "log-symbols@npm:4.1.0" + dependencies: + chalk: "npm:^4.1.0" + is-unicode-supported: "npm:^0.1.0" + checksum: 10c0/67f445a9ffa76db1989d0fa98586e5bc2fd5247260dafb8ad93d9f0ccd5896d53fb830b0e54dade5ad838b9de2006c826831a3c528913093af20dff8bd24aca6 + languageName: node + linkType: hard + +"log-update@npm:^6.1.0": + version: 6.1.0 + resolution: "log-update@npm:6.1.0" + dependencies: + ansi-escapes: "npm:^7.0.0" + cli-cursor: "npm:^5.0.0" + slice-ansi: "npm:^7.1.0" + strip-ansi: "npm:^7.1.0" + wrap-ansi: "npm:^9.0.0" + checksum: 10c0/4b350c0a83d7753fea34dcac6cd797d1dc9603291565de009baa4aa91c0447eab0d3815a05c8ec9ac04fdfffb43c82adcdb03ec1fceafd8518e1a8c1cff4ff89 + languageName: node + linkType: hard + +"lru-cache@npm:^10.0.1, lru-cache@npm:^10.2.0": + version: 10.4.3 + resolution: "lru-cache@npm:10.4.3" + checksum: 10c0/ebd04fbca961e6c1d6c0af3799adcc966a1babe798f685bb84e6599266599cd95d94630b10262f5424539bc4640107e8a33aa28585374abf561d30d16f4b39fb + languageName: node + linkType: hard + +"magic-string@npm:^0.30.3": + version: 0.30.17 + resolution: "magic-string@npm:0.30.17" + dependencies: + "@jridgewell/sourcemap-codec": "npm:^1.5.0" + checksum: 10c0/16826e415d04b88378f200fe022b53e638e3838b9e496edda6c0e086d7753a44a6ed187adc72d19f3623810589bf139af1a315541cd6a26ae0771a0193eaf7b8 + languageName: node + linkType: hard + +"make-fetch-happen@npm:^14.0.3": + version: 14.0.3 + resolution: "make-fetch-happen@npm:14.0.3" + dependencies: + "@npmcli/agent": "npm:^3.0.0" + cacache: "npm:^19.0.1" + http-cache-semantics: "npm:^4.1.1" + minipass: "npm:^7.0.2" + minipass-fetch: "npm:^4.0.0" + minipass-flush: "npm:^1.0.5" + minipass-pipeline: "npm:^1.2.4" + negotiator: "npm:^1.0.0" + proc-log: "npm:^5.0.0" + promise-retry: "npm:^2.0.1" + ssri: "npm:^12.0.0" + 
checksum: 10c0/c40efb5e5296e7feb8e37155bde8eb70bc57d731b1f7d90e35a092fde403d7697c56fb49334d92d330d6f1ca29a98142036d6480a12681133a0a1453164cb2f0 + languageName: node + linkType: hard + +"micromatch@npm:^4.0.8": + version: 4.0.8 + resolution: "micromatch@npm:4.0.8" + dependencies: + braces: "npm:^3.0.3" + picomatch: "npm:^2.3.1" + checksum: 10c0/166fa6eb926b9553f32ef81f5f531d27b4ce7da60e5baf8c021d043b27a388fb95e46a8038d5045877881e673f8134122b59624d5cecbd16eb50a42e7a6b5ca8 + languageName: node + linkType: hard + +"mimic-function@npm:^5.0.0": + version: 5.0.1 + resolution: "mimic-function@npm:5.0.1" + checksum: 10c0/f3d9464dd1816ecf6bdf2aec6ba32c0728022039d992f178237d8e289b48764fee4131319e72eedd4f7f094e22ded0af836c3187a7edc4595d28dd74368fd81d + languageName: node + linkType: hard + +"minimatch@npm:^3.1.2": + version: 3.1.2 + resolution: "minimatch@npm:3.1.2" + dependencies: + brace-expansion: "npm:^1.1.7" + checksum: 10c0/0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311 + languageName: node + linkType: hard + +"minimatch@npm:^9.0.4, minimatch@npm:^9.0.5": + version: 9.0.5 + resolution: "minimatch@npm:9.0.5" + dependencies: + brace-expansion: "npm:^2.0.1" + checksum: 10c0/de96cf5e35bdf0eab3e2c853522f98ffbe9a36c37797778d2665231ec1f20a9447a7e567cb640901f89e4daaa95ae5d70c65a9e8aa2bb0019b6facbc3c0575ed + languageName: node + linkType: hard + +"minimist@npm:^1.2.0": + version: 1.2.8 + resolution: "minimist@npm:1.2.8" + checksum: 10c0/19d3fcdca050087b84c2029841a093691a91259a47def2f18222f41e7645a0b7c44ef4b40e88a1e58a40c84d2ef0ee6047c55594d298146d0eb3f6b737c20ce6 + languageName: node + linkType: hard + +"minipass-collect@npm:^2.0.1": + version: 2.0.1 + resolution: "minipass-collect@npm:2.0.1" + dependencies: + minipass: "npm:^7.0.3" + checksum: 10c0/5167e73f62bb74cc5019594709c77e6a742051a647fe9499abf03c71dca75515b7959d67a764bdc4f8b361cf897fbf25e2d9869ee039203ed45240f48b9aa06e + languageName: node + 
linkType: hard + +"minipass-fetch@npm:^4.0.0": + version: 4.0.1 + resolution: "minipass-fetch@npm:4.0.1" + dependencies: + encoding: "npm:^0.1.13" + minipass: "npm:^7.0.3" + minipass-sized: "npm:^1.0.3" + minizlib: "npm:^3.0.1" + dependenciesMeta: + encoding: + optional: true + checksum: 10c0/a3147b2efe8e078c9bf9d024a0059339c5a09c5b1dded6900a219c218cc8b1b78510b62dae556b507304af226b18c3f1aeb1d48660283602d5b6586c399eed5c + languageName: node + linkType: hard + +"minipass-flush@npm:^1.0.5": + version: 1.0.5 + resolution: "minipass-flush@npm:1.0.5" + dependencies: + minipass: "npm:^3.0.0" + checksum: 10c0/2a51b63feb799d2bb34669205eee7c0eaf9dce01883261a5b77410c9408aa447e478efd191b4de6fc1101e796ff5892f8443ef20d9544385819093dbb32d36bd + languageName: node + linkType: hard + +"minipass-pipeline@npm:^1.2.4": + version: 1.2.4 + resolution: "minipass-pipeline@npm:1.2.4" + dependencies: + minipass: "npm:^3.0.0" + checksum: 10c0/cbda57cea20b140b797505dc2cac71581a70b3247b84480c1fed5ca5ba46c25ecc25f68bfc9e6dcb1a6e9017dab5c7ada5eab73ad4f0a49d84e35093e0c643f2 + languageName: node + linkType: hard + +"minipass-sized@npm:^1.0.3": + version: 1.0.3 + resolution: "minipass-sized@npm:1.0.3" + dependencies: + minipass: "npm:^3.0.0" + checksum: 10c0/298f124753efdc745cfe0f2bdfdd81ba25b9f4e753ca4a2066eb17c821f25d48acea607dfc997633ee5bf7b6dfffb4eee4f2051eb168663f0b99fad2fa4829cb + languageName: node + linkType: hard + +"minipass@npm:^3.0.0": + version: 3.3.6 + resolution: "minipass@npm:3.3.6" + dependencies: + yallist: "npm:^4.0.0" + checksum: 10c0/a114746943afa1dbbca8249e706d1d38b85ed1298b530f5808ce51f8e9e941962e2a5ad2e00eae7dd21d8a4aae6586a66d4216d1a259385e9d0358f0c1eba16c + languageName: node + linkType: hard + +"minipass@npm:^5.0.0 || ^6.0.2 || ^7.0.0, minipass@npm:^7.0.2, minipass@npm:^7.0.3, minipass@npm:^7.0.4, minipass@npm:^7.1.2": + version: 7.1.2 + resolution: "minipass@npm:7.1.2" + checksum: 
10c0/b0fd20bb9fb56e5fa9a8bfac539e8915ae07430a619e4b86ff71f5fc757ef3924b23b2c4230393af1eda647ed3d75739e4e0acb250a6b1eb277cf7f8fe449557 + languageName: node + linkType: hard + +"minizlib@npm:^3.0.1": + version: 3.0.2 + resolution: "minizlib@npm:3.0.2" + dependencies: + minipass: "npm:^7.1.2" + checksum: 10c0/9f3bd35e41d40d02469cb30470c55ccc21cae0db40e08d1d0b1dff01cc8cc89a6f78e9c5d2b7c844e485ec0a8abc2238111213fdc5b2038e6d1012eacf316f78 + languageName: node + linkType: hard + +"mkdirp@npm:^3.0.1": + version: 3.0.1 + resolution: "mkdirp@npm:3.0.1" + bin: + mkdirp: dist/cjs/src/bin.js + checksum: 10c0/9f2b975e9246351f5e3a40dcfac99fcd0baa31fbfab615fe059fb11e51f10e4803c63de1f384c54d656e4db31d000e4767e9ef076a22e12a641357602e31d57d + languageName: node + linkType: hard + +"mocha@npm:^11.7.1": + version: 11.7.1 + resolution: "mocha@npm:11.7.1" + dependencies: + browser-stdout: "npm:^1.3.1" + chokidar: "npm:^4.0.1" + debug: "npm:^4.3.5" + diff: "npm:^7.0.0" + escape-string-regexp: "npm:^4.0.0" + find-up: "npm:^5.0.0" + glob: "npm:^10.4.5" + he: "npm:^1.2.0" + js-yaml: "npm:^4.1.0" + log-symbols: "npm:^4.1.0" + minimatch: "npm:^9.0.5" + ms: "npm:^2.1.3" + picocolors: "npm:^1.1.1" + serialize-javascript: "npm:^6.0.2" + strip-json-comments: "npm:^3.1.1" + supports-color: "npm:^8.1.1" + workerpool: "npm:^9.2.0" + yargs: "npm:^17.7.2" + yargs-parser: "npm:^21.1.1" + yargs-unparser: "npm:^2.0.0" + bin: + _mocha: bin/_mocha + mocha: bin/mocha.js + checksum: 10c0/63817742cb265035bdb7796be90f4e8f3c53400d22e41c03310a65eee99baca653fa28992b04ec349bfe2b61857658610996850c0ddacf0a2aac13b755b1132d + languageName: node + linkType: hard + +"ms@npm:^2.1.3": + version: 2.1.3 + resolution: "ms@npm:2.1.3" + checksum: 10c0/d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48 + languageName: node + linkType: hard + +"nan@npm:^2.14.1": + version: 2.22.2 + resolution: "nan@npm:2.22.2" + dependencies: + node-gyp: "npm:latest" + 
checksum: 10c0/971f963b8120631880fa47a389c71b00cadc1c1b00ef8f147782a3f4387d4fc8195d0695911272d57438c11562fb27b24c4ae5f8c05d5e4eeb4478ba51bb73c5 + languageName: node + linkType: hard + +"nano-spawn@npm:^1.0.2": + version: 1.0.2 + resolution: "nano-spawn@npm:1.0.2" + checksum: 10c0/d8cec78f127a44aa5e38be01746b3d963a8dcf8b00b4a05bf259b5369af2225b8c7dc9d12517050b90234e5c3eeea4ece5d18a5f9c6c3462b56f9f595f07e632 + languageName: node + linkType: hard + +"natural-compare@npm:^1.4.0": + version: 1.4.0 + resolution: "natural-compare@npm:1.4.0" + checksum: 10c0/f5f9a7974bfb28a91afafa254b197f0f22c684d4a1731763dda960d2c8e375b36c7d690e0d9dc8fba774c537af14a7e979129bca23d88d052fbeb9466955e447 + languageName: node + linkType: hard + +"negotiator@npm:^1.0.0": + version: 1.0.0 + resolution: "negotiator@npm:1.0.0" + checksum: 10c0/4c559dd52669ea48e1914f9d634227c561221dd54734070791f999c52ed0ff36e437b2e07d5c1f6e32909fc625fe46491c16e4a8f0572567d4dd15c3a4fda04b + languageName: node + linkType: hard + +"node-gyp@npm:latest": + version: 11.2.0 + resolution: "node-gyp@npm:11.2.0" + dependencies: + env-paths: "npm:^2.2.0" + exponential-backoff: "npm:^3.1.1" + graceful-fs: "npm:^4.2.6" + make-fetch-happen: "npm:^14.0.3" + nopt: "npm:^8.0.0" + proc-log: "npm:^5.0.0" + semver: "npm:^7.3.5" + tar: "npm:^7.4.3" + tinyglobby: "npm:^0.2.12" + which: "npm:^5.0.0" + bin: + node-gyp: bin/node-gyp.js + checksum: 10c0/bd8d8c76b06be761239b0c8680f655f6a6e90b48e44d43415b11c16f7e8c15be346fba0cbf71588c7cdfb52c419d928a7d3db353afc1d952d19756237d8f10b9 + languageName: node + linkType: hard + +"nopt@npm:^8.0.0": + version: 8.1.0 + resolution: "nopt@npm:8.1.0" + dependencies: + abbrev: "npm:^3.0.0" + bin: + nopt: bin/nopt.js + checksum: 10c0/62e9ea70c7a3eb91d162d2c706b6606c041e4e7b547cbbb48f8b3695af457dd6479904d7ace600856bf923dd8d1ed0696f06195c8c20f02ac87c1da0e1d315ef + languageName: node + linkType: hard + +"onetime@npm:^7.0.0": + version: 7.0.0 + resolution: "onetime@npm:7.0.0" + dependencies: + mimic-function: 
"npm:^5.0.0" + checksum: 10c0/5cb9179d74b63f52a196a2e7037ba2b9a893245a5532d3f44360012005c9cadb60851d56716ebff18a6f47129dab7168022445df47c2aff3b276d92585ed1221 + languageName: node + linkType: hard + +"optionator@npm:^0.9.3": + version: 0.9.4 + resolution: "optionator@npm:0.9.4" + dependencies: + deep-is: "npm:^0.1.3" + fast-levenshtein: "npm:^2.0.6" + levn: "npm:^0.4.1" + prelude-ls: "npm:^1.2.1" + type-check: "npm:^0.4.0" + word-wrap: "npm:^1.2.5" + checksum: 10c0/4afb687a059ee65b61df74dfe87d8d6815cd6883cb8b3d5883a910df72d0f5d029821f37025e4bccf4048873dbdb09acc6d303d27b8f76b1a80dd5a7d5334675 + languageName: node + linkType: hard + +"p-limit@npm:^3.0.2": + version: 3.1.0 + resolution: "p-limit@npm:3.1.0" + dependencies: + yocto-queue: "npm:^0.1.0" + checksum: 10c0/9db675949dbdc9c3763c89e748d0ef8bdad0afbb24d49ceaf4c46c02c77d30db4e0652ed36d0a0a7a95154335fab810d95c86153105bb73b3a90448e2bb14e1a + languageName: node + linkType: hard + +"p-locate@npm:^5.0.0": + version: 5.0.0 + resolution: "p-locate@npm:5.0.0" + dependencies: + p-limit: "npm:^3.0.2" + checksum: 10c0/2290d627ab7903b8b70d11d384fee714b797f6040d9278932754a6860845c4d3190603a0772a663c8cb5a7b21d1b16acb3a6487ebcafa9773094edc3dfe6009a + languageName: node + linkType: hard + +"p-map@npm:^7.0.2": + version: 7.0.3 + resolution: "p-map@npm:7.0.3" + checksum: 10c0/46091610da2b38ce47bcd1d8b4835a6fa4e832848a6682cf1652bc93915770f4617afc844c10a77d1b3e56d2472bb2d5622353fa3ead01a7f42b04fc8e744a5c + languageName: node + linkType: hard + +"package-json-from-dist@npm:^1.0.0": + version: 1.0.1 + resolution: "package-json-from-dist@npm:1.0.1" + checksum: 10c0/62ba2785eb655fec084a257af34dbe24292ab74516d6aecef97ef72d4897310bc6898f6c85b5cd22770eaa1ce60d55a0230e150fb6a966e3ecd6c511e23d164b + languageName: node + linkType: hard + +"parent-module@npm:^1.0.0": + version: 1.0.1 + resolution: "parent-module@npm:1.0.1" + dependencies: + callsites: "npm:^3.0.0" + checksum: 
10c0/c63d6e80000d4babd11978e0d3fee386ca7752a02b035fd2435960ffaa7219dc42146f07069fb65e6e8bf1caef89daf9af7535a39bddf354d78bf50d8294f556 + languageName: node + linkType: hard + +"path-exists@npm:^4.0.0": + version: 4.0.0 + resolution: "path-exists@npm:4.0.0" + checksum: 10c0/8c0bd3f5238188197dc78dced15207a4716c51cc4e3624c44fc97acf69558f5ebb9a2afff486fe1b4ee148e0c133e96c5e11a9aa5c48a3006e3467da070e5e1b + languageName: node + linkType: hard + +"path-key@npm:^3.1.0": + version: 3.1.1 + resolution: "path-key@npm:3.1.1" + checksum: 10c0/748c43efd5a569c039d7a00a03b58eecd1d75f3999f5a28303d75f521288df4823bc057d8784eb72358b2895a05f29a070bc9f1f17d28226cc4e62494cc58c4c + languageName: node + linkType: hard + +"path-parse@npm:^1.0.7": + version: 1.0.7 + resolution: "path-parse@npm:1.0.7" + checksum: 10c0/11ce261f9d294cc7a58d6a574b7f1b935842355ec66fba3c3fd79e0f036462eaf07d0aa95bb74ff432f9afef97ce1926c720988c6a7451d8a584930ae7de86e1 + languageName: node + linkType: hard + +"path-scurry@npm:^1.11.1": + version: 1.11.1 + resolution: "path-scurry@npm:1.11.1" + dependencies: + lru-cache: "npm:^10.2.0" + minipass: "npm:^5.0.0 || ^6.0.2 || ^7.0.0" + checksum: 10c0/32a13711a2a505616ae1cc1b5076801e453e7aae6ac40ab55b388bb91b9d0547a52f5aaceff710ea400205f18691120d4431e520afbe4266b836fadede15872d + languageName: node + linkType: hard + +"picocolors@npm:^1.1.1": + version: 1.1.1 + resolution: "picocolors@npm:1.1.1" + checksum: 10c0/e2e3e8170ab9d7c7421969adaa7e1b31434f789afb9b3f115f6b96d91945041ac3ceb02e9ec6fe6510ff036bcc0bf91e69a1772edc0b707e12b19c0f2d6bcf58 + languageName: node + linkType: hard + +"picomatch@npm:^2.3.1": + version: 2.3.1 + resolution: "picomatch@npm:2.3.1" + checksum: 10c0/26c02b8d06f03206fc2ab8d16f19960f2ff9e81a658f831ecb656d8f17d9edc799e8364b1f4a7873e89d9702dff96204be0fa26fe4181f6843f040f819dac4be + languageName: node + linkType: hard + +"picomatch@npm:^4.0.2": + version: 4.0.2 + resolution: "picomatch@npm:4.0.2" + checksum: 
10c0/7c51f3ad2bb42c776f49ebf964c644958158be30d0a510efd5a395e8d49cb5acfed5b82c0c5b365523ce18e6ab85013c9ebe574f60305892ec3fa8eee8304ccc + languageName: node + linkType: hard + +"pidtree@npm:^0.6.0": + version: 0.6.0 + resolution: "pidtree@npm:0.6.0" + bin: + pidtree: bin/pidtree.js + checksum: 10c0/0829ec4e9209e230f74ebf4265f5ccc9ebfb488334b525cb13f86ff801dca44b362c41252cd43ae4d7653a10a5c6ab3be39d2c79064d6895e0d78dc50a5ed6e9 + languageName: node + linkType: hard + +"prelude-ls@npm:^1.2.1": + version: 1.2.1 + resolution: "prelude-ls@npm:1.2.1" + checksum: 10c0/b00d617431e7886c520a6f498a2e14c75ec58f6d93ba48c3b639cf241b54232d90daa05d83a9e9b9fef6baa63cb7e1e4602c2372fea5bc169668401eb127d0cd + languageName: node + linkType: hard + +"prettier-plugin-organize-imports@npm:^4.2.0": + version: 4.2.0 + resolution: "prettier-plugin-organize-imports@npm:4.2.0" + peerDependencies: + prettier: ">=2.0" + typescript: ">=2.9" + vue-tsc: ^2.1.0 || 3 + peerDependenciesMeta: + vue-tsc: + optional: true + checksum: 10c0/3b20652d7ff71786c088bdb4189b315bca086faee70db1c10aca21dc41efadbcba45ef37b0842fb91f14f31927b6bed63433b2725346dc79b3833b6694b33eed + languageName: node + linkType: hard + +"prettier-plugin-pkg@npm:^0.21.2": + version: 0.21.2 + resolution: "prettier-plugin-pkg@npm:0.21.2" + peerDependencies: + prettier: ^3.0.3 + checksum: 10c0/027497d93f5208190acf28f0f6ef6fe2656f01aa25d1f063437db74772ee9a84227936144f8142de3dfbce8eb66d29199f5811c68a1b633445f9aaca398b536e + languageName: node + linkType: hard + +"prettier-plugin-sh@npm:^0.18.0": + version: 0.18.0 + resolution: "prettier-plugin-sh@npm:0.18.0" + dependencies: + "@reteps/dockerfmt": "npm:^0.3.6" + sh-syntax: "npm:^0.5.8" + peerDependencies: + prettier: ^3.6.0 + checksum: 10c0/d8946440abaab1c0f32cee270e0ad285e1f4bba0008aff7c55a9a1c8639032aa8b397aef8f8387a5b88c3f4fb657f1d4d1ab7ae6a1f5c4599875080fb951b9b1 + languageName: node + linkType: hard + +"prettier@npm:^3.6.2": + version: 3.6.2 + resolution: "prettier@npm:3.6.2" + bin: + 
prettier: bin/prettier.cjs + checksum: 10c0/488cb2f2b99ec13da1e50074912870217c11edaddedeadc649b1244c749d15ba94e846423d062e2c4c9ae683e2d65f754de28889ba06e697ac4f988d44f45812 + languageName: node + linkType: hard + +"pretty-bytes@npm:^4.0.2": + version: 4.0.2 + resolution: "pretty-bytes@npm:4.0.2" + checksum: 10c0/b2e0bd22d78c9d46e589e62a5b604eec26f2f3adf2b3255b791883318ce205cc71be49b0f0dac4a0d37191d7ddddf6e870df543ee2226092daba92b1a35c0984 + languageName: node + linkType: hard + +"proc-log@npm:^5.0.0": + version: 5.0.0 + resolution: "proc-log@npm:5.0.0" + checksum: 10c0/bbe5edb944b0ad63387a1d5b1911ae93e05ce8d0f60de1035b218cdcceedfe39dbd2c697853355b70f1a090f8f58fe90da487c85216bf9671f9499d1a897e9e3 + languageName: node + linkType: hard + +"promise-retry@npm:^2.0.1": + version: 2.0.1 + resolution: "promise-retry@npm:2.0.1" + dependencies: + err-code: "npm:^2.0.2" + retry: "npm:^0.12.0" + checksum: 10c0/9c7045a1a2928094b5b9b15336dcd2a7b1c052f674550df63cc3f36cd44028e5080448175b6f6ca32b642de81150f5e7b1a98b728f15cb069f2dd60ac2616b96 + languageName: node + linkType: hard + +"punycode@npm:^2.1.0": + version: 2.3.1 + resolution: "punycode@npm:2.3.1" + checksum: 10c0/14f76a8206bc3464f794fb2e3d3cc665ae416c01893ad7a02b23766eb07159144ee612ad67af5e84fa4479ccfe67678c4feb126b0485651b302babf66f04f9e9 + languageName: node + linkType: hard + +"randombytes@npm:^2.1.0": + version: 2.1.0 + resolution: "randombytes@npm:2.1.0" + dependencies: + safe-buffer: "npm:^5.1.0" + checksum: 10c0/50395efda7a8c94f5dffab564f9ff89736064d32addf0cc7e8bf5e4166f09f8ded7a0849ca6c2d2a59478f7d90f78f20d8048bca3cdf8be09d8e8a10790388f3 + languageName: node + linkType: hard + +"readdirp@npm:^4.0.1": + version: 4.1.2 + resolution: "readdirp@npm:4.1.2" + checksum: 10c0/60a14f7619dec48c9c850255cd523e2717001b0e179dc7037cfa0895da7b9e9ab07532d324bfb118d73a710887d1e35f79c495fa91582784493e085d18c72c62 + languageName: node + linkType: hard + +"redis-commands@npm:^1.2.0": + version: 1.7.0 + resolution: 
"redis-commands@npm:1.7.0" + checksum: 10c0/c78b46d8d6e811f422961878538c57048a451ab56760d3f1657a7c8f29aaae42cc23890f75655556a59ec67611022e18cb443d2976e6c55036934bfe783aa60e + languageName: node + linkType: hard + +"redis-errors@npm:^1.0.0, redis-errors@npm:^1.2.0": + version: 1.2.0 + resolution: "redis-errors@npm:1.2.0" + checksum: 10c0/5b316736e9f532d91a35bff631335137a4f974927bb2fb42bf8c2f18879173a211787db8ac4c3fde8f75ed6233eb0888e55d52510b5620e30d69d7d719c8b8a7 + languageName: node + linkType: hard + +"redis-parser@npm:^2.6.0": + version: 2.6.0 + resolution: "redis-parser@npm:2.6.0" + checksum: 10c0/ff9815c3e9b5e9c76bcc417222a9d96fb320a27752a12fdca12a25b075b587420f1eb351f6f2f38c9e2daed4cfc354b80c839e11ee2e91eab5d8ea709a233eff + languageName: node + linkType: hard + +"redis-parser@npm:^3.0.0": + version: 3.0.0 + resolution: "redis-parser@npm:3.0.0" + dependencies: + redis-errors: "npm:^1.0.0" + checksum: 10c0/ee16ac4c7b2a60b1f42a2cdaee22b005bd4453eb2d0588b8a4939718997ae269da717434da5d570fe0b05030466eeb3f902a58cf2e8e1ca058bf6c9c596f632f + languageName: node + linkType: hard + +"redis@npm:^2.8.0": + version: 2.8.0 + resolution: "redis@npm:2.8.0" + dependencies: + double-ended-queue: "npm:^2.1.0-0" + redis-commands: "npm:^1.2.0" + redis-parser: "npm:^2.6.0" + checksum: 10c0/c3769a5bc69ab8f579f76ac6a1461bf780718d6c43db1785ea9f71a9ee010e49dd38ff67381f3be66bd67f6660f67756af7a60e101ac2284aab11dee6464885e + languageName: node + linkType: hard + +"require-directory@npm:^2.1.1": + version: 2.1.1 + resolution: "require-directory@npm:2.1.1" + checksum: 10c0/83aa76a7bc1531f68d92c75a2ca2f54f1b01463cb566cf3fbc787d0de8be30c9dbc211d1d46be3497dac5785fe296f2dd11d531945ac29730643357978966e99 + languageName: node + linkType: hard + +"resolve-from@npm:^4.0.0": + version: 4.0.0 + resolution: "resolve-from@npm:4.0.0" + checksum: 10c0/8408eec31a3112ef96e3746c37be7d64020cda07c03a920f5024e77290a218ea758b26ca9529fd7b1ad283947f34b2291c1c0f6aa0ed34acfdda9c6014c8d190 + languageName: node + 
linkType: hard + +"resolve@npm:^1.22.1": + version: 1.22.10 + resolution: "resolve@npm:1.22.10" + dependencies: + is-core-module: "npm:^2.16.0" + path-parse: "npm:^1.0.7" + supports-preserve-symlinks-flag: "npm:^1.0.0" + bin: + resolve: bin/resolve + checksum: 10c0/8967e1f4e2cc40f79b7e080b4582b9a8c5ee36ffb46041dccb20e6461161adf69f843b43067b4a375de926a2cd669157e29a29578191def399dd5ef89a1b5203 + languageName: node + linkType: hard + +"resolve@patch:resolve@npm%3A^1.22.1#optional!builtin": + version: 1.22.10 + resolution: "resolve@patch:resolve@npm%3A1.22.10#optional!builtin::version=1.22.10&hash=c3c19d" + dependencies: + is-core-module: "npm:^2.16.0" + path-parse: "npm:^1.0.7" + supports-preserve-symlinks-flag: "npm:^1.0.0" + bin: + resolve: bin/resolve + checksum: 10c0/52a4e505bbfc7925ac8f4cd91fd8c4e096b6a89728b9f46861d3b405ac9a1ccf4dcbf8befb4e89a2e11370dacd0160918163885cbc669369590f2f31f4c58939 + languageName: node + linkType: hard + +"restore-cursor@npm:^5.0.0": + version: 5.1.0 + resolution: "restore-cursor@npm:5.1.0" + dependencies: + onetime: "npm:^7.0.0" + signal-exit: "npm:^4.1.0" + checksum: 10c0/c2ba89131eea791d1b25205bdfdc86699767e2b88dee2a590b1a6caa51737deac8bad0260a5ded2f7c074b7db2f3a626bcf1fcf3cdf35974cbeea5e2e6764f60 + languageName: node + linkType: hard + +"retry@npm:^0.12.0": + version: 0.12.0 + resolution: "retry@npm:0.12.0" + checksum: 10c0/59933e8501727ba13ad73ef4a04d5280b3717fd650408460c987392efe9d7be2040778ed8ebe933c5cbd63da3dcc37919c141ef8af0a54a6e4fca5a2af177bfe + languageName: node + linkType: hard + +"rfdc@npm:^1.4.1": + version: 1.4.1 + resolution: "rfdc@npm:1.4.1" + checksum: 10c0/4614e4292356cafade0b6031527eea9bc90f2372a22c012313be1dcc69a3b90c7338158b414539be863fa95bfcb2ddcd0587be696841af4e6679d85e62c060c7 + languageName: node + linkType: hard + +"rollup@npm:^4.48.1": + version: 4.48.1 + resolution: "rollup@npm:4.48.1" + dependencies: + "@rollup/rollup-android-arm-eabi": "npm:4.48.1" + "@rollup/rollup-android-arm64": "npm:4.48.1" + 
"@rollup/rollup-darwin-arm64": "npm:4.48.1" + "@rollup/rollup-darwin-x64": "npm:4.48.1" + "@rollup/rollup-freebsd-arm64": "npm:4.48.1" + "@rollup/rollup-freebsd-x64": "npm:4.48.1" + "@rollup/rollup-linux-arm-gnueabihf": "npm:4.48.1" + "@rollup/rollup-linux-arm-musleabihf": "npm:4.48.1" + "@rollup/rollup-linux-arm64-gnu": "npm:4.48.1" + "@rollup/rollup-linux-arm64-musl": "npm:4.48.1" + "@rollup/rollup-linux-loongarch64-gnu": "npm:4.48.1" + "@rollup/rollup-linux-ppc64-gnu": "npm:4.48.1" + "@rollup/rollup-linux-riscv64-gnu": "npm:4.48.1" + "@rollup/rollup-linux-riscv64-musl": "npm:4.48.1" + "@rollup/rollup-linux-s390x-gnu": "npm:4.48.1" + "@rollup/rollup-linux-x64-gnu": "npm:4.48.1" + "@rollup/rollup-linux-x64-musl": "npm:4.48.1" + "@rollup/rollup-win32-arm64-msvc": "npm:4.48.1" + "@rollup/rollup-win32-ia32-msvc": "npm:4.48.1" + "@rollup/rollup-win32-x64-msvc": "npm:4.48.1" + "@types/estree": "npm:1.0.8" + fsevents: "npm:~2.3.2" + dependenciesMeta: + "@rollup/rollup-android-arm-eabi": + optional: true + "@rollup/rollup-android-arm64": + optional: true + "@rollup/rollup-darwin-arm64": + optional: true + "@rollup/rollup-darwin-x64": + optional: true + "@rollup/rollup-freebsd-arm64": + optional: true + "@rollup/rollup-freebsd-x64": + optional: true + "@rollup/rollup-linux-arm-gnueabihf": + optional: true + "@rollup/rollup-linux-arm-musleabihf": + optional: true + "@rollup/rollup-linux-arm64-gnu": + optional: true + "@rollup/rollup-linux-arm64-musl": + optional: true + "@rollup/rollup-linux-loongarch64-gnu": + optional: true + "@rollup/rollup-linux-ppc64-gnu": + optional: true + "@rollup/rollup-linux-riscv64-gnu": + optional: true + "@rollup/rollup-linux-riscv64-musl": + optional: true + "@rollup/rollup-linux-s390x-gnu": + optional: true + "@rollup/rollup-linux-x64-gnu": + optional: true + "@rollup/rollup-linux-x64-musl": + optional: true + "@rollup/rollup-win32-arm64-msvc": + optional: true + "@rollup/rollup-win32-ia32-msvc": + optional: true + 
"@rollup/rollup-win32-x64-msvc": + optional: true + fsevents: + optional: true + bin: + rollup: dist/bin/rollup + checksum: 10c0/1b7167f17d7cfb9e7d7cd9e3c60a6150fc1d4b1a55e37c925c1832d9992176a7fa98e8cd1cf1ea3f0adf0b251394ca0ea004873ab3088c1ab272a76da40b3a71 + languageName: node + linkType: hard + +"safe-buffer@npm:^5.1.0": + version: 5.2.1 + resolution: "safe-buffer@npm:5.2.1" + checksum: 10c0/6501914237c0a86e9675d4e51d89ca3c21ffd6a31642efeba25ad65720bce6921c9e7e974e5be91a786b25aa058b5303285d3c15dbabf983a919f5f630d349f3 + languageName: node + linkType: hard + +"safer-buffer@npm:>= 2.1.2 < 3.0.0": + version: 2.1.2 + resolution: "safer-buffer@npm:2.1.2" + checksum: 10c0/7e3c8b2e88a1841c9671094bbaeebd94448111dd90a81a1f606f3f67708a6ec57763b3b47f06da09fc6054193e0e6709e77325415dc8422b04497a8070fa02d4 + languageName: node + linkType: hard + +"semver@npm:^7.3.5": + version: 7.7.1 + resolution: "semver@npm:7.7.1" + bin: + semver: bin/semver.js + checksum: 10c0/fd603a6fb9c399c6054015433051bdbe7b99a940a8fb44b85c2b524c4004b023d7928d47cb22154f8d054ea7ee8597f586605e05b52047f048278e4ac56ae958 + languageName: node + linkType: hard + +"serialize-javascript@npm:^6.0.2": + version: 6.0.2 + resolution: "serialize-javascript@npm:6.0.2" + dependencies: + randombytes: "npm:^2.1.0" + checksum: 10c0/2dd09ef4b65a1289ba24a788b1423a035581bef60817bea1f01eda8e3bda623f86357665fe7ac1b50f6d4f583f97db9615b3f07b2a2e8cbcb75033965f771dd2 + languageName: node + linkType: hard + +"sh-syntax@npm:^0.5.8": + version: 0.5.8 + resolution: "sh-syntax@npm:0.5.8" + dependencies: + tslib: "npm:^2.8.1" + checksum: 10c0/2d2609fc8760ef97175c852be26ee3eeb196078c5aec282c8b96a59ee362be4f470d3e0df4e372da6c7a7b44ccc42910cbdcb0915271b3cb6a6212d00dde116f + languageName: node + linkType: hard + +"shebang-command@npm:^2.0.0": + version: 2.0.0 + resolution: "shebang-command@npm:2.0.0" + dependencies: + shebang-regex: "npm:^3.0.0" + checksum: 
10c0/a41692e7d89a553ef21d324a5cceb5f686d1f3c040759c50aab69688634688c5c327f26f3ecf7001ebfd78c01f3c7c0a11a7c8bfd0a8bc9f6240d4f40b224e4e + languageName: node + linkType: hard + +"shebang-regex@npm:^3.0.0": + version: 3.0.0 + resolution: "shebang-regex@npm:3.0.0" + checksum: 10c0/1dbed0726dd0e1152a92696c76c7f06084eb32a90f0528d11acd764043aacf76994b2fb30aa1291a21bd019d6699164d048286309a278855ee7bec06cf6fb690 + languageName: node + linkType: hard + +"signal-exit@npm:^4.0.1, signal-exit@npm:^4.1.0": + version: 4.1.0 + resolution: "signal-exit@npm:4.1.0" + checksum: 10c0/41602dce540e46d599edba9d9860193398d135f7ff72cab629db5171516cfae628d21e7bfccde1bbfdf11c48726bc2a6d1a8fb8701125852fbfda7cf19c6aa83 + languageName: node + linkType: hard + +"slice-ansi@npm:^5.0.0": + version: 5.0.0 + resolution: "slice-ansi@npm:5.0.0" + dependencies: + ansi-styles: "npm:^6.0.0" + is-fullwidth-code-point: "npm:^4.0.0" + checksum: 10c0/2d4d40b2a9d5cf4e8caae3f698fe24ae31a4d778701724f578e984dcb485ec8c49f0c04dab59c401821e80fcdfe89cace9c66693b0244e40ec485d72e543914f + languageName: node + linkType: hard + +"slice-ansi@npm:^7.1.0": + version: 7.1.0 + resolution: "slice-ansi@npm:7.1.0" + dependencies: + ansi-styles: "npm:^6.2.1" + is-fullwidth-code-point: "npm:^5.0.0" + checksum: 10c0/631c971d4abf56cf880f034d43fcc44ff883624867bf11ecbd538c47343911d734a4656d7bc02362b40b89d765652a7f935595441e519b59e2ad3f4d5d6fe7ca + languageName: node + linkType: hard + +"smart-buffer@npm:^4.2.0": + version: 4.2.0 + resolution: "smart-buffer@npm:4.2.0" + checksum: 10c0/a16775323e1404dd43fabafe7460be13a471e021637bc7889468eb45ce6a6b207261f454e4e530a19500cc962c4cc5348583520843b363f4193cee5c00e1e539 + languageName: node + linkType: hard + +"socks-proxy-agent@npm:^8.0.3": + version: 8.0.5 + resolution: "socks-proxy-agent@npm:8.0.5" + dependencies: + agent-base: "npm:^7.1.2" + debug: "npm:^4.3.4" + socks: "npm:^2.8.3" + checksum: 
10c0/5d2c6cecba6821389aabf18728325730504bf9bb1d9e342e7987a5d13badd7a98838cc9a55b8ed3cb866ad37cc23e1086f09c4d72d93105ce9dfe76330e9d2a6 + languageName: node + linkType: hard + +"socks@npm:^2.8.3": + version: 2.8.4 + resolution: "socks@npm:2.8.4" + dependencies: + ip-address: "npm:^9.0.5" + smart-buffer: "npm:^4.2.0" + checksum: 10c0/00c3271e233ccf1fb83a3dd2060b94cc37817e0f797a93c560b9a7a86c4a0ec2961fb31263bdd24a3c28945e24868b5f063cd98744171d9e942c513454b50ae5 + languageName: node + linkType: hard + +"sprintf-js@npm:^1.1.3": + version: 1.1.3 + resolution: "sprintf-js@npm:1.1.3" + checksum: 10c0/09270dc4f30d479e666aee820eacd9e464215cdff53848b443964202bf4051490538e5dd1b42e1a65cf7296916ca17640aebf63dae9812749c7542ee5f288dec + languageName: node + linkType: hard + +"ssri@npm:^12.0.0": + version: 12.0.0 + resolution: "ssri@npm:12.0.0" + dependencies: + minipass: "npm:^7.0.3" + checksum: 10c0/caddd5f544b2006e88fa6b0124d8d7b28208b83c72d7672d5ade44d794525d23b540f3396108c4eb9280dcb7c01f0bef50682f5b4b2c34291f7c5e211fd1417d + languageName: node + linkType: hard + +"standard-as-callback@npm:^2.1.0": + version: 2.1.0 + resolution: "standard-as-callback@npm:2.1.0" + checksum: 10c0/012677236e3d3fdc5689d29e64ea8a599331c4babe86956bf92fc5e127d53f85411c5536ee0079c52c43beb0026b5ce7aa1d834dd35dd026e82a15d1bcaead1f + languageName: node + linkType: hard + +"string-argv@npm:^0.3.2": + version: 0.3.2 + resolution: "string-argv@npm:0.3.2" + checksum: 10c0/75c02a83759ad1722e040b86823909d9a2fc75d15dd71ec4b537c3560746e33b5f5a07f7332d1e3f88319909f82190843aa2f0a0d8c8d591ec08e93d5b8dec82 + languageName: node + linkType: hard + +"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.3": + version: 4.2.3 + resolution: "string-width@npm:4.2.3" + dependencies: + emoji-regex: "npm:^8.0.0" + is-fullwidth-code-point: "npm:^3.0.0" + strip-ansi: "npm:^6.0.1" + checksum: 
10c0/1e525e92e5eae0afd7454086eed9c818ee84374bb80328fc41217ae72ff5f065ef1c9d7f72da41de40c75fa8bb3dee63d92373fd492c84260a552c636392a47b + languageName: node + linkType: hard + +"string-width@npm:^5.0.1, string-width@npm:^5.1.2": + version: 5.1.2 + resolution: "string-width@npm:5.1.2" + dependencies: + eastasianwidth: "npm:^0.2.0" + emoji-regex: "npm:^9.2.2" + strip-ansi: "npm:^7.0.1" + checksum: 10c0/ab9c4264443d35b8b923cbdd513a089a60de339216d3b0ed3be3ba57d6880e1a192b70ae17225f764d7adbf5994e9bb8df253a944736c15a0240eff553c678ca + languageName: node + linkType: hard + +"string-width@npm:^7.0.0": + version: 7.2.0 + resolution: "string-width@npm:7.2.0" + dependencies: + emoji-regex: "npm:^10.3.0" + get-east-asian-width: "npm:^1.0.0" + strip-ansi: "npm:^7.1.0" + checksum: 10c0/eb0430dd43f3199c7a46dcbf7a0b34539c76fe3aa62763d0b0655acdcbdf360b3f66f3d58ca25ba0205f42ea3491fa00f09426d3b7d3040e506878fc7664c9b9 + languageName: node + linkType: hard + +"strip-ansi-cjs@npm:strip-ansi@^6.0.1, strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1": + version: 6.0.1 + resolution: "strip-ansi@npm:6.0.1" + dependencies: + ansi-regex: "npm:^5.0.1" + checksum: 10c0/1ae5f212a126fe5b167707f716942490e3933085a5ff6c008ab97ab2f272c8025d3aa218b7bd6ab25729ca20cc81cddb252102f8751e13482a5199e873680952 + languageName: node + linkType: hard + +"strip-ansi@npm:^7.0.1, strip-ansi@npm:^7.1.0": + version: 7.1.0 + resolution: "strip-ansi@npm:7.1.0" + dependencies: + ansi-regex: "npm:^6.0.1" + checksum: 10c0/a198c3762e8832505328cbf9e8c8381de14a4fa50a4f9b2160138158ea88c0f5549fb50cb13c651c3088f47e63a108b34622ec18c0499b6c8c3a5ddf6b305ac4 + languageName: node + linkType: hard + +"strip-json-comments@npm:^3.1.1": + version: 3.1.1 + resolution: "strip-json-comments@npm:3.1.1" + checksum: 10c0/9681a6257b925a7fa0f285851c0e613cc934a50661fa7bb41ca9cbbff89686bb4a0ee366e6ecedc4daafd01e83eee0720111ab294366fe7c185e935475ebcecd + languageName: node + linkType: hard + +"supports-color@npm:^7.1.0": + version: 7.2.0 + resolution: 
"supports-color@npm:7.2.0" + dependencies: + has-flag: "npm:^4.0.0" + checksum: 10c0/afb4c88521b8b136b5f5f95160c98dee7243dc79d5432db7efc27efb219385bbc7d9427398e43dd6cc730a0f87d5085ce1652af7efbe391327bc0a7d0f7fc124 + languageName: node + linkType: hard + +"supports-color@npm:^8.1.1": + version: 8.1.1 + resolution: "supports-color@npm:8.1.1" + dependencies: + has-flag: "npm:^4.0.0" + checksum: 10c0/ea1d3c275dd604c974670f63943ed9bd83623edc102430c05adb8efc56ba492746b6e95386e7831b872ec3807fd89dd8eb43f735195f37b5ec343e4234cc7e89 + languageName: node + linkType: hard + +"supports-preserve-symlinks-flag@npm:^1.0.0": + version: 1.0.0 + resolution: "supports-preserve-symlinks-flag@npm:1.0.0" + checksum: 10c0/6c4032340701a9950865f7ae8ef38578d8d7053f5e10518076e6554a9381fa91bd9c6850193695c141f32b21f979c985db07265a758867bac95de05f7d8aeb39 + languageName: node + linkType: hard + +"tar@npm:^7.4.3": + version: 7.4.3 + resolution: "tar@npm:7.4.3" + dependencies: + "@isaacs/fs-minipass": "npm:^4.0.0" + chownr: "npm:^3.0.0" + minipass: "npm:^7.1.2" + minizlib: "npm:^3.0.1" + mkdirp: "npm:^3.0.1" + yallist: "npm:^5.0.0" + checksum: 10c0/d4679609bb2a9b48eeaf84632b6d844128d2412b95b6de07d53d8ee8baf4ca0857c9331dfa510390a0727b550fd543d4d1a10995ad86cdf078423fbb8d99831d + languageName: node + linkType: hard + +"tinyglobby@npm:^0.2.12": + version: 0.2.13 + resolution: "tinyglobby@npm:0.2.13" + dependencies: + fdir: "npm:^6.4.4" + picomatch: "npm:^4.0.2" + checksum: 10c0/ef07dfaa7b26936601d3f6d999f7928a4d1c6234c5eb36896bb88681947c0d459b7ebe797022400e555fe4b894db06e922b95d0ce60cb05fd827a0a66326b18c + languageName: node + linkType: hard + +"to-regex-range@npm:^5.0.1": + version: 5.0.1 + resolution: "to-regex-range@npm:5.0.1" + dependencies: + is-number: "npm:^7.0.0" + checksum: 10c0/487988b0a19c654ff3e1961b87f471702e708fa8a8dd02a298ef16da7206692e8552a0250e8b3e8759270f62e9d8314616f6da274734d3b558b1fc7b7724e892 + languageName: node + linkType: hard + +"tslib@npm:^2.8.1": + version: 2.8.1 + 
resolution: "tslib@npm:2.8.1" + checksum: 10c0/9c4759110a19c53f992d9aae23aac5ced636e99887b51b9e61def52611732872ff7668757d4e4c61f19691e36f4da981cd9485e869b4a7408d689f6bf1f14e62 + languageName: node + linkType: hard + +"type-check@npm:^0.4.0, type-check@npm:~0.4.0": + version: 0.4.0 + resolution: "type-check@npm:0.4.0" + dependencies: + prelude-ls: "npm:^1.2.1" + checksum: 10c0/7b3fd0ed43891e2080bf0c5c504b418fbb3e5c7b9708d3d015037ba2e6323a28152ec163bcb65212741fa5d2022e3075ac3c76440dbd344c9035f818e8ecee58 + languageName: node + linkType: hard + +"typescript@npm:^5.9.2": + version: 5.9.2 + resolution: "typescript@npm:5.9.2" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 10c0/cd635d50f02d6cf98ed42de2f76289701c1ec587a363369255f01ed15aaf22be0813226bff3c53e99d971f9b540e0b3cc7583dbe05faded49b1b0bed2f638a18 + languageName: node + linkType: hard + +"typescript@patch:typescript@npm%3A^5.9.2#optional!builtin": + version: 5.9.2 + resolution: "typescript@patch:typescript@npm%3A5.9.2#optional!builtin::version=5.9.2&hash=5786d5" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 10c0/34d2a8e23eb8e0d1875072064d5e1d9c102e0bdce56a10a25c0b917b8aa9001a9cf5c225df12497e99da107dc379360bc138163c66b55b95f5b105b50578067e + languageName: node + linkType: hard + +"undici-types@npm:~6.21.0": + version: 6.21.0 + resolution: "undici-types@npm:6.21.0" + checksum: 10c0/c01ed51829b10aa72fc3ce64b747f8e74ae9b60eafa19a7b46ef624403508a54c526ffab06a14a26b3120d055e1104d7abe7c9017e83ced038ea5cf52f8d5e04 + languageName: node + linkType: hard + +"unique-filename@npm:^4.0.0": + version: 4.0.0 + resolution: "unique-filename@npm:4.0.0" + dependencies: + unique-slug: "npm:^5.0.0" + checksum: 10c0/38ae681cceb1408ea0587b6b01e29b00eee3c84baee1e41fd5c16b9ed443b80fba90c40e0ba69627e30855570a34ba8b06702d4a35035d4b5e198bf5a64c9ddc + languageName: node + linkType: hard + +"unique-slug@npm:^5.0.0": + version: 5.0.0 + resolution: "unique-slug@npm:5.0.0" + dependencies: + imurmurhash: "npm:^0.1.4" + 
checksum: 10c0/d324c5a44887bd7e105ce800fcf7533d43f29c48757ac410afd42975de82cc38ea2035c0483f4de82d186691bf3208ef35c644f73aa2b1b20b8e651be5afd293 + languageName: node + linkType: hard + +"uri-js@npm:^4.2.2": + version: 4.4.1 + resolution: "uri-js@npm:4.4.1" + dependencies: + punycode: "npm:^2.1.0" + checksum: 10c0/4ef57b45aa820d7ac6496e9208559986c665e49447cb072744c13b66925a362d96dd5a46c4530a6b8e203e5db5fe849369444440cb22ecfc26c679359e5dfa3c + languageName: node + linkType: hard + +"which@npm:^2.0.1": + version: 2.0.2 + resolution: "which@npm:2.0.2" + dependencies: + isexe: "npm:^2.0.0" + bin: + node-which: ./bin/node-which + checksum: 10c0/66522872a768b60c2a65a57e8ad184e5372f5b6a9ca6d5f033d4b0dc98aff63995655a7503b9c0a2598936f532120e81dd8cc155e2e92ed662a2b9377cc4374f + languageName: node + linkType: hard + +"which@npm:^5.0.0": + version: 5.0.0 + resolution: "which@npm:5.0.0" + dependencies: + isexe: "npm:^3.1.1" + bin: + node-which: bin/which.js + checksum: 10c0/e556e4cd8b7dbf5df52408c9a9dd5ac6518c8c5267c8953f5b0564073c66ed5bf9503b14d876d0e9c7844d4db9725fb0dcf45d6e911e17e26ab363dc3965ae7b + languageName: node + linkType: hard + +"word-wrap@npm:^1.2.5": + version: 1.2.5 + resolution: "word-wrap@npm:1.2.5" + checksum: 10c0/e0e4a1ca27599c92a6ca4c32260e8a92e8a44f4ef6ef93f803f8ed823f486e0889fc0b93be4db59c8d51b3064951d25e43d434e95dc8c960cc3a63d65d00ba20 + languageName: node + linkType: hard + +"workerpool@npm:^9.2.0": + version: 9.3.3 + resolution: "workerpool@npm:9.3.3" + checksum: 10c0/ef82fe9824f6edd3976e36cc1fef5d44f487fd7477f012ca06a4e18ed7522948a59834173036eb1395fe04bedff39d4ebd541f84de04e6f410de379db5847c64 + languageName: node + linkType: hard + +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0, wrap-ansi@npm:^7.0.0": + version: 7.0.0 + resolution: "wrap-ansi@npm:7.0.0" + dependencies: + ansi-styles: "npm:^4.0.0" + string-width: "npm:^4.1.0" + strip-ansi: "npm:^6.0.0" + checksum: 
10c0/d15fc12c11e4cbc4044a552129ebc75ee3f57aa9c1958373a4db0292d72282f54373b536103987a4a7594db1ef6a4f10acf92978f79b98c49306a4b58c77d4da + languageName: node + linkType: hard + +"wrap-ansi@npm:^8.1.0": + version: 8.1.0 + resolution: "wrap-ansi@npm:8.1.0" + dependencies: + ansi-styles: "npm:^6.1.0" + string-width: "npm:^5.0.1" + strip-ansi: "npm:^7.0.1" + checksum: 10c0/138ff58a41d2f877eae87e3282c0630fc2789012fc1af4d6bd626eeb9a2f9a65ca92005e6e69a75c7b85a68479fe7443c7dbe1eb8fbaa681a4491364b7c55c60 + languageName: node + linkType: hard + +"wrap-ansi@npm:^9.0.0": + version: 9.0.0 + resolution: "wrap-ansi@npm:9.0.0" + dependencies: + ansi-styles: "npm:^6.2.1" + string-width: "npm:^7.0.0" + strip-ansi: "npm:^7.1.0" + checksum: 10c0/a139b818da9573677548dd463bd626a5a5286271211eb6e4e82f34a4f643191d74e6d4a9bb0a3c26ec90e6f904f679e0569674ac099ea12378a8b98e20706066 + languageName: node + linkType: hard + +"y18n@npm:^5.0.5": + version: 5.0.8 + resolution: "y18n@npm:5.0.8" + checksum: 10c0/4df2842c36e468590c3691c894bc9cdbac41f520566e76e24f59401ba7d8b4811eb1e34524d57e54bc6d864bcb66baab7ffd9ca42bf1eda596618f9162b91249 + languageName: node + linkType: hard + +"yallist@npm:^4.0.0": + version: 4.0.0 + resolution: "yallist@npm:4.0.0" + checksum: 10c0/2286b5e8dbfe22204ab66e2ef5cc9bbb1e55dfc873bbe0d568aa943eb255d131890dfd5bf243637273d31119b870f49c18fcde2c6ffbb7a7a092b870dc90625a + languageName: node + linkType: hard + +"yallist@npm:^5.0.0": + version: 5.0.0 + resolution: "yallist@npm:5.0.0" + checksum: 10c0/a499c81ce6d4a1d260d4ea0f6d49ab4da09681e32c3f0472dee16667ed69d01dae63a3b81745a24bd78476ec4fcf856114cb4896ace738e01da34b2c42235416 + languageName: node + linkType: hard + +"yaml@npm:^2.8.1": + version: 2.8.1 + resolution: "yaml@npm:2.8.1" + bin: + yaml: bin.mjs + checksum: 10c0/7c587be00d9303d2ae1566e03bc5bc7fe978ba0d9bf39cc418c3139d37929dfcb93a230d9749f2cb578b6aa5d9ebebc322415e4b653cb83acd8bc0bc321707f3 + languageName: node + linkType: hard + +"yargs-parser@npm:^21.1.1": + version: 21.1.1 
+ resolution: "yargs-parser@npm:21.1.1" + checksum: 10c0/f84b5e48169479d2f402239c59f084cfd1c3acc197a05c59b98bab067452e6b3ea46d4dd8ba2985ba7b3d32a343d77df0debd6b343e5dae3da2aab2cdf5886b2 + languageName: node + linkType: hard + +"yargs-unparser@npm:^2.0.0": + version: 2.0.0 + resolution: "yargs-unparser@npm:2.0.0" + dependencies: + camelcase: "npm:^6.0.0" + decamelize: "npm:^4.0.0" + flat: "npm:^5.0.2" + is-plain-obj: "npm:^2.1.0" + checksum: 10c0/a5a7d6dc157efa95122e16780c019f40ed91d4af6d2bac066db8194ed0ec5c330abb115daa5a79ff07a9b80b8ea80c925baacf354c4c12edd878c0529927ff03 + languageName: node + linkType: hard + +"yargs@npm:^17.7.2": + version: 17.7.2 + resolution: "yargs@npm:17.7.2" + dependencies: + cliui: "npm:^8.0.1" + escalade: "npm:^3.1.1" + get-caller-file: "npm:^2.0.5" + require-directory: "npm:^2.1.1" + string-width: "npm:^4.2.3" + y18n: "npm:^5.0.5" + yargs-parser: "npm:^21.1.1" + checksum: 10c0/ccd7e723e61ad5965fffbb791366db689572b80cca80e0f96aad968dfff4156cd7cd1ad18607afe1046d8241e6fb2d6c08bf7fa7bfb5eaec818735d8feac8f05 + languageName: node + linkType: hard + +"yocto-queue@npm:^0.1.0": + version: 0.1.0 + resolution: "yocto-queue@npm:0.1.0" + checksum: 10c0/dceb44c28578b31641e13695d200d34ec4ab3966a5729814d5445b194933c096b7ced71494ce53a0e8820685d1d010df8b2422e5bf2cdea7e469d97ffbea306f + languageName: node + linkType: hard