0.2.0 - Mid migration

parent 139e6a915e
commit 7e38fdbd7d
42393 changed files with 5358157 additions and 62 deletions
web/node_modules/bfj/.eslintrc (generated, vendored, new file, 207 lines)
@@ -0,0 +1,207 @@

env:
  es6: true
  node: true
parserOptions:
  ecmaVersion: 2018
rules:
  accessor-pairs: [ 2, { "setWithoutGet": true } ]
  array-bracket-spacing: [ 2, "always" ]
  array-callback-return: 2
  arrow-body-style: 0
  arrow-parens: 0
  arrow-spacing: [ 2, { "before": true, "after": true } ]
  block-scoped-var: 1
  block-spacing: [ 2, "always" ]
  brace-style: [ 2, "1tbs" ]
  callback-return: 0
  camelcase: [ 2, { "properties": "always" } ]
  comma-dangle: 0
  comma-spacing: [ 2, { "before": false, "after": true } ]
  computed-property-spacing: [ 2, "never" ]
  consistent-return: 0
  consistent-this: [ 2, "self" ]
  constructor-super: 0
  curly: [ 2, "all" ]
  default-case: 0
  dot-location: [ 2, "property" ]
  dot-notation: [ 2, { "allowKeywords": true } ]
  eol-last: 2
  eqeqeq: 2
  func-names: 0
  func-style: [ 2, "declaration" ]
  generator-star-spacing: [ 2, "after" ]
  global-require: 0
  guard-for-in: 2
  handle-callback-err: [ 2, "error" ]
  id-blacklist: 0
  id-length: 0
  id-match: 0
  indent: [ 2, 2, { "SwitchCase": 1 } ]
  init-declarations: 0
  jsx-quotes: 0
  key-spacing: [ 2, { "beforeColon": false, "afterColon": true } ]
  keyword-spacing: [ 2, { "before": true, "after": true } ]
  linebreak-style: [ 2, "unix" ]
  lines-around-comment: 0
  max-depth: [ 1, 3 ]
  max-nested-callbacks: [ 1, 2 ]
  max-params: [ 1, 4 ]
  max-statements: 0
  new-cap: 2
  new-parens: 2
  newline-after-var: 0
  newline-before-return: 0
  newline-per-chained-call: 1
  no-alert: 2
  no-array-constructor: 2
  no-bitwise: 1
  no-caller: 2
  no-case-declarations: 2
  no-catch-shadow: 2
  no-class-assign: 2
  no-cond-assign: [ 2, "always" ]
  no-confusing-arrow: 0
  no-console: 1
  no-constant-condition: 2
  no-const-assign: 2
  no-continue: 1
  no-control-regex: 2
  no-debugger: 2
  no-delete-var: 2
  no-div-regex: 0
  no-dupe-args: 2
  no-dupe-class-members: 2
  no-dupe-keys: 2
  no-duplicate-case: 2
  no-else-return: 2
  no-empty: 2
  no-empty-character-class: 2
  no-empty-function: 0
  no-empty-pattern: 2
  no-eq-null: 2
  no-eval: 2
  no-ex-assign: 2
  no-extend-native: 2
  no-extra-bind: 2
  no-extra-boolean-cast: 2
  no-extra-label: 2
  no-extra-parens: [ 2, "all", { "nestedBinaryExpressions": false } ]
  no-extra-semi: 2
  no-fallthrough: 0
  no-floating-decimal: 2
  no-func-assign: 2
  no-implicit-coercion: 0
  no-implicit-globals: 0
  no-implied-eval: 2
  no-inline-comments: 2
  no-inner-declarations: [ 2, "both" ]
  no-invalid-regexp: 2
  no-invalid-this: 1
  no-irregular-whitespace: 2
  no-iterator: 2
  no-labels: 2
  no-label-var: 2
  no-lonely-if: 2
  no-lone-blocks: 1
  no-loop-func: 2
  no-magic-numbers: 0
  no-mixed-requires: 0
  no-mixed-spaces-and-tabs: 2
  no-multiple-empty-lines: [ 2, { "max": 1, "maxEOF": 1, "maxBOF": 0 } ]
  no-multi-spaces: 2
  no-multi-str: 2
  no-native-reassign: 2
  no-negated-condition: 2
  no-negated-in-lhs: 2
  no-nested-ternary: 2
  no-new: 1
  no-new-func: 2
  no-new-object: 2
  no-new-require: 2
  no-new-symbol: 2
  no-new-wrappers: 2
  no-obj-calls: 2
  no-octal: 2
  no-octal-escape: 2
  no-param-reassign: 0
  no-path-concat: 2
  no-plusplus: 0
  no-process-env: 0
  no-process-exit: 1
  no-proto: 2
  no-redeclare: [ 2, { "builtinGlobals": true } ]
  no-regex-spaces: 2
  no-restricted-globals: 2
  no-restricted-imports: 0
  no-restricted-modules: 0
  no-restricted-syntax: 0
  no-return-assign: 0
  no-script-url: 2
  no-self-assign: 2
  no-self-compare: 2
  no-sequences: 2
  no-shadow: [ 2, { "hoist": "all" } ]
  no-shadow-restricted-names: 2
  no-spaced-func: 2
  no-sparse-arrays: 2
  no-sync: 0
  no-ternary: 1
  no-this-before-super: 2
  no-throw-literal: 1
  no-trailing-spaces: 2
  no-undef: 2
  no-undef-init: 2
  no-undefined: 2
  no-underscore-dangle: 2
  no-unexpected-multiline: 2
  no-unmodified-loop-condition: 2
  no-unneeded-ternary: 2
  no-unreachable: 2
  no-unused-expressions: 2
  no-unused-labels: 2
  no-unused-vars: [ 2, "all" ]
  no-useless-call: 2
  no-useless-concat: 2
  no-useless-constructor: 2
  no-use-before-define: 0
  no-var: 1
  no-void: 2
  no-warning-comments: 1
  no-whitespace-before-property: 2
  no-with: 2
  object-curly-spacing: [ 2, "always" ]
  object-shorthand: [ 2, "always" ]
  one-var: 0
  one-var-declaration-per-line: 0
  operator-assignment: [ 2, "always" ]
  operator-linebreak: [ 2, "after" ]
  padded-blocks: [ 2, "never" ]
  prefer-arrow-callback: 2
  prefer-const: 2
  prefer-reflect: 0
  prefer-rest-params: 2
  prefer-spread: 2
  prefer-template: 2
  quote-props: [ 2, "as-needed" ]
  radix: 0
  require-jsdoc: 0
  require-yield: 2
  semi: [ 2, "never" ]
  semi-spacing: 0
  sort-imports: 0
  sort-vars: 0
  space-before-blocks: [ 2, "always" ]
  space-before-function-paren: [ 2, { "anonymous": "always", "named": "always" } ]
  space-infix-ops: 2
  space-unary-ops: 0
  spaced-comment: [ 1, "always" ]
  strict: [ 2, "global" ]
  template-curly-spacing: [ 2, "never" ]
  use-isnan: 2
  valid-jsdoc: 0
  valid-typeof: 2
  vars-on-top: 0
  wrap-iife: 2
  wrap-regex: 0
  yield-star-spacing: [ 2, { "before": false, "after": true } ]
  yoda: [ 2, "never" ]

web/node_modules/bfj/.gitlab-ci.yml (generated, vendored, new file, 25 lines)
@@ -0,0 +1,25 @@

cache:
  key: "$CI_JOB_NAME"
  untracked: true
  paths:
    - node_modules/

before_script:
  - npm install

.test_template: &npm_test
  script:
    - npm run lint
    - npm test

test:node8:
  image: node:8
  <<: *npm_test

test:node10:
  image: node:10
  <<: *npm_test

test:node12:
  image: node:12
  <<: *npm_test

web/node_modules/bfj/AUTHORS (generated, vendored, new file, 4 lines)
@@ -0,0 +1,4 @@

Phil Booth <pmbooth@gmail.com> (https://philbooth.me/)
Rowan Manning (https://github.com/rowanmanning)
Benedikt Rötsch (https://github.com/axe312ger)

web/node_modules/bfj/CONTRIBUTING.md (generated, vendored, new file, 37 lines)
@@ -0,0 +1,37 @@

# Contribution guidelines

* Install/update the dependencies:
  ```
  npm install
  ```

* Make sure the code lints:
  ```
  npm run lint
  ```

* Make sure all of the tests pass:
  ```
  npm test
  ```

* Adhere to the coding conventions that are used elsewhere in the codebase.

* New code must have new unit tests.

* New features or changes to existing features must be documented in the [readme] file.

* Add yourself to the [authors] file.

* Feel free to [open an issue][newissue] first, if the change is one that you think needs some discussion.

[readme]: https://gitlab.com/philbooth/bfj/blob/master/README.md
[authors]: https://gitlab.com/philbooth/bfj/blob/master/AUTHORS
[newissue]: https://gitlab.com/philbooth/bfj/issues/new
[issues]: https://gitlab.com/philbooth/bfj/issues

web/node_modules/bfj/COPYING (generated, vendored, new file, 20 lines)
@@ -0,0 +1,20 @@

Copyright © 2015, 2016 Phil Booth

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

web/node_modules/bfj/HISTORY.md (generated, vendored, new file, 295 lines)
@@ -0,0 +1,295 @@

# History

## 7.0.2

### Other changes

* package: update dependencies (cf82fd4)

## 7.0.1

### Bug fixes

* match: prevent mismatched properties when minDepth is set (4efaf1a)

## 7.0.0

### Breaking changes

* package: limit to node 8+ (cc42139)

### New features

* match: support minDepth option to improve performance (6d79fe4)

### Refactorings

* code: prefer spread syntax to Object.assign (5544086)
* code: prefer Object.entries to iterate keys and values (d101317)

### Other changes

* package: test in node 12 (9bf4e6b)
* git: ignore test/*.json (e87eaf7)

## 6.1.2

### Bug fixes

* eventify: escape object keys (910ad08)

### Other changes

* package: update deps (aafb4ff)

## 6.1.1

### Bug fixes

* eventify: don't serialise NaN or infinities (3c50fe4)

### Other changes

* deps: npm update (b3c86d0)
* project: add package lock file (63df27d)
* project: migrate to gitlab (26746a0)

## 6.1.0

### New features

* match: pass a depth argument to selector predicates (af15939)

### Other changes

* tests: delete unused var (f10902a)
* ci: reinstate tests in node 9 (7cd2594)
* ci: temporarily disable tests in node 9 (e27ccd0)

## 6.0.0

### Breaking changes

* eventify: distinguish between syntax and operational errors (e7bc23d)
* walk: distinguish between syntax and operational errors (419ddae)

### New features

* streams: expose a highWaterMark option (626f755)
* match: implement a streaming match api (e2e320d)

### Other changes

* docs: note the end of node-4 maintenance (0a32090)

## 5.3.1

### Bug fixes

* unpipe: prohibit unpipe from setting the ndjson option (90b61c6)

## 5.3.0

### New features

* walk: add support for NDJSON streams (e87672a)

### Bug fixes

* docs: document the pause and resume functions on event emitters (bfdf152)

### Other changes

* lint: silence warning (761bad4)
* package: update dev dependencies (396cc40)
* docs: link to bfj-collections (11eacb8)

## 5.2.1

### Bug fixes

* walk: handle stream errors sanely (9fe21ff)

### Other changes

* deps: update dev dependencies (c1d0518)
* ci: run tests in node 9 (222356e)
* deps: update dev dependencies (be54dbf)

## 5.2.0

* fix: extra paragraph about why bfj is slow (e51ca34)
* fix: expand possible causes of the error event (8d1d352)
* feature: add a pause method to the walk emitter (a4cd0e0)

## 5.1.1

* fix: replace old mockery link with proxyquire (e6b3924)
* chore: delete redundant teardowns (52040a6)
* fix: catch errors from user code (b8103e4)

## 5.1.0

* chore: remove extra trailing newlines (fa561e2)
* feature: allow callers to pass in the Promise constructor (aa5a664)
* refactor: better names for the option-mangling functions (5eb2e4e)

## 5.0.0

* fix: ditch mockery in favour of proxyquire (01a9177)
* breaking change: return bluebird promises instead of native (c80fe0f)
* fix: clear the yield counter when unpausing (9d5c95d)
* chore: reduce the buffer length (9abd435)

## 4.2.4

* chore: update deps (c3eeeb4)

## 4.2.3

* fix: eliminate costly string concatenation (42998d7)
* fix: micro-optimise eventify::proceed::after (98a2519)
* fix: micro-optimise walk::character (8d1c4cf)

## 4.2.2

* fix: silence obnoxious unhandled rejection warnings (1d4a902)

## 4.2.1

* refactor: discard chunks more aggressively (970a964)

## 4.2.0

* chore: add a unit test for parallel object references (e8f3895)
* chore: update check-types (c0bc551)
* fix: shortcut primitive coercion (c6381b5)
* fix: shortcut coercions (d9a9676)
* fix: eliminate unnecessary indirection in promise coercion (c63e81f)
* fix: yield rather than grow when buffer is filled (a3cc7e6)
* feature: add a bufferLength option (3b560f9)
* fix: document improved performance from disabling coercions (25eecc7)
* fix: fix lint errors (a85f7c0)

## 4.1.1

* fix: fix links in readme (90d7a0b)
* fix: pop references on exiting collections (c13eaf4)
* fix: eliminate sequential reference look-up (d622893)
* chore: add a couple of sentences on speed (ae8994d)

## 4.1.0

* fix: update node support in the readme (61c41f4)
* fix: reject if fs.createReadStream throws (4840938)
* fix: test on node 8 (371807b)
* feature: add a yieldRate option to the parsing functions (35bd20b)

## 4.0.1

* fix: set minimum required node version (db58b47)

## 4.0.0

* breaking change: revert to strings from circular arrays in walk (ccda677)
* feature: add yieldRate option to control events per tick (419247b)
* chore: increase the default discard threshold (751aa6c)

## 3.1.4

* fix: add options to example code (5c207dd)
* chore: update authors (cdf2b7d)
* chore: bump up the default array size to 4mb (4a2fe55)
* fix: fix stupid memory consumption bug (d2b6fe2)

## 3.1.3

* fix: eliminate needless per-character chunking in streamify (a7fcc2f)

## 3.1.2

* fix: eliminate duplicated readme section (283b3ce)

## 3.1.1

* fix: document the dropped support for node 4 (6120c9e)

## 3.1.0

* chore: tweak the readme (040e9be)
* chore: swap out bespoke circular array for hoopy (0ed7986)
* feature: used fixed-length circular array in streamify (e773a94)
* fix: eliminate mockery allowed module warning (b1dc7db)
* chore: fix lint errors (abde4de)

## 3.0.0

* chore: delete left-over debugging code (b903a27)
* chore: run tests on node 7 (acbb808)
* chore: remove old linter config (62c18ce)
* chore: update dependencies (882c74c)
* chore: add an integration test that parses a request (029afdb)
* chore: fix the broken perf test (8ac0e03)
* chore: add a crude memory-profiling script (1ee6f36)
* breaking change: preallocate memory to avoid out-of-memory conditions (18da753)
* feature: implement unpipe (f8a41d2)

## 2.1.2

* Fix lint errors.

## 2.1.1

* Fix "unhandled rejection" warnings.

## 2.1.0

* Stop throwing errors from promise-returning methods.

## 2.0.0

* Honour `toJSON` on all objects.
* Drop support for Node.js 0.12, switch to ES6.
* Tidy the readme.

## 1.2.2

* Sanely escape strings when serialising (thanks [@rowanmanning](https://github.com/rowanmanning)).

## 1.2.1

* Sanely handle `undefined`, functions and symbols.

## 1.2.0

* Sanely handle circular references in the data when serialising.

## 1.1.0

* Pass `options` to `fs.createReadStream` inside `read`.
* Fix truncated output bug in `write`.

## 1.0.0

* Breaking changes:
  * Take `Readable` parameter in `walk`.
  * Return `EventEmitter` from `walk`.
  * Return `Promise` from `write`.
* Fix stream termination bug in `streamify`.
* Fix missing comma after empty objects and arrays in `streamify`.
* Improve tests.
* Add `reviver` option for `parse` and `read`.
* Add `space` option for `streamify`, `stringify` and `write`.
* Remove the `debug` option from all functions.

## 0.2.0

* Implement `eventify`.
* Implement `streamify`.
* Implement `stringify`.
* Implement `write`.

## 0.1.0

* Initial release.

web/node_modules/bfj/README.md (generated, vendored, new file, 971 lines)
@@ -0,0 +1,971 @@

# BFJ

[](https://gitlab.com/philbooth/bfj/pipelines)
[](https://www.npmjs.com/package/bfj)
[](https://www.npmjs.com/package/bfj)
[](https://opensource.org/licenses/MIT)

Big-Friendly JSON. Asynchronous streaming functions for large JSON data sets.

* [Why would I want those?](#why-would-i-want-those)
* [Is it fast?](#is-it-fast)
* [What functions does it implement?](#what-functions-does-it-implement)
* [How do I install it?](#how-do-i-install-it)
* [How do I read a JSON file?](#how-do-i-read-a-json-file)
* [How do I parse a stream of JSON?](#how-do-i-parse-a-stream-of-json)
* [How do I selectively parse individual items from a JSON stream?](#how-do-i-selectively-parse-individual-items-from-a-json-stream)
* [How do I write a JSON file?](#how-do-i-write-a-json-file)
* [How do I create a stream of JSON?](#how-do-i-create-a-stream-of-json)
* [How do I create a JSON string?](#how-do-i-create-a-json-string)
* [What other methods are there?](#what-other-methods-are-there)
    * [bfj.walk (stream, options)](#bfjwalk-stream-options)
    * [bfj.eventify (data, options)](#bfjeventify-data-options)
* [What options can I specify?](#what-options-can-i-specify)
    * [Options for parsing functions](#options-for-parsing-functions)
    * [Options for serialisation functions](#options-for-serialisation-functions)
* [Is it possible to pause parsing or serialisation from calling code?](#is-it-possible-to-pause-parsing-or-serialisation-from-calling-code)
* [Can it handle newline-delimited JSON (NDJSON)?](#can-it-handle-newline-delimited-json-ndjson)
* [Why does it default to bluebird promises?](#why-does-it-default-to-bluebird-promises)
* [Can I specify a different promise implementation?](#can-i-specify-a-different-promise-implementation)
* [Is there a change log?](#is-there-a-change-log)
* [How do I set up the dev environment?](#how-do-i-set-up-the-dev-environment)
* [What versions of Node.js does it support?](#what-versions-of-nodejs-does-it-support)
* [What license is it released under?](#what-license-is-it-released-under)

## Why would I want those?

If you need to parse huge JSON strings or stringify huge JavaScript data sets, doing it synchronously monopolises the event loop and can lead to out-of-memory exceptions. BFJ implements asynchronous functions and uses pre-allocated fixed-length arrays to try and alleviate those issues.

## Is it fast?

No.

BFJ yields frequently to avoid monopolising the event loop, interrupting its own execution to let other event handlers run. The frequency of those yields can be controlled with the [`yieldRate` option](#what-options-can-i-specify), but fundamentally it is not designed for speed.

Furthermore, when serialising data to a stream, BFJ uses a fixed-length buffer to avoid exhausting available memory. Whenever that buffer is full, serialisation is paused until the receiving stream processes some more data, regardless of the value of `yieldRate`. You can control the size of the buffer using the [`bufferLength` option](#options-for-serialisation-functions) but really, if you need quick results, BFJ is not for you.

## What functions does it implement?

Nine functions are exported.

Five are concerned with parsing, or turning JSON strings into JavaScript data:

* [`read`](#how-do-i-read-a-json-file) asynchronously parses a JSON file from disk.

* [`parse` and `unpipe`](#how-do-i-parse-a-stream-of-json) are for asynchronously parsing streams of JSON.

* [`match`](#how-do-i-selectively-parse-individual-items-from-a-json-stream) selectively parses individual items from a JSON stream.

* [`walk`](#bfjwalk-stream-options) asynchronously walks a stream, emitting events as it encounters JSON tokens. Analogous to a [SAX parser][sax].

The other four functions handle the reverse transformations, serialising JavaScript data to JSON:

* [`write`](#how-do-i-write-a-json-file) asynchronously serialises data to a JSON file on disk.

* [`streamify`](#how-do-i-create-a-stream-of-json) asynchronously serialises data to a stream of JSON.

* [`stringify`](#how-do-i-create-a-json-string) asynchronously serialises data to a JSON string.

* [`eventify`](#bfjeventify-data-options) asynchronously traverses a data structure depth-first, emitting events as it encounters items. By default it coerces promises, buffers and iterables to JSON-friendly values.

## How do I install it?

If you're using npm:

```
npm i bfj --save
```

Or if you just want the git repo:

```
git clone git@gitlab.com:philbooth/bfj.git
```

## How do I read a JSON file?

```js
const bfj = require('bfj');

bfj.read(path, options)
  .then(data => {
    // :)
  })
  .catch(error => {
    // :(
  });
```

`read` returns a [bluebird promise][promise] and asynchronously parses a JSON file from disk.

It takes two arguments; the path to the JSON file and an [options](#options-for-parsing-functions) object.

If there are no syntax errors, the returned promise is resolved with the parsed data. If syntax errors occur, the promise is rejected with the first error.

## How do I parse a stream of JSON?

```js
const bfj = require('bfj');

// By passing a readable stream to bfj.parse():
bfj.parse(fs.createReadStream(path), options)
  .then(data => {
    // :)
  })
  .catch(error => {
    // :(
  });

// ...or by passing the result from bfj.unpipe() to stream.pipe():
request({ url }).pipe(bfj.unpipe((error, data) => {
  if (error) {
    // :(
  } else {
    // :)
  }
}))
```

* `parse` returns a [bluebird promise][promise] and asynchronously parses a stream of JSON data.

  It takes two arguments; a [readable stream][readable] from which the JSON will be parsed and an [options](#options-for-parsing-functions) object.

  If there are no syntax errors, the returned promise is resolved with the parsed data. If syntax errors occur, the promise is rejected with the first error.

* `unpipe` returns a [writable stream][writable] that can be passed to [`stream.pipe`][pipe], then parses JSON data read from the stream.

  It takes two arguments; a callback function that will be called after parsing is complete and an [options](#options-for-parsing-functions) object.

  If there are no errors, the callback is invoked with the result as the second argument. If errors occur, the first error is passed to the callback as the first argument.

## How do I selectively parse individual items from a JSON stream?

```js
const bfj = require('bfj');

// Call match with your stream and a selector predicate/regex/string
const dataStream = bfj.match(jsonStream, selector, options);

// Get data out of the returned stream with event handlers
dataStream.on('data', item => { /* ... */ });
dataStream.on('end', () => { /* ... */ });
dataStream.on('error', () => { /* ... */ });
dataStream.on('dataError', () => { /* ... */ });

// ...or you can pipe it to another stream
dataStream.pipe(someOtherStream);
```

`match` returns a readable, object-mode stream and asynchronously parses individual matching items from an input JSON stream.

It takes three arguments: a [readable stream][readable] from which the JSON will be parsed; a selector argument for determining matches, which may be a string, a regular expression or a predicate function; and an [options](#options-for-parsing-functions) object.

If the selector is a string, it will be compared to property keys to determine whether each item in the data is a match. If it is a regular expression, the comparison will be made by calling the [RegExp `test` method][regexp-test] with the property key. Predicate functions will be called with three arguments: `key`, `value` and `depth`. If the result of the predicate is a truthy value then the item will be deemed a match.

In addition to the regular options accepted by other parsing functions, you can also specify `minDepth` to only apply the selector to certain depths. This can improve performance and memory usage, if you know that you're not interested in parsing top-level items.

If there are any syntax errors in the JSON, a `dataError` event will be emitted. If any other errors occur, an `error` event will be emitted.
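As a rough sketch of those selector rules (the file name, data shape and predicate here are illustrative, not part of bfj's documentation):

```js
const bfj = require('bfj');
const fs = require('fs');

// Emit every value whose property key is "email", skipping shallow levels,
// using the documented (key, value, depth) predicate signature and minDepth.
const emails = bfj.match(
  fs.createReadStream('./export.json'),                  // hypothetical input file
  (key, value, depth) => key === 'email' && depth > 1,
  { minDepth: 2 }
);

emails.on('data', email => { /* each matching value, already parsed */ });
emails.on('dataError', error => { /* syntax error in the incoming JSON */ });
```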
## How do I write a JSON file?

```js
const bfj = require('bfj');

bfj.write(path, data, options)
  .then(() => {
    // :)
  })
  .catch(error => {
    // :(
  });
```

`write` returns a [bluebird promise][promise] and asynchronously serialises a data structure to a JSON file on disk. The promise is resolved when the file has been written, or rejected with the error if writing failed.

It takes three arguments; the path to the JSON file, the data structure to serialise and an [options](#options-for-serialisation-functions) object.

## How do I create a stream of JSON?

```js
const bfj = require('bfj');

const stream = bfj.streamify(data, options);

// Get data out of the stream with event handlers
stream.on('data', chunk => { /* ... */ });
stream.on('end', () => { /* ... */ });
stream.on('error', () => { /* ... */ });
stream.on('dataError', () => { /* ... */ });

// ...or you can pipe it to another stream
stream.pipe(someOtherStream);
```

`streamify` returns a [readable stream][readable] and asynchronously serialises a data structure to JSON, pushing the result to the returned stream.

It takes two arguments; the data structure to serialise and an [options](#options-for-serialisation-functions) object.

If a circular reference is encountered in the data and `options.circular` is not set to `'ignore'`, a `dataError` event will be emitted. If any other errors occur, an `error` event will be emitted.

## How do I create a JSON string?

```js
const bfj = require('bfj');

bfj.stringify(data, options)
  .then(json => {
    // :)
  })
  .catch(error => {
    // :(
  });
```

`stringify` returns a [bluebird promise][promise] and asynchronously serialises a data structure to a JSON string. The promise is resolved to the JSON string when serialisation is complete.

It takes two arguments; the data structure to serialise and an [options](#options-for-serialisation-functions) object.

## What other methods are there?

### bfj.walk (stream, options)

```js
const bfj = require('bfj');

const emitter = bfj.walk(fs.createReadStream(path), options);

emitter.on(bfj.events.array, () => { /* ... */ });
emitter.on(bfj.events.object, () => { /* ... */ });
emitter.on(bfj.events.property, name => { /* ... */ });
emitter.on(bfj.events.string, value => { /* ... */ });
emitter.on(bfj.events.number, value => { /* ... */ });
emitter.on(bfj.events.literal, value => { /* ... */ });
emitter.on(bfj.events.endArray, () => { /* ... */ });
emitter.on(bfj.events.endObject, () => { /* ... */ });
emitter.on(bfj.events.error, error => { /* ... */ });
emitter.on(bfj.events.dataError, error => { /* ... */ });
emitter.on(bfj.events.end, () => { /* ... */ });
```

`walk` returns an [event emitter][eventemitter] and asynchronously walks a stream of JSON data, emitting events as it encounters tokens.

It takes two arguments; a [readable stream][readable] from which the JSON will be read and an [options](#options-for-parsing-functions) object.

The emitted events are defined as public properties of an object, `bfj.events`:

* `bfj.events.array` indicates that an array context has been entered by encountering the `[` character.

* `bfj.events.endArray` indicates that an array context has been left by encountering the `]` character.

* `bfj.events.object` indicates that an object context has been entered by encountering the `{` character.

* `bfj.events.endObject` indicates that an object context has been left by encountering the `}` character.

* `bfj.events.property` indicates that a property has been encountered in an object. The listener will be passed the name of the property as its argument and the next event to be emitted will represent the property's value.

* `bfj.events.string` indicates that a string has been encountered. The listener will be passed the value as its argument.

* `bfj.events.number` indicates that a number has been encountered. The listener will be passed the value as its argument.

* `bfj.events.literal` indicates that a JSON literal (either `true`, `false` or `null`) has been encountered. The listener will be passed the value as its argument.

* `bfj.events.error` indicates that an error was caught from one of the event handlers in user code. The listener will be passed the `Error` instance as its argument.

* `bfj.events.dataError` indicates that a syntax error was encountered in the incoming JSON stream. The listener will be passed an `Error` instance decorated with `actual`, `expected`, `lineNumber` and `columnNumber` properties as its argument.

* `bfj.events.end` indicates that the end of the input has been reached and the stream is closed.

* `bfj.events.endLine` indicates that a root-level newline character has been encountered in an [NDJSON](#can-it-handle-newline-delimited-json-ndjson) stream. Only emitted if the `ndjson` [option](#options-for-parsing-functions) is set.

If you are using `bfj.walk` to sequentially parse items in an array, you might also be interested in the [bfj-collections] module.

### bfj.eventify (data, options)

```js
const bfj = require('bfj');

const emitter = bfj.eventify(data, options);

emitter.on(bfj.events.array, () => { /* ... */ });
emitter.on(bfj.events.object, () => { /* ... */ });
emitter.on(bfj.events.property, name => { /* ... */ });
emitter.on(bfj.events.string, value => { /* ... */ });
emitter.on(bfj.events.number, value => { /* ... */ });
emitter.on(bfj.events.literal, value => { /* ... */ });
emitter.on(bfj.events.endArray, () => { /* ... */ });
emitter.on(bfj.events.endObject, () => { /* ... */ });
emitter.on(bfj.events.error, error => { /* ... */ });
emitter.on(bfj.events.dataError, error => { /* ... */ });
emitter.on(bfj.events.end, () => { /* ... */ });
```

`eventify` returns an [event emitter][eventemitter] and asynchronously traverses a data structure depth-first, emitting events as it encounters items. By default it coerces promises, buffers and iterables to JSON-friendly values.

It takes two arguments; the data structure to traverse and an [options](#options-for-serialisation-functions) object.

The emitted events are defined as public properties of an object, `bfj.events`:

* `bfj.events.array` indicates that an array has been encountered.

* `bfj.events.endArray` indicates that the end of an array has been encountered.

* `bfj.events.object` indicates that an object has been encountered.

* `bfj.events.endObject` indicates that the end of an object has been encountered.

* `bfj.events.property` indicates that a property has been encountered in an object. The listener will be passed the name of the property as its argument and the next event to be emitted will represent the property's value.

* `bfj.events.string` indicates that a string has been encountered. The listener will be passed the value as its argument.

* `bfj.events.number` indicates that a number has been encountered. The listener will be passed the value as its argument.

* `bfj.events.literal` indicates that a JSON literal (either `true`, `false` or `null`) has been encountered. The listener will be passed the value as its argument.

* `bfj.events.error` indicates that an error was caught from one of the event handlers in user code. The listener will be passed the `Error` instance as its argument.

* `bfj.events.dataError` indicates that a circular reference was encountered in the data and the `circular` option was not set to `'ignore'`. The listener will be passed an `Error` instance as its argument.

* `bfj.events.end` indicates that the end of the data has been reached and no further events will be emitted.

## What options can I specify?

### Options for parsing functions

* `options.reviver`: Transformation function, invoked depth-first against the parsed data structure. This option is analogous to the [reviver parameter for JSON.parse][reviver].

* `options.yieldRate`: The number of data items to process before yielding to the event loop. Smaller values yield to the event loop more frequently, meaning less time will be consumed by bfj per tick but the overall parsing time will be slower. Larger values yield to the event loop less often, meaning slower tick times but faster overall parsing time. The default value is `16384`.

* `options.Promise`: Promise constructor that will be used for promises returned by all methods. If you set this option, please be aware that some promise implementations (including native promises) may cause your process to die with out-of-memory exceptions. Defaults to [bluebird's implementation][promise], which does not have that problem.

* `options.ndjson`: If set to `true`, newline characters at the root level will be treated as delimiters between discrete chunks of JSON. See [NDJSON](#can-it-handle-newline-delimited-json-ndjson) for more information.

* `options.numbers`: For `bfj.match` only, set this to `true` if you wish to match against numbers with a string or regular expression `selector` argument.

* `options.bufferLength`: For `bfj.match` only, the length of the match buffer. Smaller values use less memory but may result in a slower parse time. The default value is `1024`.

* `options.highWaterMark`: For `bfj.match` only, set this if you would like to pass a value for the `highWaterMark` option to the readable stream constructor. An example of passing parsing options follows.
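As a minimal sketch of how these options are passed (the file name and reviver are hypothetical, chosen only to show the shape of the call):

```js
const bfj = require('bfj');
const fs = require('fs');

bfj.parse(fs.createReadStream('./big.json'), {  // hypothetical input file
  reviver: (key, value) => key === 'timestamp' ? new Date(value) : value,
  yieldRate: 1024                               // yield more often than the default 16384
})
  .then(data => { /* parsed data, with revived timestamps */ })
  .catch(error => { /* syntax or stream error */ });
```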
### Options for serialisation functions

* `options.space`: Indentation string or the number of spaces to indent each nested level by. This option is analogous to the [space parameter for JSON.stringify][space].

* `options.promises`: By default, promises are coerced to their resolved value. Set this property to `'ignore'` for improved performance if you don't need to coerce promises.

* `options.buffers`: By default, buffers are coerced using their `toString` method. Set this property to `'ignore'` for improved performance if you don't need to coerce buffers.

* `options.maps`: By default, maps are coerced to plain objects. Set this property to `'ignore'` for improved performance if you don't need to coerce maps.

* `options.iterables`: By default, other iterables (i.e. not arrays, strings or maps) are coerced to arrays. Set this property to `'ignore'` for improved performance if you don't need to coerce iterables.

* `options.circular`: By default, circular references will cause the write to fail. Set this property to `'ignore'` if you'd prefer to silently skip past circular references in the data.

* `options.bufferLength`: The length of the write buffer. Smaller values use less memory but may result in a slower serialisation time. The default value is `1024`.

* `options.highWaterMark`: Set this if you would like to pass a value for the `highWaterMark` option to the readable stream constructor.

* `options.yieldRate`: The number of data items to process before yielding to the event loop. Smaller values yield to the event loop more frequently, meaning less time will be consumed by bfj per tick but the overall serialisation time will be slower. Larger values yield to the event loop less often, meaning slower tick times but faster overall serialisation time. The default value is `16384`.

* `options.Promise`: Promise constructor that will be used for promises returned by all methods. If you set this option, please be aware that some promise implementations (including native promises) may cause your process to die with out-of-memory exceptions. Defaults to [bluebird's implementation][promise], which does not have that problem. A short example of passing serialisation options follows.
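A minimal sketch of passing serialisation options (the data is made up, purely to illustrate the option shapes):

```js
const bfj = require('bfj');

// Hypothetical data containing a circular reference.
const data = { name: 'example' };
data.self = data;

bfj.stringify(data, {
  space: 2,           // indent nested levels, as with JSON.stringify
  circular: 'ignore', // skip circular references instead of failing
  bufferLength: 4096  // larger write buffer, at the cost of more memory
})
  .then(json => { /* serialised JSON, with the circular reference skipped */ });
```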
## Is it possible to pause parsing or serialisation from calling code?

Yes it is! Both [`walk`](#bfjwalk-stream-options) and [`eventify`](#bfjeventify-data-options) decorate their returned event emitters with a `pause` method that will prevent any further events being emitted. The `pause` method itself returns a `resume` function that you can call to indicate that processing should continue.

For example:

```js
const bfj = require('bfj');
const emitter = bfj.walk(fs.createReadStream(path), options);

// Later, when you want to pause parsing:

const resume = emitter.pause();

// Then when you want to resume:

resume();
```

## Can it handle [newline-delimited JSON (NDJSON)](http://ndjson.org/)?

Yes. If you pass the `ndjson` [option](#options-for-parsing-functions) to `bfj.walk`, `bfj.match` or `bfj.parse`, newline characters at the root level will act as delimiters between discrete JSON values:

* `bfj.walk` will emit a `bfj.events.endLine` event each time it encounters a newline character.

* `bfj.match` will just ignore the newlines while it continues looking for matching items.

* `bfj.parse` will resolve with the first value and pause the underlying stream. If it's called again with the same stream, it will resume processing and resolve with the second value. To parse the entire stream, calls should be made sequentially one-at-a-time until the returned promise resolves to `undefined` (`undefined` is not a valid JSON token), as sketched below.

`bfj.unpipe` and `bfj.read` will not parse NDJSON.
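A small sketch of that sequential `bfj.parse` loop over NDJSON (the file name is hypothetical):

```js
const bfj = require('bfj');
const fs = require('fs');

// Drain an NDJSON file one root-level value at a time.
async function readNdjson (path) {
  const stream = fs.createReadStream(path);
  const values = [];
  // Each call resolves with the next root-level value, or undefined at end of input.
  let value = await bfj.parse(stream, { ndjson: true });
  while (value !== undefined) {
    values.push(value);
    value = await bfj.parse(stream, { ndjson: true });
  }
  return values;
}
```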
## Why does it default to bluebird promises?

Until version `4.2.4`, native promises were used. But they were found to cause out-of-memory errors when serialising large amounts of data to JSON, due to [well-documented problems with the native promise implementation](https://alexn.org/blog/2017/10/11/javascript-promise-leaks-memory.html). So in version `5.0.0`, bluebird promises were used instead. In version `5.1.0`, an option was added that enables callers to specify the promise constructor to use. Use it at your own risk.

## Can I specify a different promise implementation?

Yes. Just pass the `Promise` option to any method. If you get out-of-memory errors when using that option, consider changing your promise implementation.

## Is there a change log?

[Yes][history].

## How do I set up the dev environment?

The development environment relies on [Node.js][node], [ESLint], [Mocha], [Chai], [Proxyquire] and [Spooks]. Assuming that you already have node and NPM set up, you just need to run `npm install` to install all of the dependencies as listed in `package.json`.

You can lint the code with the command `npm run lint`.

You can run the tests with the command `npm test`.

## What versions of Node.js does it support?

As of [version `7.0.0`](HISTORY.md#700), only Node.js versions 8 or greater are supported.

Between versions [`3.0.0`](HISTORY.md#300) and [`6.1.2`](HISTORY.md#612), only Node.js versions 6 or greater were supported.

Until [version `2.1.2`](HISTORY.md#212), only Node.js versions 4 or greater were supported.

## What license is it released under?

[MIT][license].

[ci-image]: https://secure.travis-ci.org/philbooth/bfj.png?branch=master
[ci-status]: http://travis-ci.org/#!/philbooth/bfj
[sax]: http://en.wikipedia.org/wiki/Simple_API_for_XML
[promise]: http://bluebirdjs.com/docs/api-reference.html
[bfj-collections]: https://github.com/hash-bang/bfj-collections
[eventemitter]: https://nodejs.org/api/events.html#events_class_eventemitter
[readable]: https://nodejs.org/api/stream.html#stream_readable_streams
[writable]: https://nodejs.org/api/stream.html#stream_writable_streams
[pipe]: https://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
[regexp-test]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/test
[reviver]: https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Using_the_reviver_parameter
[space]: https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The_space_argument
[history]: HISTORY.md
[node]: https://nodejs.org/en/
[eslint]: http://eslint.org/
[mocha]: https://mochajs.org/
[chai]: http://chaijs.com/
[proxyquire]: https://github.com/thlorenz/proxyquire
[spooks]: https://gitlab.com/philbooth/spooks.js
[license]: COPYING

web/node_modules/bfj/package.json (generated, vendored, new file, 53 lines)
@@ -0,0 +1,53 @@

{
  "name": "bfj",
  "version": "7.0.2",
  "description": "Big-friendly JSON. Asynchronous streaming functions for large JSON data sets.",
  "homepage": "https://gitlab.com/philbooth/bfj",
  "bugs": "https://gitlab.com/philbooth/bfj/issues",
  "license": "MIT",
  "author": "Phil Booth (https://gitlab.com/philbooth)",
  "main": "./src",
  "keywords": [
    "json",
    "streamify",
    "stringify",
    "walk",
    "parse",
    "parser",
    "serialise",
    "serialize",
    "read",
    "write",
    "async",
    "asynchronous"
  ],
  "repository": {
    "type": "git",
    "url": "https://gitlab.com/philbooth/bfj.git"
  },
  "engines": {
    "node": ">= 8.0.0"
  },
  "dependencies": {
    "bluebird": "^3.5.5",
    "check-types": "^11.1.1",
    "hoopy": "^0.1.4",
    "tryer": "^1.0.1"
  },
  "devDependencies": {
    "chai": "^4.2.0",
    "eslint": "^6.0.1",
    "mocha": "^6.1.4",
    "please-release-me": "^2.1.2",
    "proxyquire": "^2.1.0",
    "request": "^2.88.0",
    "spooks": "^2.0.0"
  },
  "scripts": {
    "lint": "eslint src",
    "test": "npm run unit && npm run integration",
    "unit": "mocha --ui tdd --reporter spec --recursive --colors --slow 120 test/unit",
    "integration": "mocha --ui tdd --reporter spec --colors test/integration",
    "perf": "wget -O test/mtg.json http://mtgjson.com/json/AllSets-x.json && node test/performance mtg"
  }
}

web/node_modules/bfj/src/datastream.js (generated, vendored, new file, 17 lines)
@@ -0,0 +1,17 @@

'use strict'

const check = require('check-types')
const BfjStream = require('./stream')
const util = require('util')

util.inherits(DataStream, BfjStream)

module.exports = DataStream

// Thin wrapper around BfjStream that forces object mode, so consumers
// receive parsed values rather than string chunks.
function DataStream (read, options) {
  if (check.not.instanceStrict(this, DataStream)) {
    return new DataStream(read, options)
  }

  return BfjStream.call(this, read, { ...options, objectMode: true })
}

web/node_modules/bfj/src/error.js (generated, vendored, new file, 22 lines)
@@ -0,0 +1,22 @@

'use strict'

module.exports = { create }

function create (actual, expected, line, column) {
  const error = new Error(
    /* eslint-disable prefer-template */
    'JSON error: encountered `' + actual +
    '` at line ' + line +
    ', column ' + column +
    ' where `' + expected +
    '` was expected.'
    /* eslint-enable prefer-template */
  )

  error.actual = actual
  error.expected = expected
  error.lineNumber = line
  error.columnNumber = column

  return error
}

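For reference, a small sketch of how this factory behaves (the values are made up); the result is the decorated error that `walk` hands to `dataError` listeners, as described in the readme above:

```js
const error = require('./error') // from within bfj's src directory

const err = error.create('}', ',', 1, 10)
// err.message      === 'JSON error: encountered `}` at line 1, column 10 where `,` was expected.'
// err.actual       === '}'   err.expected     === ','
// err.lineNumber   === 1     err.columnNumber === 10
```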
web/node_modules/bfj/src/eventify.js (generated, vendored, new file, 309 lines; listing truncated)
@@ -0,0 +1,309 @@

'use strict'

const check = require('check-types')
const EventEmitter = require('events').EventEmitter
const events = require('./events')
const promise = require('./promise')

const invalidTypes = {
  undefined: true, // eslint-disable-line no-undefined
  function: true,
  symbol: true
}

module.exports = eventify

/**
 * Public function `eventify`.
 *
 * Returns an event emitter and asynchronously traverses a data structure
 * (depth-first), emitting events as it encounters items. Sanely handles
 * promises, buffers, maps and other iterables. The event emitter is
 * decorated with a `pause` method that can be called to pause processing.
 *
 * @param data: The data structure to traverse.
 *
 * @option promises: 'resolve' or 'ignore', default is 'resolve'.
 *
 * @option buffers: 'toString' or 'ignore', default is 'toString'.
 *
 * @option maps: 'object' or 'ignore', default is 'object'.
 *
 * @option iterables: 'array' or 'ignore', default is 'array'.
 *
 * @option circular: 'error' or 'ignore', default is 'error'.
 *
 * @option yieldRate: The number of data items to process per timeslice,
 *                    default is 16384.
 *
 * @option Promise: The promise constructor to use, defaults to bluebird.
 **/
function eventify (data, options = {}) {
  const coercions = {}
  const emitter = new EventEmitter()
  const Promise = promise(options)
  const references = new Map()

  let count = 0
  let disableCoercions = false
  let ignoreCircularReferences
  let ignoreItems
  let pause
  let yieldRate

  emitter.pause = () => {
    let resolve
    pause = new Promise(res => resolve = res)
    return () => {
      pause = null
      count = 0
      resolve()
    }
  }
  parseOptions()
  setImmediate(begin)

  return emitter

  function parseOptions () {
    parseCoercionOption('promises')
    parseCoercionOption('buffers')
    parseCoercionOption('maps')
    parseCoercionOption('iterables')

    if (Object.keys(coercions).length === 0) {
      disableCoercions = true
    }

    if (options.circular === 'ignore') {
      ignoreCircularReferences = true
    }

    check.assert.maybe.positive(options.yieldRate)
    yieldRate = options.yieldRate || 16384
  }

  function parseCoercionOption (key) {
    if (options[key] !== 'ignore') {
      coercions[key] = true
    }
  }

  function begin () {
    return proceed(data)
      .catch(error => emit(events.error, error))
      .then(() => emit(events.end))
  }

  function proceed (datum) {
    if (++count % yieldRate !== 0) {
      return coerce(datum).then(after)
    }

    return new Promise((resolve, reject) => {
      setImmediate(() => {
        coerce(datum)
          .then(after)
          .then(resolve)
          .catch(reject)
      })
    })

    function after (coerced) {
      if (isInvalid(coerced)) {
        return
      }

      if (coerced === false || coerced === true || coerced === null) {
        return literal(coerced)
      }

      if (Array.isArray(coerced)) {
        return array(coerced)
      }

      const type = typeof coerced

      switch (type) {
        case 'number':
          return value(coerced, type)
        case 'string':
          return value(escapeString(coerced), type)
        default:
          return object(coerced)
      }
    }
  }

  function coerce (datum) {
    if (disableCoercions || check.primitive(datum)) {
      return Promise.resolve(datum)
    }

    if (check.thenable(datum)) {
      return coerceThing(datum, 'promises', coercePromise).then(coerce)
    }

    if (check.instanceStrict(datum, Buffer)) {
      return coerceThing(datum, 'buffers', coerceBuffer)
    }

    if (check.instanceStrict(datum, Map)) {
      return coerceThing(datum, 'maps', coerceMap)
    }

    if (
      check.iterable(datum) &&
      check.not.string(datum) &&
      check.not.array(datum)
    ) {
      return coerceThing(datum, 'iterables', coerceIterable)
    }

    if (check.function(datum.toJSON)) {
      return Promise.resolve(datum.toJSON())
    }

    return Promise.resolve(datum)
  }

  function coerceThing (datum, thing, fn) {
    if (coercions[thing]) {
      return fn(datum)
    }

    return Promise.resolve()
  }

  function coercePromise (p) {
    return p
  }

  function coerceBuffer (buffer) {
    return Promise.resolve(buffer.toString())
  }

  function coerceMap (map) {
    const result = {}

    return coerceCollection(map, result, (item, key) => {
      result[key] = item
    })
  }

  function coerceCollection (coll, target, push) {
    coll.forEach(push)

    return Promise.resolve(target)
  }

  function coerceIterable (iterable) {
    const result = []

    return coerceCollection(iterable, result, item => {
      result.push(item)
    })
  }

  function isInvalid (datum) {
    const type = typeof datum
    return !! invalidTypes[type] || (
      type === 'number' && ! isValidNumber(datum)
    )
  }

  function isValidNumber (datum) {
    return datum > Number.NEGATIVE_INFINITY && datum < Number.POSITIVE_INFINITY
  }

  function literal (datum) {
    return value(datum, 'literal')
  }

  function value (datum, type) {
    return emit(events[type], datum)
  }

  function emit (event, eventData) {
    return (pause || Promise.resolve())
      .then(() => emitter.emit(event, eventData))
      .catch(err => {
        try {
          emitter.emit(events.error, err)
        } catch (_) {
          // When calling user code, anything is possible
        }
      })
  }

  function array (datum) {
    // For an array, collection:object and collection:array are the same.
    return collection(datum, datum, 'array', item => {
      if (isInvalid(item)) {
        return proceed(null)
      }

      return proceed(item)
    })
  }

  function collection (obj, arr, type, action) {
    let ignoreThisItem

    return Promise.resolve()
      .then(() => {
        if (references.has(obj)) {
          ignoreThisItem = ignoreItems = true

          if (! ignoreCircularReferences) {
            return emit(events.dataError, new Error('Circular reference.'))
          }
        } else {
          references.set(obj, true)
        }
      })
      .then(() => emit(events[type]))
      .then(() => item(0))

    function item (index) {
      if (index >= arr.length) {
        if (ignoreThisItem) {
          ignoreItems = false
        }

        if (ignoreItems) {
          return Promise.resolve()
        }

        return emit(events.endPrefix + events[type])
          .then(() => references.delete(obj))
      }

      if (ignoreItems) {
        return item(index + 1)
      }

      return action(arr[index])
        .then(() => item(index + 1))
    }
  }

  function object (datum) {
    // For an object, collection:object and collection:array are different.
    return collection(datum, Object.keys(datum), 'object', key => {
||||
const item = datum[key]
|
||||
|
||||
if (isInvalid(item)) {
|
||||
return Promise.resolve()
|
||||
}
|
||||
|
||||
return emit(events.property, escapeString(key))
|
||||
.then(() => proceed(item))
|
||||
})
|
||||
}
|
||||
|
||||
function escapeString (string) {
|
||||
string = JSON.stringify(string)
|
||||
return string.substring(1, string.length - 1)
|
||||
}
|
||||
}
|
18
web/node_modules/bfj/src/events.js
generated
vendored
Normal file
|
@ -0,0 +1,18 @@
|
|||
'use strict'
|
||||
|
||||
module.exports = {
|
||||
array: 'arr',
|
||||
object: 'obj',
|
||||
property: 'pro',
|
||||
string: 'str',
|
||||
number: 'num',
|
||||
literal: 'lit',
|
||||
endPrefix: 'end-',
|
||||
end: 'end',
|
||||
error: 'err'
|
||||
}
|
||||
|
||||
module.exports.endArray = module.exports.endPrefix + module.exports.array
|
||||
module.exports.endObject = module.exports.endPrefix + module.exports.object
|
||||
module.exports.endLine = `${module.exports.endPrefix}line`
|
||||
module.exports.dataError = `${module.exports.error}-data`
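// With the prefixes above, the derived names resolve to concrete strings:
// endArray === 'end-arr', endObject === 'end-obj', endLine === 'end-line'
// and dataError === 'err-data'.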
|
14
web/node_modules/bfj/src/index.js
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
'use strict'
|
||||
|
||||
module.exports = {
|
||||
walk: require('./walk'),
|
||||
match: require('./match'),
|
||||
parse: require('./parse'),
|
||||
unpipe: require('./unpipe'),
|
||||
read: require('./read'),
|
||||
eventify: require('./eventify'),
|
||||
streamify: require('./streamify'),
|
||||
stringify: require('./stringify'),
|
||||
write: require('./write'),
|
||||
events: require('./events')
|
||||
}
|
17
web/node_modules/bfj/src/jsonstream.js
generated
vendored
Normal file
|
@ -0,0 +1,17 @@
|
|||
'use strict'
|
||||
|
||||
const check = require('check-types')
|
||||
const BfjStream = require('./stream')
|
||||
const util = require('util')
|
||||
|
||||
util.inherits(JsonStream, BfjStream)
|
||||
|
||||
module.exports = JsonStream
|
||||
|
||||
function JsonStream (read, options) {
|
||||
if (check.not.instanceStrict(this, JsonStream)) {
|
||||
return new JsonStream(read, options)
|
||||
}
|
||||
|
||||
return BfjStream.call(this, read, { ...options, encoding: 'utf8' })
|
||||
}
|
234
web/node_modules/bfj/src/match.js
generated
vendored
Normal file
|
@ -0,0 +1,234 @@
|
|||
'use strict'
|
||||
|
||||
const check = require('check-types')
|
||||
const DataStream = require('./datastream')
|
||||
const events = require('./events')
|
||||
const Hoopy = require('hoopy')
|
||||
const walk = require('./walk')
|
||||
|
||||
const DEFAULT_BUFFER_LENGTH = 1024
|
||||
|
||||
module.exports = match
|
||||
|
||||
/**
|
||||
* Public function `match`.
|
||||
*
|
||||
* Asynchronously parses a stream of JSON data, returning a stream of items
* that match the selector argument. Note that a `null` value will never be
* matched, because `null` is used to signify end-of-stream in Node.js.
|
||||
*
|
||||
* @param stream: Readable instance representing the incoming JSON.
|
||||
*
|
||||
* @param selector: Regular expression, string or predicate function used to
|
||||
* identify matches. If a regular expression or string is
|
||||
* passed, only property keys are tested. If a predicate is
|
||||
* passed, both the key and the value are passed to it as
|
||||
* arguments.
|
||||
*
|
||||
* @option minDepth: Number indicating the minimum depth to apply the selector
|
||||
* to. The default is `0`, but setting it to a higher value
|
||||
* can improve performance and reduce memory usage by
|
||||
* eliminating the need to actualise top-level items.
|
||||
*
|
||||
* @option numbers: Boolean, indicating whether numerical keys (e.g. array
|
||||
* indices) should be coerced to strings before testing the
|
||||
* match. Only applies if the `selector` argument is a string
|
||||
* or regular expression.
|
||||
*
|
||||
* @option ndjson: Set this to true to parse newline-delimited JSON,
|
||||
* default is `false`.
|
||||
*
|
||||
* @option yieldRate: The number of data items to process per timeslice,
|
||||
* default is 16384.
|
||||
*
|
||||
* @option bufferLength: The length of the match buffer, default is 1024.
|
||||
*
|
||||
* @option highWaterMark: If set, will be passed to the readable stream constructor
|
||||
* as the value for the highWaterMark option.
|
||||
*
|
||||
* @option Promise: The promise constructor to use, defaults to bluebird.
|
||||
**/
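// Illustrative usage (not part of the upstream source). The file name and the
// 'price' selector below are hypothetical; the returned stream emits one 'data'
// event per matched value.
//
//   const fs = require('fs')
//   const match = require('./match')
//
//   const matches = match(fs.createReadStream('data.json'), 'price', { minDepth: 1 })
//   matches.on('data', item => console.log('matched:', item))
//   matches.on('dataError', err => console.error('bad JSON:', err))
//   matches.on('end', () => console.log('no more matches'))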
|
||||
function match (stream, selector, options = {}) {
|
||||
const scopes = []
|
||||
const properties = []
|
||||
const emitter = walk(stream, options)
|
||||
const matches = new Hoopy(options.bufferLength || DEFAULT_BUFFER_LENGTH)
|
||||
let streamOptions
|
||||
const { highWaterMark } = options
|
||||
if (highWaterMark) {
|
||||
streamOptions = { highWaterMark }
|
||||
}
|
||||
const results = new DataStream(read, streamOptions)
|
||||
|
||||
let selectorFunction, selectorString, resume
|
||||
let coerceNumbers = false
|
||||
let awaitPush = true
|
||||
let isEnded = false
|
||||
let length = 0
|
||||
let index = 0
|
||||
|
||||
const minDepth = options.minDepth || 0
|
||||
check.assert.greaterOrEqual(minDepth, 0)
|
||||
|
||||
if (check.function(selector)) {
|
||||
selectorFunction = selector
|
||||
selector = null
|
||||
} else {
|
||||
coerceNumbers = !! options.numbers
|
||||
|
||||
if (check.string(selector)) {
|
||||
check.assert.nonEmptyString(selector)
|
||||
selectorString = selector
|
||||
selector = null
|
||||
} else {
|
||||
check.assert.instanceStrict(selector, RegExp)
|
||||
}
|
||||
}
|
||||
|
||||
emitter.on(events.array, array)
|
||||
emitter.on(events.object, object)
|
||||
emitter.on(events.property, property)
|
||||
emitter.on(events.endArray, endScope)
|
||||
emitter.on(events.endObject, endScope)
|
||||
emitter.on(events.string, value)
|
||||
emitter.on(events.number, value)
|
||||
emitter.on(events.literal, value)
|
||||
emitter.on(events.end, end)
|
||||
emitter.on(events.error, error)
|
||||
emitter.on(events.dataError, dataError)
|
||||
|
||||
return results
|
||||
|
||||
function read () {
|
||||
if (awaitPush) {
|
||||
awaitPush = false
|
||||
|
||||
if (isEnded) {
|
||||
if (length > 0) {
|
||||
after()
|
||||
}
|
||||
|
||||
return endResults()
|
||||
}
|
||||
}
|
||||
|
||||
if (resume) {
|
||||
const resumeCopy = resume
|
||||
resume = null
|
||||
resumeCopy()
|
||||
after()
|
||||
}
|
||||
}
|
||||
|
||||
function after () {
|
||||
if (awaitPush || resume) {
|
||||
return
|
||||
}
|
||||
|
||||
let i
|
||||
|
||||
for (i = 0; i < length && ! resume; ++i) {
|
||||
if (! results.push(matches[i + index])) {
|
||||
pause()
|
||||
}
|
||||
}
|
||||
|
||||
if (i === length) {
|
||||
index = length = 0
|
||||
} else {
|
||||
length -= i
|
||||
index += i
|
||||
}
|
||||
}
|
||||
|
||||
function pause () {
|
||||
resume = emitter.pause()
|
||||
}
|
||||
|
||||
function endResults () {
|
||||
if (! awaitPush) {
|
||||
results.push(null)
|
||||
}
|
||||
}
|
||||
|
||||
function array () {
|
||||
scopes.push([])
|
||||
}
|
||||
|
||||
function object () {
|
||||
scopes.push({})
|
||||
}
|
||||
|
||||
function property (name) {
|
||||
if (scopes.length < minDepth) {
|
||||
return
|
||||
}
|
||||
|
||||
properties.push(name)
|
||||
}
|
||||
|
||||
function endScope () {
|
||||
value(scopes.pop())
|
||||
}
|
||||
|
||||
function value (v) {
|
||||
let key
|
||||
|
||||
if (scopes.length < minDepth) {
|
||||
return
|
||||
}
|
||||
|
||||
if (scopes.length > 0) {
|
||||
const scope = scopes[scopes.length - 1]
|
||||
|
||||
if (Array.isArray(scope)) {
|
||||
key = scope.length
|
||||
} else {
|
||||
key = properties.pop()
|
||||
}
|
||||
|
||||
scope[key] = v
|
||||
}
|
||||
|
||||
if (v === null) {
|
||||
return
|
||||
}
|
||||
|
||||
if (selectorFunction) {
|
||||
if (selectorFunction(key, v, scopes.length)) {
|
||||
push(v)
|
||||
}
|
||||
} else {
|
||||
if (coerceNumbers && typeof key === 'number') {
|
||||
key = key.toString()
|
||||
}
|
||||
|
||||
if ((selectorString && selectorString === key) || (selector && selector.test(key))) {
|
||||
push(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function push (v) {
|
||||
if (length + 1 === matches.length) {
|
||||
pause()
|
||||
}
|
||||
|
||||
matches[index + length++] = v
|
||||
|
||||
after()
|
||||
}
|
||||
|
||||
function end () {
|
||||
isEnded = true
|
||||
endResults()
|
||||
}
|
||||
|
||||
function error (e) {
|
||||
results.emit('error', e)
|
||||
}
|
||||
|
||||
function dataError (e) {
|
||||
results.emit('dataError', e)
|
||||
}
|
||||
}
|
45
web/node_modules/bfj/src/memory.js
generated
vendored
Normal file
|
@ -0,0 +1,45 @@
|
|||
'use strict'
|
||||
|
||||
const PROPERTIES = [ 'rss', 'heapTotal', 'heapUsed', 'external' ]
|
||||
|
||||
let memory
|
||||
|
||||
module.exports = {
|
||||
initialise,
|
||||
update,
|
||||
report
|
||||
}
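// Illustrative usage (not part of the upstream source): this is an internal
// profiling helper; the one-second sample interval below is an arbitrary choice.
//
//   const memory = require('./memory')
//
//   memory.initialise()
//   setInterval(() => memory.update(), 1000).unref()
//   process.on('exit', () => memory.report())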
|
||||
|
||||
function initialise () {
|
||||
memory = PROPERTIES.reduce((result, name) => {
|
||||
result[name] = {
|
||||
sum: 0,
|
||||
hwm: 0
|
||||
}
|
||||
return result
|
||||
}, { count: 0 })
|
||||
}
|
||||
|
||||
function update () {
const currentMemory = process.memoryUsage()
PROPERTIES.forEach(name => updateProperty(name, currentMemory))
// Count each sample so that report() can compute a mean instead of dividing by zero.
memory.count += 1
}
|
||||
|
||||
function updateProperty (name, currentMemory) {
|
||||
const m = memory[name]
|
||||
const c = currentMemory[name]
|
||||
m.sum += c
|
||||
if (c > m.hwm) {
|
||||
m.hwm = c
|
||||
}
|
||||
}
|
||||
|
||||
function report () {
|
||||
PROPERTIES.forEach(name => reportProperty(name))
|
||||
}
|
||||
|
||||
function reportProperty (name) {
|
||||
const m = memory[name]
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`mean ${name}: ${m.sum / memory.count}; hwm: ${m.hwm}`)
|
||||
}
|
181
web/node_modules/bfj/src/parse.js
generated
vendored
Normal file
|
@ -0,0 +1,181 @@
|
|||
'use strict'
|
||||
|
||||
const check = require('check-types')
|
||||
const events = require('./events')
|
||||
const promise = require('./promise')
|
||||
const walk = require('./walk')
|
||||
|
||||
module.exports = parse
|
||||
|
||||
const NDJSON_STATE = new Map()
|
||||
|
||||
/**
|
||||
* Public function `parse`.
|
||||
*
|
||||
* Returns a promise and asynchronously parses a stream of JSON data. If
|
||||
* there are no errors, the promise is resolved with the parsed data. If
|
||||
* errors occur, the promise is rejected with the first error.
|
||||
*
|
||||
* @param stream: Readable instance representing the incoming JSON.
|
||||
*
|
||||
* @option reviver: Transformation function, invoked depth-first.
|
||||
*
|
||||
* @option yieldRate: The number of data items to process per timeslice,
|
||||
* default is 16384.
|
||||
*
|
||||
* @option Promise: The promise constructor to use, defaults to bluebird.
|
||||
*
|
||||
* @option ndjson: Set this to true to parse newline-delimited JSON. In
|
||||
* this case, each call will be resolved with one value
|
||||
* from the stream. To parse the entire stream, calls
|
||||
* should be made sequentially one-at-a-time until the
|
||||
* returned promise resolves to `undefined`.
|
||||
**/
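// Illustrative usage (not part of the upstream source), assuming a hypothetical
// file 'data.json' on disk:
//
//   const fs = require('fs')
//   const parse = require('./parse')
//
//   parse(fs.createReadStream('data.json'), { yieldRate: 1024 })
//     .then(data => console.log('parsed:', data))
//     .catch(err => console.error('failed:', err))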
|
||||
function parse (stream, options = {}) {
|
||||
const Promise = promise(options)
|
||||
|
||||
try {
|
||||
check.assert.maybe.function(options.reviver, 'Invalid reviver option')
|
||||
} catch (err) {
|
||||
return Promise.reject(err)
|
||||
}
|
||||
|
||||
const errors = []
|
||||
const scopes = []
|
||||
const reviver = options.reviver
|
||||
const shouldHandleNdjson = !! options.ndjson
|
||||
|
||||
let emitter, resolve, reject, scopeKey
|
||||
if (shouldHandleNdjson && NDJSON_STATE.has(stream)) {
|
||||
const state = NDJSON_STATE.get(stream)
|
||||
NDJSON_STATE.delete(stream)
|
||||
emitter = state.emitter
|
||||
setImmediate(state.resume)
|
||||
} else {
|
||||
emitter = walk(stream, options)
|
||||
}
|
||||
|
||||
emitter.on(events.array, array)
|
||||
emitter.on(events.object, object)
|
||||
emitter.on(events.property, property)
|
||||
emitter.on(events.string, value)
|
||||
emitter.on(events.number, value)
|
||||
emitter.on(events.literal, value)
|
||||
emitter.on(events.endArray, endScope)
|
||||
emitter.on(events.endObject, endScope)
|
||||
emitter.on(events.end, end)
|
||||
emitter.on(events.error, error)
|
||||
emitter.on(events.dataError, error)
|
||||
|
||||
if (shouldHandleNdjson) {
|
||||
emitter.on(events.endLine, endLine)
|
||||
}
|
||||
|
||||
return new Promise((res, rej) => {
|
||||
resolve = res
|
||||
reject = rej
|
||||
})
|
||||
|
||||
function array () {
|
||||
if (errors.length > 0) {
|
||||
return
|
||||
}
|
||||
|
||||
beginScope([])
|
||||
}
|
||||
|
||||
function beginScope (parsed) {
|
||||
if (errors.length > 0) {
|
||||
return
|
||||
}
|
||||
|
||||
if (scopes.length > 0) {
|
||||
value(parsed)
|
||||
}
|
||||
|
||||
scopes.push(parsed)
|
||||
}
|
||||
|
||||
function value (v) {
|
||||
if (errors.length > 0) {
|
||||
return
|
||||
}
|
||||
|
||||
if (scopes.length === 0) {
|
||||
return scopes.push(v)
|
||||
}
|
||||
|
||||
const scope = scopes[scopes.length - 1]
|
||||
|
||||
if (scopeKey) {
|
||||
scope[scopeKey] = v
|
||||
scopeKey = null
|
||||
} else {
|
||||
scope.push(v)
|
||||
}
|
||||
}
|
||||
|
||||
function object () {
|
||||
if (errors.length > 0) {
|
||||
return
|
||||
}
|
||||
|
||||
beginScope({})
|
||||
}
|
||||
|
||||
function property (name) {
|
||||
if (errors.length > 0) {
|
||||
return
|
||||
}
|
||||
|
||||
scopeKey = name
|
||||
}
|
||||
|
||||
function endScope () {
|
||||
if (errors.length > 0) {
|
||||
return
|
||||
}
|
||||
|
||||
if (scopes.length > 1) {
|
||||
scopes.pop()
|
||||
}
|
||||
}
|
||||
|
||||
function end () {
|
||||
if (shouldHandleNdjson) {
|
||||
const resume = emitter.pause()
|
||||
emitter.removeAllListeners()
|
||||
NDJSON_STATE.set(stream, { emitter, resume })
|
||||
}
|
||||
|
||||
if (errors.length > 0) {
|
||||
return reject(errors[0])
|
||||
}
|
||||
|
||||
if (reviver) {
|
||||
scopes[0] = transform(scopes[0], '')
|
||||
}
|
||||
|
||||
resolve(scopes[0])
|
||||
}
|
||||
|
||||
function transform (obj, key) {
|
||||
if (obj && typeof obj === 'object') {
|
||||
Object.entries(obj).forEach(([ k, v ]) => {
|
||||
obj[k] = transform(v, k)
|
||||
})
|
||||
}
|
||||
|
||||
return reviver(key, obj)
|
||||
}
|
||||
|
||||
function error (e) {
|
||||
errors.push(e)
|
||||
}
|
||||
|
||||
function endLine () {
|
||||
if (scopes.length > 0) {
|
||||
end()
|
||||
}
|
||||
}
|
||||
}
|
3
web/node_modules/bfj/src/promise.js
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
'use strict'
|
||||
|
||||
module.exports = (options = {}) => options.Promise || require('bluebird')
|
26
web/node_modules/bfj/src/read.js
generated
vendored
Normal file
|
@ -0,0 +1,26 @@
|
|||
'use strict'
|
||||
|
||||
const fs = require('fs')
|
||||
const parse = require('./parse')
|
||||
|
||||
module.exports = read
|
||||
|
||||
/**
|
||||
* Public function `read`.
|
||||
*
|
||||
* Returns a promise and asynchronously parses a JSON file read from disk. If
|
||||
* there are no errors, the promise is resolved with the parsed data. If errors
|
||||
* occur, the promise is rejected with the first error.
|
||||
*
|
||||
* @param path: Path to the JSON file.
|
||||
*
|
||||
* @option reviver: Transformation function, invoked depth-first.
|
||||
*
|
||||
* @option yieldRate: The number of data items to process per timeslice,
|
||||
* default is 16384.
|
||||
*
|
||||
* @option Promise: The promise constructor to use, defaults to bluebird.
|
||||
**/
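// Illustrative usage (not part of the upstream source), assuming a hypothetical
// path './config.json':
//
//   const read = require('./read')
//
//   read('./config.json')
//     .then(config => console.log(config))
//     .catch(err => console.error(err))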
|
||||
function read (path, options) {
|
||||
return parse(fs.createReadStream(path, options), { ...options, ndjson: false })
|
||||
}
|
23
web/node_modules/bfj/src/stream.js
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
|
|||
'use strict'
|
||||
|
||||
const util = require('util')
|
||||
const Readable = require('stream').Readable
|
||||
const check = require('check-types')
|
||||
|
||||
util.inherits(BfjStream, Readable)
|
||||
|
||||
module.exports = BfjStream
|
||||
|
||||
function BfjStream (read, options) {
|
||||
if (check.not.instanceStrict(this, BfjStream)) {
|
||||
return new BfjStream(read, options)
|
||||
}
|
||||
|
||||
check.assert.function(read, 'Invalid read implementation')
|
||||
|
||||
this._read = function () { // eslint-disable-line no-underscore-dangle
|
||||
read()
|
||||
}
|
||||
|
||||
return Readable.call(this, options)
|
||||
}
|
283
web/node_modules/bfj/src/streamify.js
generated
vendored
Normal file
|
@ -0,0 +1,283 @@
|
|||
'use strict'
|
||||
|
||||
const check = require('check-types')
|
||||
const eventify = require('./eventify')
|
||||
const events = require('./events')
|
||||
const JsonStream = require('./jsonstream')
|
||||
const Hoopy = require('hoopy')
|
||||
const promise = require('./promise')
|
||||
const tryer = require('tryer')
|
||||
|
||||
const DEFAULT_BUFFER_LENGTH = 1024
|
||||
|
||||
module.exports = streamify
|
||||
|
||||
/**
|
||||
* Public function `streamify`.
|
||||
*
|
||||
* Asynchronously serialises a data structure to a stream of JSON
|
||||
* data. Sanely handles promises, buffers, maps and other iterables.
|
||||
*
|
||||
* @param data: The data to transform.
|
||||
*
|
||||
* @option space: Indentation string, or the number of spaces
|
||||
* to indent each nested level by.
|
||||
*
|
||||
* @option promises: 'resolve' or 'ignore', default is 'resolve'.
|
||||
*
|
||||
* @option buffers: 'toString' or 'ignore', default is 'toString'.
|
||||
*
|
||||
* @option maps: 'object' or 'ignore', default is 'object'.
|
||||
*
|
||||
* @option iterables: 'array' or 'ignore', default is 'array'.
|
||||
*
|
||||
* @option circular: 'error' or 'ignore', default is 'error'.
|
||||
*
|
||||
* @option yieldRate: The number of data items to process per timeslice,
|
||||
* default is 16384.
|
||||
*
|
||||
* @option bufferLength: The length of the buffer, default is 1024.
|
||||
*
|
||||
* @option highWaterMark: If set, will be passed to the readable stream constructor
|
||||
* as the value for the highWaterMark option.
|
||||
*
|
||||
* @option Promise: The promise constructor to use, defaults to bluebird.
|
||||
**/
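// Illustrative usage (not part of the upstream source): piping the serialised
// JSON to a hypothetical output file.
//
//   const fs = require('fs')
//   const streamify = require('./streamify')
//
//   const stream = streamify({ foo: 'bar', when: Promise.resolve(new Date(0)) }, { space: 2 })
//   stream.pipe(fs.createWriteStream('out.json'))
//   stream.on('dataError', err => console.error('invalid data:', err))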
|
||||
function streamify (data, options = {}) {
|
||||
const emitter = eventify(data, options)
|
||||
const json = new Hoopy(options.bufferLength || DEFAULT_BUFFER_LENGTH)
|
||||
const Promise = promise(options)
|
||||
const space = normaliseSpace(options)
|
||||
let streamOptions
|
||||
const { highWaterMark } = options
|
||||
if (highWaterMark) {
|
||||
streamOptions = { highWaterMark }
|
||||
}
|
||||
const stream = new JsonStream(read, streamOptions)
|
||||
|
||||
let awaitPush = true
|
||||
let index = 0
|
||||
let indentation = ''
|
||||
let isEnded
|
||||
let isPaused = false
|
||||
let isProperty
|
||||
let length = 0
|
||||
let mutex = Promise.resolve()
|
||||
let needsComma
|
||||
|
||||
emitter.on(events.array, noRacing(array))
|
||||
emitter.on(events.object, noRacing(object))
|
||||
emitter.on(events.property, noRacing(property))
|
||||
emitter.on(events.string, noRacing(string))
|
||||
emitter.on(events.number, noRacing(value))
|
||||
emitter.on(events.literal, noRacing(value))
|
||||
emitter.on(events.endArray, noRacing(endArray))
|
||||
emitter.on(events.endObject, noRacing(endObject))
|
||||
emitter.on(events.end, noRacing(end))
|
||||
emitter.on(events.error, noRacing(error))
|
||||
emitter.on(events.dataError, noRacing(dataError))
|
||||
|
||||
return stream
|
||||
|
||||
function read () {
|
||||
if (awaitPush) {
|
||||
awaitPush = false
|
||||
|
||||
if (isEnded) {
|
||||
if (length > 0) {
|
||||
after()
|
||||
}
|
||||
|
||||
return endStream()
|
||||
}
|
||||
}
|
||||
|
||||
if (isPaused) {
|
||||
after()
|
||||
}
|
||||
}
|
||||
|
||||
function after () {
|
||||
if (awaitPush) {
|
||||
return
|
||||
}
|
||||
|
||||
let i
|
||||
|
||||
for (i = 0; i < length && ! awaitPush; ++i) {
|
||||
if (! stream.push(json[i + index], 'utf8')) {
|
||||
awaitPush = true
|
||||
}
|
||||
}
|
||||
|
||||
if (i === length) {
|
||||
index = length = 0
|
||||
} else {
|
||||
length -= i
|
||||
index += i
|
||||
}
|
||||
}
|
||||
|
||||
function endStream () {
|
||||
if (! awaitPush) {
|
||||
stream.push(null)
|
||||
}
|
||||
}
|
||||
|
||||
function noRacing (handler) {
|
||||
return eventData => mutex = mutex.then(() => handler(eventData))
|
||||
}
|
||||
|
||||
function array () {
|
||||
return beforeScope()
|
||||
.then(() => addJson('['))
|
||||
.then(() => afterScope())
|
||||
}
|
||||
|
||||
function beforeScope () {
|
||||
return before(true)
|
||||
}
|
||||
|
||||
function before (isScope) {
|
||||
if (isProperty) {
|
||||
isProperty = false
|
||||
|
||||
if (space) {
|
||||
return addJson(' ')
|
||||
}
|
||||
|
||||
return Promise.resolve()
|
||||
}
|
||||
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
if (needsComma) {
|
||||
if (isScope) {
|
||||
needsComma = false
|
||||
}
|
||||
|
||||
return addJson(',')
|
||||
}
|
||||
|
||||
if (! isScope) {
|
||||
needsComma = true
|
||||
}
|
||||
})
|
||||
.then(() => {
|
||||
if (space && indentation) {
|
||||
return indent()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function addJson (chunk) {
|
||||
if (length + 1 <= json.length) {
|
||||
json[index + length++] = chunk
|
||||
after()
|
||||
return Promise.resolve()
|
||||
}
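// The circular buffer is full: pause the upstream emitter, then use tryer (a
// negative interval means it backs off exponentially) to wait until `after`
// has drained enough space for this chunk before resuming.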
|
||||
|
||||
isPaused = true
|
||||
return new Promise(resolve => {
|
||||
const unpause = emitter.pause()
|
||||
tryer({
|
||||
interval: -10,
|
||||
until () {
|
||||
return length + 1 <= json.length
|
||||
},
|
||||
pass () {
|
||||
isPaused = false
|
||||
json[index + length++] = chunk
|
||||
resolve()
|
||||
setImmediate(unpause)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function indent () {
|
||||
return addJson(`\n${indentation}`)
|
||||
}
|
||||
|
||||
function afterScope () {
|
||||
needsComma = false
|
||||
|
||||
if (space) {
|
||||
indentation += space
|
||||
}
|
||||
}
|
||||
|
||||
function object () {
|
||||
return beforeScope()
|
||||
.then(() => addJson('{'))
|
||||
.then(() => afterScope())
|
||||
}
|
||||
|
||||
function property (name) {
|
||||
return before()
|
||||
.then(() => addJson(`"${name}":`))
|
||||
.then(() => {
|
||||
isProperty = true
|
||||
})
|
||||
}
|
||||
|
||||
function string (s) {
|
||||
return value(`"${s}"`)
|
||||
}
|
||||
|
||||
function value (v) {
|
||||
return before()
|
||||
.then(() => addJson(`${v}`))
|
||||
}
|
||||
|
||||
function endArray () {
|
||||
return beforeScopeEnd()
|
||||
.then(() => addJson(']'))
|
||||
.then(() => afterScopeEnd())
|
||||
}
|
||||
|
||||
function beforeScopeEnd () {
|
||||
if (space) {
|
||||
indentation = indentation.substr(space.length)
|
||||
|
||||
return indent()
|
||||
}
|
||||
|
||||
return Promise.resolve()
|
||||
}
|
||||
|
||||
function afterScopeEnd () {
|
||||
needsComma = true
|
||||
}
|
||||
|
||||
function endObject () {
|
||||
return beforeScopeEnd()
|
||||
.then(() => addJson('}'))
|
||||
.then(() => afterScopeEnd())
|
||||
}
|
||||
|
||||
function end () {
|
||||
after()
|
||||
|
||||
isEnded = true
|
||||
endStream()
|
||||
}
|
||||
|
||||
function error (err) {
|
||||
stream.emit('error', err)
|
||||
}
|
||||
|
||||
function dataError (err) {
|
||||
stream.emit('dataError', err)
|
||||
}
|
||||
}
|
||||
|
||||
function normaliseSpace (options) {
|
||||
if (check.positive(options.space)) {
|
||||
return new Array(options.space + 1).join(' ')
|
||||
}
|
||||
|
||||
if (check.nonEmptyString(options.space)) {
|
||||
return options.space
|
||||
}
|
||||
}
|
67
web/node_modules/bfj/src/stringify.js
generated
vendored
Normal file
|
@ -0,0 +1,67 @@
|
|||
'use strict'
|
||||
|
||||
const promise = require('./promise')
|
||||
const streamify = require('./streamify')
|
||||
|
||||
module.exports = stringify
|
||||
|
||||
/**
|
||||
* Public function `stringify`.
|
||||
*
|
||||
* Returns a promise and asynchronously serialises a data structure to a
|
||||
* JSON string. Sanely handles promises, buffers, maps and other iterables.
|
||||
*
|
||||
* @param data: The data to transform
|
||||
*
|
||||
* @option space: Indentation string, or the number of spaces
|
||||
* to indent each nested level by.
|
||||
*
|
||||
* @option promises: 'resolve' or 'ignore', default is 'resolve'.
|
||||
*
|
||||
* @option buffers: 'toString' or 'ignore', default is 'toString'.
|
||||
*
|
||||
* @option maps: 'object' or 'ignore', default is 'object'.
|
||||
*
|
||||
* @option iterables: 'array' or 'ignore', default is 'array'.
|
||||
*
|
||||
* @option circular: 'error' or 'ignore', default is 'error'.
|
||||
*
|
||||
* @option yieldRate: The number of data items to process per timeslice,
|
||||
* default is 16384.
|
||||
*
|
||||
* @option bufferLength: The length of the buffer, default is 1024.
|
||||
*
|
||||
* @option highWaterMark: If set, will be passed to the readable stream constructor
|
||||
* as the value for the highWaterMark option.
|
||||
*
|
||||
* @option Promise: The promise constructor to use, defaults to bluebird.
|
||||
**/
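// Illustrative usage (not part of the upstream source); the Set below is just
// an example of an iterable being coerced to an array by default.
//
//   const stringify = require('./stringify')
//
//   stringify({ answer: 42, tags: new Set([ 'a', 'b' ]) }, { space: 2 })
//     .then(json => console.log(json))
//     .catch(err => console.error(err))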
|
||||
function stringify (data, options) {
|
||||
const json = []
|
||||
const Promise = promise(options)
|
||||
const stream = streamify(data, options)
|
||||
|
||||
let resolve, reject
|
||||
|
||||
stream.on('data', read)
|
||||
stream.on('end', end)
|
||||
stream.on('error', error)
|
||||
stream.on('dataError', error)
|
||||
|
||||
return new Promise((res, rej) => {
|
||||
resolve = res
|
||||
reject = rej
|
||||
})
|
||||
|
||||
function read (chunk) {
|
||||
json.push(chunk)
|
||||
}
|
||||
|
||||
function end () {
|
||||
resolve(json.join(''))
|
||||
}
|
||||
|
||||
function error (e) {
|
||||
reject(e)
|
||||
}
|
||||
}
|
37
web/node_modules/bfj/src/unpipe.js
generated
vendored
Normal file
|
@ -0,0 +1,37 @@
|
|||
'use strict'
|
||||
|
||||
const stream = require('stream')
|
||||
const check = require('check-types')
|
||||
const parse = require('./parse')
|
||||
|
||||
module.exports = unpipe
|
||||
|
||||
/**
|
||||
* Public function `unpipe`.
|
||||
*
|
||||
* Returns a writable stream that a readable stream can be piped into, then parses JSON
|
||||
* data read from the stream. If there are no errors, the callback is invoked with
|
||||
* the result as the second argument. If errors occur, the first error is passed to
|
||||
* the callback as the first argument.
|
||||
*
|
||||
* @param callback: Function that will be called after parsing is complete.
|
||||
*
|
||||
* @option reviver: Transformation function, invoked depth-first.
|
||||
*
|
||||
* @option discard: The number of characters to process before discarding them
|
||||
* to save memory. The default value is `1048576`.
|
||||
*
|
||||
* @option yieldRate: The number of data items to process per timeslice,
|
||||
* default is 16384.
|
||||
**/
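// Illustrative usage (not part of the upstream source); `source` stands in for
// any readable stream of JSON, e.g. an HTTP response body.
//
//   const unpipe = require('./unpipe')
//
//   source.pipe(unpipe((err, data) => {
//     if (err) {
//       return console.error(err)
//     }
//     console.log('parsed:', data)
//   }))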
|
||||
function unpipe (callback, options) {
|
||||
check.assert.function(callback, 'Invalid callback argument')
|
||||
|
||||
const jsonstream = new stream.PassThrough()
|
||||
|
||||
parse(jsonstream, { ...options, ndjson: false })
|
||||
.then(data => callback(null, data))
|
||||
.catch(error => callback(error))
|
||||
|
||||
return jsonstream
|
||||
}
|
720
web/node_modules/bfj/src/walk.js
generated
vendored
Normal file
|
@ -0,0 +1,720 @@
|
|||
'use strict'
|
||||
|
||||
const check = require('check-types')
|
||||
const error = require('./error')
|
||||
const EventEmitter = require('events').EventEmitter
|
||||
const events = require('./events')
|
||||
const promise = require('./promise')
|
||||
|
||||
const terminators = {
|
||||
obj: '}',
|
||||
arr: ']'
|
||||
}
|
||||
|
||||
const escapes = {
|
||||
/* eslint-disable quote-props */
|
||||
'"': '"',
|
||||
'\\': '\\',
|
||||
'/': '/',
|
||||
'b': '\b',
|
||||
'f': '\f',
|
||||
'n': '\n',
|
||||
'r': '\r',
|
||||
't': '\t'
|
||||
/* eslint-enable quote-props */
|
||||
}
|
||||
|
||||
module.exports = initialise
|
||||
|
||||
/**
|
||||
* Public function `walk`.
|
||||
*
|
||||
* Returns an event emitter and asynchronously walks a stream of JSON data,
|
||||
* emitting events as it encounters tokens. The event emitter is decorated
|
||||
* with a `pause` method that can be called to pause processing.
|
||||
*
|
||||
* @param stream: Readable instance representing the incoming JSON.
|
||||
*
|
||||
* @option yieldRate: The number of data items to process per timeslice,
|
||||
* default is 16384.
|
||||
*
|
||||
* @option Promise: The promise constructor to use, defaults to bluebird.
|
||||
*
|
||||
* @option ndjson: Set this to true to parse newline-delimited JSON.
|
||||
**/
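// Illustrative usage (not part of the upstream source), assuming a hypothetical
// file 'data.json'; the event names are the constants from './events'.
//
//   const fs = require('fs')
//   const walk = require('./walk')
//   const events = require('./events')
//
//   const emitter = walk(fs.createReadStream('data.json'))
//   emitter.on(events.array, () => console.log('['))
//   emitter.on(events.endArray, () => console.log(']'))
//   emitter.on(events.property, name => console.log('key:', name))
//   emitter.on(events.dataError, err => console.error('bad JSON:', err))
//   emitter.on(events.end, () => console.log('done'))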
|
||||
function initialise (stream, options = {}) {
|
||||
check.assert.instanceStrict(stream, require('stream').Readable, 'Invalid stream argument')
|
||||
|
||||
const currentPosition = {
|
||||
line: 1,
|
||||
column: 1
|
||||
}
|
||||
const emitter = new EventEmitter()
|
||||
const handlers = {
|
||||
arr: value,
|
||||
obj: property
|
||||
}
|
||||
const json = []
|
||||
const lengths = []
|
||||
const previousPosition = {}
|
||||
const Promise = promise(options)
|
||||
const scopes = []
|
||||
const yieldRate = options.yieldRate || 16384
|
||||
const shouldHandleNdjson = !! options.ndjson
|
||||
|
||||
let index = 0
|
||||
let isStreamEnded = false
|
||||
let isWalkBegun = false
|
||||
let isWalkEnded = false
|
||||
let isWalkingString = false
|
||||
let hasEndedLine = true
|
||||
let count = 0
|
||||
let resumeFn
|
||||
let pause
|
||||
let cachedCharacter
|
||||
|
||||
stream.setEncoding('utf8')
|
||||
stream.on('data', readStream)
|
||||
stream.on('end', endStream)
|
||||
stream.on('error', err => {
|
||||
emitter.emit(events.error, err)
|
||||
endStream()
|
||||
})
|
||||
|
||||
emitter.pause = () => {
|
||||
let resolve
|
||||
pause = new Promise(res => resolve = res)
|
||||
return () => {
|
||||
pause = null
|
||||
count = 0
|
||||
|
||||
if (shouldHandleNdjson && isStreamEnded && isWalkEnded) {
|
||||
emit(events.end)
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return emitter
|
||||
|
||||
function readStream (chunk) {
|
||||
addChunk(chunk)
|
||||
|
||||
if (isWalkBegun) {
|
||||
return resume()
|
||||
}
|
||||
|
||||
isWalkBegun = true
|
||||
value()
|
||||
}
|
||||
|
||||
function addChunk (chunk) {
|
||||
json.push(chunk)
|
||||
|
||||
const chunkLength = chunk.length
|
||||
lengths.push({
|
||||
item: chunkLength,
|
||||
aggregate: length() + chunkLength
|
||||
})
|
||||
}
|
||||
|
||||
function length () {
|
||||
const chunkCount = lengths.length
|
||||
|
||||
if (chunkCount === 0) {
|
||||
return 0
|
||||
}
|
||||
|
||||
return lengths[chunkCount - 1].aggregate
|
||||
}
|
||||
|
||||
function value () {
|
||||
/* eslint-disable no-underscore-dangle */
|
||||
if (++count % yieldRate !== 0) {
|
||||
return _do()
|
||||
}
|
||||
|
||||
return new Promise(resolve => {
|
||||
setImmediate(() => _do().then(resolve))
|
||||
})
|
||||
|
||||
function _do () {
|
||||
return awaitNonWhitespace()
|
||||
.then(next)
|
||||
.then(handleValue)
|
||||
.catch(() => {})
|
||||
}
|
||||
/* eslint-enable no-underscore-dangle */
|
||||
}
|
||||
|
||||
function awaitNonWhitespace () {
|
||||
return wait()
|
||||
|
||||
function wait () {
|
||||
return awaitCharacter()
|
||||
.then(step)
|
||||
}
|
||||
|
||||
function step () {
|
||||
if (isWhitespace(character())) {
|
||||
return next().then(wait)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function awaitCharacter () {
|
||||
let resolve, reject
|
||||
|
||||
if (index < length()) {
|
||||
return Promise.resolve()
|
||||
}
|
||||
|
||||
if (isStreamEnded) {
|
||||
setImmediate(endWalk)
|
||||
return Promise.reject()
|
||||
}
|
||||
|
||||
resumeFn = after
|
||||
|
||||
return new Promise((res, rej) => {
|
||||
resolve = res
|
||||
reject = rej
|
||||
})
|
||||
|
||||
function after () {
|
||||
if (index < length()) {
|
||||
return resolve()
|
||||
}
|
||||
|
||||
reject()
|
||||
|
||||
if (isStreamEnded) {
|
||||
setImmediate(endWalk)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function character () {
|
||||
if (cachedCharacter) {
|
||||
return cachedCharacter
|
||||
}
|
||||
|
||||
if (lengths[0].item > index) {
|
||||
return cachedCharacter = json[0][index]
|
||||
}
|
||||
|
||||
const len = lengths.length
|
||||
for (let i = 1; i < len; ++i) {
|
||||
const { aggregate, item } = lengths[i]
|
||||
if (aggregate > index) {
|
||||
return cachedCharacter = json[i][index + item - aggregate]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function isWhitespace (char) {
|
||||
switch (char) {
|
||||
case '\n':
|
||||
if (shouldHandleNdjson && scopes.length === 0) {
|
||||
return false
|
||||
}
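// Deliberate fall-through: outside NDJSON mode (or inside a scope) a newline
// is treated as ordinary whitespace, like the cases below.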
|
||||
case ' ':
|
||||
case '\t':
|
||||
case '\r':
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
function next () {
|
||||
return awaitCharacter().then(after)
|
||||
|
||||
function after () {
|
||||
const result = character()
|
||||
|
||||
cachedCharacter = null
|
||||
index += 1
|
||||
previousPosition.line = currentPosition.line
|
||||
previousPosition.column = currentPosition.column
|
||||
|
||||
if (result === '\n') {
|
||||
currentPosition.line += 1
|
||||
currentPosition.column = 1
|
||||
} else {
|
||||
currentPosition.column += 1
|
||||
}
|
||||
|
||||
if (index > lengths[0].aggregate) {
|
||||
json.shift()
|
||||
|
||||
const difference = lengths.shift().item
|
||||
index -= difference
|
||||
|
||||
lengths.forEach(len => len.aggregate -= difference)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
function handleValue (char) {
|
||||
if (shouldHandleNdjson && scopes.length === 0) {
|
||||
if (char === '\n') {
|
||||
hasEndedLine = true
|
||||
return emit(events.endLine)
|
||||
.then(value)
|
||||
}
|
||||
|
||||
if (! hasEndedLine) {
|
||||
return fail(char, '\n', previousPosition)
|
||||
.then(value)
|
||||
}
|
||||
|
||||
hasEndedLine = false
|
||||
}
|
||||
|
||||
switch (char) {
|
||||
case '[':
|
||||
return array()
|
||||
case '{':
|
||||
return object()
|
||||
case '"':
|
||||
return string()
|
||||
case '0':
|
||||
case '1':
|
||||
case '2':
|
||||
case '3':
|
||||
case '4':
|
||||
case '5':
|
||||
case '6':
|
||||
case '7':
|
||||
case '8':
|
||||
case '9':
|
||||
case '-':
|
||||
case '.':
|
||||
return number(char)
|
||||
case 'f':
|
||||
return literalFalse()
|
||||
case 'n':
|
||||
return literalNull()
|
||||
case 't':
|
||||
return literalTrue()
|
||||
default:
|
||||
return fail(char, 'value', previousPosition)
|
||||
.then(value)
|
||||
}
|
||||
}
|
||||
|
||||
function array () {
|
||||
return scope(events.array, value)
|
||||
}
|
||||
|
||||
function scope (event, contentHandler) {
|
||||
return emit(event)
|
||||
.then(() => {
|
||||
scopes.push(event)
|
||||
return endScope(event)
|
||||
})
|
||||
.then(contentHandler)
|
||||
}
|
||||
|
||||
function emit (...args) {
|
||||
return (pause || Promise.resolve())
|
||||
.then(() => {
|
||||
try {
|
||||
emitter.emit(...args)
|
||||
} catch (err) {
|
||||
try {
|
||||
emitter.emit(events.error, err)
|
||||
} catch (_) {
|
||||
// When calling user code, anything is possible
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function endScope (scp) {
|
||||
return awaitNonWhitespace()
|
||||
.then(() => {
|
||||
if (character() === terminators[scp]) {
|
||||
return emit(events.endPrefix + scp)
|
||||
.then(() => {
|
||||
scopes.pop()
|
||||
return next()
|
||||
})
|
||||
.then(endValue)
|
||||
}
|
||||
})
|
||||
.catch(endWalk)
|
||||
}
|
||||
|
||||
function endValue () {
|
||||
return awaitNonWhitespace()
|
||||
.then(after)
|
||||
.catch(endWalk)
|
||||
|
||||
function after () {
|
||||
if (scopes.length === 0) {
|
||||
if (shouldHandleNdjson) {
|
||||
return value()
|
||||
}
|
||||
|
||||
return fail(character(), 'EOF', currentPosition)
|
||||
.then(value)
|
||||
}
|
||||
|
||||
return checkScope()
|
||||
}
|
||||
|
||||
function checkScope () {
|
||||
const scp = scopes[scopes.length - 1]
|
||||
const handler = handlers[scp]
|
||||
|
||||
return endScope(scp)
|
||||
.then(() => {
|
||||
if (scopes.length > 0) {
|
||||
return checkCharacter(character(), ',', currentPosition)
|
||||
}
|
||||
})
|
||||
.then(result => {
|
||||
if (result) {
|
||||
return next()
|
||||
}
|
||||
})
|
||||
.then(handler)
|
||||
}
|
||||
}
|
||||
|
||||
function fail (actual, expected, position) {
|
||||
return emit(
|
||||
events.dataError,
|
||||
error.create(
|
||||
actual,
|
||||
expected,
|
||||
position.line,
|
||||
position.column
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
function checkCharacter (char, expected, position) {
|
||||
if (char === expected) {
|
||||
return Promise.resolve(true)
|
||||
}
|
||||
|
||||
return fail(char, expected, position)
|
||||
.then(() => false)
|
||||
}
|
||||
|
||||
function object () {
|
||||
return scope(events.object, property)
|
||||
}
|
||||
|
||||
function property () {
|
||||
return awaitNonWhitespace()
|
||||
.then(next)
|
||||
.then(propertyName)
|
||||
}
|
||||
|
||||
function propertyName (char) {
|
||||
return checkCharacter(char, '"', previousPosition)
|
||||
.then(() => walkString(events.property))
|
||||
.then(awaitNonWhitespace)
|
||||
.then(next)
|
||||
.then(propertyValue)
|
||||
}
|
||||
|
||||
function propertyValue (char) {
|
||||
return checkCharacter(char, ':', previousPosition)
|
||||
.then(value)
|
||||
}
|
||||
|
||||
function walkString (event) {
|
||||
let isEscaping = false
|
||||
const str = []
|
||||
|
||||
isWalkingString = true
|
||||
|
||||
return next().then(step)
|
||||
|
||||
function step (char) {
|
||||
if (isEscaping) {
|
||||
isEscaping = false
|
||||
|
||||
return escape(char).then(escaped => {
|
||||
str.push(escaped)
|
||||
return next().then(step)
|
||||
})
|
||||
}
|
||||
|
||||
if (char === '\\') {
|
||||
isEscaping = true
|
||||
return next().then(step)
|
||||
}
|
||||
|
||||
if (char !== '"') {
|
||||
str.push(char)
|
||||
return next().then(step)
|
||||
}
|
||||
|
||||
isWalkingString = false
|
||||
return emit(event, str.join(''))
|
||||
}
|
||||
}
|
||||
|
||||
function escape (char) {
|
||||
if (escapes[char]) {
|
||||
return Promise.resolve(escapes[char])
|
||||
}
|
||||
|
||||
if (char === 'u') {
|
||||
return escapeHex()
|
||||
}
|
||||
|
||||
return fail(char, 'escape character', previousPosition)
|
||||
.then(() => `\\${char}`)
|
||||
}
|
||||
|
||||
function escapeHex () {
|
||||
let hexits = []
|
||||
|
||||
return next().then(step.bind(null, 0))
|
||||
|
||||
function step (idx, char) {
|
||||
if (isHexit(char)) {
|
||||
hexits.push(char)
|
||||
}
|
||||
|
||||
if (idx < 3) {
|
||||
return next().then(step.bind(null, idx + 1))
|
||||
}
|
||||
|
||||
hexits = hexits.join('')
|
||||
|
||||
if (hexits.length === 4) {
|
||||
return String.fromCharCode(parseInt(hexits, 16))
|
||||
}
|
||||
|
||||
return fail(char, 'hex digit', previousPosition)
|
||||
.then(() => `\\u${hexits}${char}`)
|
||||
}
|
||||
}
|
||||
|
||||
function string () {
|
||||
return walkString(events.string).then(endValue)
|
||||
}
|
||||
|
||||
function number (firstCharacter) {
|
||||
let digits = [ firstCharacter ]
|
||||
|
||||
return walkDigits().then(addDigits.bind(null, checkDecimalPlace))
|
||||
|
||||
function addDigits (step, result) {
|
||||
digits = digits.concat(result.digits)
|
||||
|
||||
if (result.atEnd) {
|
||||
return endNumber()
|
||||
}
|
||||
|
||||
return step()
|
||||
}
|
||||
|
||||
function checkDecimalPlace () {
|
||||
if (character() === '.') {
|
||||
return next()
|
||||
.then(char => {
|
||||
digits.push(char)
|
||||
return walkDigits()
|
||||
})
|
||||
.then(addDigits.bind(null, checkExponent))
|
||||
}
|
||||
|
||||
return checkExponent()
|
||||
}
|
||||
|
||||
function checkExponent () {
|
||||
if (character() === 'e' || character() === 'E') {
|
||||
return next()
|
||||
.then(char => {
|
||||
digits.push(char)
|
||||
return awaitCharacter()
|
||||
})
|
||||
.then(checkSign)
|
||||
.catch(fail.bind(null, 'EOF', 'exponent', currentPosition))
|
||||
}
|
||||
|
||||
return endNumber()
|
||||
}
|
||||
|
||||
function checkSign () {
|
||||
if (character() === '+' || character() === '-') {
|
||||
return next().then(char => {
|
||||
digits.push(char)
|
||||
return readExponent()
|
||||
})
|
||||
}
|
||||
|
||||
return readExponent()
|
||||
}
|
||||
|
||||
function readExponent () {
|
||||
return walkDigits().then(addDigits.bind(null, endNumber))
|
||||
}
|
||||
|
||||
function endNumber () {
|
||||
return emit(events.number, parseFloat(digits.join('')))
|
||||
.then(endValue)
|
||||
}
|
||||
}
|
||||
|
||||
function walkDigits () {
|
||||
const digits = []
|
||||
|
||||
return wait()
|
||||
|
||||
function wait () {
|
||||
return awaitCharacter()
|
||||
.then(step)
|
||||
.catch(atEnd)
|
||||
}
|
||||
|
||||
function step () {
|
||||
if (isDigit(character())) {
|
||||
return next().then(char => {
|
||||
digits.push(char)
|
||||
return wait()
|
||||
})
|
||||
}
|
||||
|
||||
return { digits, atEnd: false }
|
||||
}
|
||||
|
||||
function atEnd () {
|
||||
return { digits, atEnd: true }
|
||||
}
|
||||
}
|
||||
|
||||
function literalFalse () {
|
||||
return literal([ 'a', 'l', 's', 'e' ], false)
|
||||
}
|
||||
|
||||
function literal (expectedCharacters, val) {
|
||||
let actual, expected, invalid
|
||||
|
||||
return wait()
|
||||
|
||||
function wait () {
|
||||
return awaitCharacter()
|
||||
.then(step)
|
||||
.catch(atEnd)
|
||||
}
|
||||
|
||||
function step () {
|
||||
if (invalid || expectedCharacters.length === 0) {
|
||||
return atEnd()
|
||||
}
|
||||
|
||||
return next().then(afterNext)
|
||||
}
|
||||
|
||||
function atEnd () {
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
if (invalid) {
|
||||
return fail(actual, expected, previousPosition)
|
||||
}
|
||||
|
||||
if (expectedCharacters.length > 0) {
|
||||
return fail('EOF', expectedCharacters.shift(), currentPosition)
|
||||
}
|
||||
|
||||
return done()
|
||||
})
|
||||
.then(endValue)
|
||||
}
|
||||
|
||||
function afterNext (char) {
|
||||
actual = char
|
||||
expected = expectedCharacters.shift()
|
||||
|
||||
if (actual !== expected) {
|
||||
invalid = true
|
||||
}
|
||||
|
||||
return wait()
|
||||
}
|
||||
|
||||
function done () {
|
||||
return emit(events.literal, val)
|
||||
}
|
||||
}
|
||||
|
||||
function literalNull () {
|
||||
return literal([ 'u', 'l', 'l' ], null)
|
||||
}
|
||||
|
||||
function literalTrue () {
|
||||
return literal([ 'r', 'u', 'e' ], true)
|
||||
}
|
||||
|
||||
function endStream () {
|
||||
isStreamEnded = true
|
||||
|
||||
if (isWalkBegun) {
|
||||
return resume()
|
||||
}
|
||||
|
||||
endWalk()
|
||||
}
|
||||
|
||||
function resume () {
|
||||
if (resumeFn) {
|
||||
resumeFn()
|
||||
resumeFn = null
|
||||
}
|
||||
}
|
||||
|
||||
function endWalk () {
|
||||
if (isWalkEnded) {
|
||||
return Promise.resolve()
|
||||
}
|
||||
|
||||
isWalkEnded = true
|
||||
|
||||
return Promise.resolve()
|
||||
.then(() => {
|
||||
if (isWalkingString) {
|
||||
return fail('EOF', '"', currentPosition)
|
||||
}
|
||||
})
|
||||
.then(popScopes)
|
||||
.then(() => emit(events.end))
|
||||
}
|
||||
|
||||
function popScopes () {
|
||||
if (scopes.length === 0) {
|
||||
return Promise.resolve()
|
||||
}
|
||||
|
||||
return fail('EOF', terminators[scopes.pop()], currentPosition)
|
||||
.then(popScopes)
|
||||
}
|
||||
}
|
||||
|
||||
function isHexit (character) {
|
||||
return isDigit(character) ||
|
||||
isInRange(character, 'A', 'F') ||
|
||||
isInRange(character, 'a', 'f')
|
||||
}
|
||||
|
||||
function isDigit (character) {
|
||||
return isInRange(character, '0', '9')
|
||||
}
|
||||
|
||||
function isInRange (character, lower, upper) {
|
||||
const code = character.charCodeAt(0)
|
||||
|
||||
return code >= lower.charCodeAt(0) && code <= upper.charCodeAt(0)
|
||||
}
|
55
web/node_modules/bfj/src/write.js
generated
vendored
Normal file
|
@ -0,0 +1,55 @@
|
|||
'use strict'
|
||||
|
||||
const fs = require('fs')
|
||||
const promise = require('./promise')
|
||||
const streamify = require('./streamify')
|
||||
|
||||
module.exports = write
|
||||
|
||||
/**
|
||||
* Public function `write`.
|
||||
*
|
||||
* Returns a promise and asynchronously serialises a data structure to a
|
||||
* JSON file on disk. Sanely handles promises, buffers, maps and other
|
||||
* iterables.
|
||||
*
|
||||
* @param path: Path to the JSON file.
|
||||
*
|
||||
* @param data: The data to transform.
|
||||
*
|
||||
* @option space: Indentation string, or the number of spaces
|
||||
* to indent each nested level by.
|
||||
*
|
||||
* @option promises: 'resolve' or 'ignore', default is 'resolve'.
|
||||
*
|
||||
* @option buffers: 'toString' or 'ignore', default is 'toString'.
|
||||
*
|
||||
* @option maps: 'object' or 'ignore', default is 'object'.
|
||||
*
|
||||
* @option iterables: 'array' or 'ignore', default is 'array'.
|
||||
*
|
||||
* @option circular: 'error' or 'ignore', default is 'error'.
|
||||
*
|
||||
* @option yieldRate: The number of data items to process per timeslice,
|
||||
* default is 16384.
|
||||
*
|
||||
* @option bufferLength: The length of the buffer, default is 1024.
|
||||
*
|
||||
* @option highWaterMark: If set, will be passed to the readable stream constructor
|
||||
* as the value for the highWaterMark option.
|
||||
*
|
||||
* @option Promise: The promise constructor to use, defaults to bluebird.
|
||||
**/
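// Illustrative usage (not part of the upstream source), assuming a hypothetical
// output path:
//
//   const write = require('./write')
//
//   write('./out.json', { big: 'data structure' }, { space: 2 })
//     .then(() => console.log('file written'))
//     .catch(err => console.error(err))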
|
||||
function write (path, data, options) {
|
||||
const Promise = promise(options)
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
streamify(data, options)
|
||||
.pipe(fs.createWriteStream(path, options))
|
||||
.on('finish', () => {
|
||||
resolve()
|
||||
})
|
||||
.on('error', reject)
|
||||
.on('dataError', reject)
|
||||
})
|
||||
}
|
429
web/node_modules/bfj/test/integration.js
generated
vendored
Normal file
|
@ -0,0 +1,429 @@
|
|||
'use strict'
|
||||
|
||||
const assert = require('chai').assert
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const Promise = require('bluebird')
|
||||
const request = require('request')
|
||||
const stream = require('stream')
|
||||
|
||||
const modulePath = '../src'
|
||||
|
||||
suite('integration:', () => {
|
||||
let log
|
||||
|
||||
setup(() => {
|
||||
log = {}
|
||||
})
|
||||
|
||||
test('require does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
require(modulePath)
|
||||
})
|
||||
})
|
||||
|
||||
test('require returns object', () => {
|
||||
assert.isObject(require(modulePath))
|
||||
})
|
||||
|
||||
suite('require:', () => {
|
||||
let bfj
|
||||
|
||||
setup(() => {
|
||||
bfj = require(modulePath)
|
||||
})
|
||||
|
||||
test('walk function is exported', () => {
|
||||
assert.isFunction(bfj.walk)
|
||||
})
|
||||
|
||||
test('walk expects one argument', () => {
|
||||
assert.lengthOf(bfj.walk, 1)
|
||||
})
|
||||
|
||||
test('match function is exported', () => {
|
||||
assert.isFunction(bfj.match)
|
||||
})
|
||||
|
||||
test('match expects two arguments', () => {
|
||||
assert.lengthOf(bfj.match, 2)
|
||||
})
|
||||
|
||||
test('parse function is exported', () => {
|
||||
assert.isFunction(bfj.parse)
|
||||
})
|
||||
|
||||
test('parse expects one argument', () => {
|
||||
assert.lengthOf(bfj.parse, 1)
|
||||
})
|
||||
|
||||
test('read function is exported', () => {
|
||||
assert.isFunction(bfj.read)
|
||||
})
|
||||
|
||||
test('read expects two arguments', () => {
|
||||
assert.lengthOf(bfj.read, 2)
|
||||
})
|
||||
|
||||
test('eventify function is exported', () => {
|
||||
assert.isFunction(bfj.eventify)
|
||||
})
|
||||
|
||||
test('eventify expects one argument', () => {
|
||||
assert.lengthOf(bfj.eventify, 1)
|
||||
})
|
||||
|
||||
test('streamify function is exported', () => {
|
||||
assert.isFunction(bfj.streamify)
|
||||
})
|
||||
|
||||
test('streamify expects one argument', () => {
|
||||
assert.lengthOf(bfj.streamify, 1)
|
||||
})
|
||||
|
||||
test('stringify function is exported', () => {
|
||||
assert.isFunction(bfj.stringify)
|
||||
})
|
||||
|
||||
test('stringify expects two arguments', () => {
|
||||
assert.lengthOf(bfj.stringify, 2)
|
||||
})
|
||||
|
||||
test('write function is exported', () => {
|
||||
assert.isFunction(bfj.write)
|
||||
})
|
||||
|
||||
test('write expects three arguments', () => {
|
||||
assert.lengthOf(bfj.write, 3)
|
||||
})
|
||||
|
||||
test('events are exported', () => {
|
||||
assert.deepEqual(bfj.events, require('../src/events'))
|
||||
})
|
||||
|
||||
suite('read object:', () => {
|
||||
let failed, file, result, error
|
||||
|
||||
setup(() => {
|
||||
failed = false
|
||||
file = path.join(__dirname, 'data.json')
|
||||
fs.writeFileSync(file, JSON.stringify({
|
||||
foo: [ 'b', 'a', 'r' ],
|
||||
baz: null,
|
||||
qux: 3.14159265359e42
|
||||
}, null, '\t'))
|
||||
return bfj.read(file)
|
||||
.then(res => {
|
||||
result = res
|
||||
})
|
||||
.catch(err => {
|
||||
failed = true
|
||||
error = err
|
||||
})
|
||||
})
|
||||
|
||||
teardown(() => {
|
||||
fs.unlinkSync(file)
|
||||
})
|
||||
|
||||
test('result was correct', () => {
|
||||
assert.isFalse(failed)
|
||||
assert.isUndefined(error)
|
||||
assert.isObject(result)
|
||||
assert.lengthOf(Object.keys(result), 3)
|
||||
assert.isArray(result.foo)
|
||||
assert.lengthOf(result.foo, 3)
|
||||
assert.strictEqual(result.foo[0], 'b')
|
||||
assert.strictEqual(result.foo[1], 'a')
|
||||
assert.strictEqual(result.foo[2], 'r')
|
||||
assert.isNull(result.baz)
|
||||
assert.strictEqual(result.qux, 3.14159265359e42)
|
||||
})
|
||||
})
|
||||
|
||||
suite('read value:', () => {
|
||||
let failed, file, result, error
|
||||
|
||||
setup(() => {
|
||||
failed = false
|
||||
file = path.join(__dirname, 'data.json')
|
||||
fs.writeFileSync(file, '"foo"')
|
||||
return bfj.read(file)
|
||||
.then(res => {
|
||||
result = res
|
||||
})
|
||||
.catch(err => {
|
||||
failed = true
|
||||
error = err
|
||||
})
|
||||
})
|
||||
|
||||
teardown(() => {
|
||||
fs.unlinkSync(file)
|
||||
})
|
||||
|
||||
test('result was correct', () => {
|
||||
assert.isFalse(failed)
|
||||
assert.isUndefined(error)
|
||||
assert.strictEqual(result, 'foo')
|
||||
})
|
||||
})
|
||||
|
||||
suite('read error:', () => {
|
||||
let failed, file, result, error
|
||||
|
||||
setup(() => {
|
||||
failed = false
|
||||
file = path.join(__dirname, 'data.json')
|
||||
fs.writeFileSync(file, '"foo" "bar"')
|
||||
return bfj.read(file)
|
||||
.then(res => result = res)
|
||||
.catch(err => {
|
||||
failed = true
|
||||
error = err
|
||||
})
|
||||
})
|
||||
|
||||
teardown(() => {
|
||||
fs.unlinkSync(file)
|
||||
})
|
||||
|
||||
test('result was correct', () => {
|
||||
assert.isTrue(failed)
|
||||
assert.isUndefined(result)
|
||||
assert.instanceOf(error, Error)
|
||||
})
|
||||
})
|
||||
|
||||
suite('read missing file:', () => {
|
||||
let failed, file, result, error
|
||||
|
||||
setup(() => {
|
||||
failed = false
|
||||
file = path.join(__dirname, 'missing.json')
|
||||
assert.isFalse(fs.existsSync(file))
|
||||
return bfj.read(file)
|
||||
.then(res => result = res)
|
||||
.catch(err => {
|
||||
failed = true
|
||||
error = err
|
||||
})
|
||||
})
|
||||
|
||||
test('result was correct', () => {
|
||||
assert.isTrue(failed)
|
||||
assert.isUndefined(result)
|
||||
assert.instanceOf(error, Error)
|
||||
})
|
||||
})
|
||||
|
||||
suite('match predicate:', () => {
|
||||
let file, results, errors
|
||||
|
||||
setup(done => {
|
||||
file = path.join(__dirname, 'data.json')
|
||||
fs.writeFileSync(file, JSON.stringify({
|
||||
foo: 'bar',
|
||||
baz: 'qux',
|
||||
wibble: 'blee'
|
||||
}))
|
||||
results = []
|
||||
errors = []
|
||||
const datastream = bfj.match(
|
||||
fs.createReadStream(file),
|
||||
(k, v) => k === 'baz' || v === 'blee',
|
||||
{ minDepth: 1 }
|
||||
)
|
||||
datastream.on('data', item => results.push(item))
|
||||
datastream.on('error', error => errors.push(error))
|
||||
datastream.on('end', done)
|
||||
})
|
||||
|
||||
test('the correct properties were matched', () => {
|
||||
assert.deepEqual([ 'qux', 'blee' ], results)
|
||||
})
|
||||
|
||||
test('no errors occurred', () => {
|
||||
assert.deepEqual(errors, [])
|
||||
})
|
||||
})
|
||||
|
||||
suite('match nested:', () => {
|
||||
let file, results, errors
|
||||
|
||||
setup(done => {
|
||||
file = path.join(__dirname, 'data.json')
|
||||
fs.writeFileSync(file, JSON.stringify({
|
||||
foo: {
|
||||
bar: 'baz'
|
||||
}
|
||||
}))
|
||||
results = []
|
||||
errors = []
|
||||
const datastream = bfj.match(fs.createReadStream(file), () => true)
|
||||
datastream.on('data', item => results.push(item))
|
||||
datastream.on('error', error => errors.push(error))
|
||||
datastream.on('end', done)
|
||||
})
|
||||
|
||||
test('the correct properties were matched', () => {
|
||||
assert.deepEqual([ 'baz', { bar: 'baz' }, { foo: { bar: 'baz' } } ], results)
|
||||
})
|
||||
|
||||
test('no errors occurred', () => {
|
||||
assert.deepEqual(errors, [])
|
||||
})
|
||||
})
|
||||
|
||||
suite('match ndjson:', () => {
|
||||
let file, results, errors
|
||||
|
||||
setup(done => {
|
||||
file = path.join(__dirname, 'data.ndjson')
|
||||
fs.writeFileSync(file, [
|
||||
JSON.stringify([ 'a', 'b' ]),
|
||||
JSON.stringify(null),
|
||||
'',
|
||||
'',
|
||||
JSON.stringify('wibble')
|
||||
].join('\n'))
|
||||
results = []
|
||||
errors = []
|
||||
const datastream = bfj.match(fs.createReadStream(file), () => true, { ndjson: true })
|
||||
datastream.on('data', item => results.push(item))
|
||||
datastream.on('error', error => errors.push(error))
|
||||
datastream.on('end', done)
|
||||
})
|
||||
|
||||
test('the correct properties were matched', () => {
|
||||
assert.deepEqual([ 'a', 'b', [ 'a', 'b' ], 'wibble' ], results)
|
||||
})
|
||||
|
||||
test('no errors occurred', () => {
|
||||
assert.deepEqual(errors, [])
|
||||
})
|
||||
})
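// Illustrative sketch, based on the match suites above: bfj.match returns a readable stream
// that emits every value for which the (key, value) predicate returns true; minDepth skips
// shallow matches, ndjson treats each line as a separate document, and nested matches are
// emitted innermost first.
//
//   const matches = bfj.match(fs.createReadStream(file), (key, value) => key === 'baz', { minDepth: 1 })
//   matches.on('data', value => { /* matched value */ })
//   matches.on('error', error => { /* parse failure */ })
//   matches.on('end', () => { /* input exhausted */ })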
|
||||
|
||||
suite('parse request:', () => {
|
||||
let error, result
|
||||
|
||||
setup(done => {
|
||||
const jsonstream = new stream.PassThrough()
|
||||
request({ url: 'https://gitlab.com/philbooth/bfj/raw/master/package.json' })
|
||||
.pipe(bfj.unpipe((err, res) => {
|
||||
error = err
|
||||
result = res
|
||||
done()
|
||||
}))
|
||||
})
|
||||
|
||||
test('result was correct', () => {
|
||||
assert.isNull(error)
|
||||
assert.deepEqual(result, require('../package.json'))
|
||||
})
|
||||
})
|
||||
|
||||
suite('parse NDJSON:', () => {
|
||||
let failed, file, results
|
||||
|
||||
setup(() => {
|
||||
failed = false
|
||||
file = path.join(__dirname, 'data.ndjson')
|
||||
results = []
|
||||
fs.writeFileSync(file, [
|
||||
JSON.stringify([ 'b', 'a', 'r' ]),
|
||||
JSON.stringify(null),
|
||||
'',
|
||||
'',
|
||||
JSON.stringify('wibble')
|
||||
].join('\n'))
|
||||
const stream = fs.createReadStream(file)
|
||||
return bfj.parse(stream, { ndjson: true })
|
||||
.then(result => {
|
||||
results.push(result)
|
||||
return bfj.parse(stream, { ndjson: true })
|
||||
})
|
||||
.then(result => {
|
||||
results.push(result)
|
||||
return bfj.parse(stream, { ndjson: true })
|
||||
})
|
||||
.then(result => {
|
||||
results.push(result)
|
||||
return bfj.parse(stream, { ndjson: true })
|
||||
})
|
||||
.then(result => {
|
||||
results.push(result)
|
||||
return bfj.parse(stream, { ndjson: true })
|
||||
})
|
||||
.then(result => results.push(result))
|
||||
.catch(e => {
|
||||
failed = true
|
||||
})
|
||||
})
|
||||
|
||||
teardown(() => {
|
||||
fs.unlinkSync(file)
|
||||
})
|
||||
|
||||
test('results were correct', () => {
|
||||
assert.isFalse(failed)
|
||||
assert.lengthOf(results, 5)
|
||||
assert.deepEqual(results, [
|
||||
[ 'b', 'a', 'r' ],
|
||||
null,
|
||||
'wibble',
|
||||
undefined,
|
||||
undefined
|
||||
])
|
||||
})
|
||||
})
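// Illustrative sketch, based on the suite above: with { ndjson: true } each bfj.parse call
// consumes one newline-delimited document from the same stream, blank lines are skipped,
// and calls made after the stream is exhausted resolve with undefined.
//
//   const ndjsonStream = fs.createReadStream(file)
//   bfj.parse(ndjsonStream, { ndjson: true })                    // first document
//     .then(first => bfj.parse(ndjsonStream, { ndjson: true }))  // second document
//     .then(second => { /* ...and so on, until undefined is returned */ })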
|
||||
|
||||
suite('stringify value:', () => {
let result

setup(() => {
return bfj.stringify(new Promise(resolve => {
setTimeout(resolve.bind(null, 'foo\t"\nbar'), 20)
}))
.then(res => result = res)
})

test('result was correct', () => {
assert.strictEqual(result, '"foo\\t\\"\\nbar"')
})
})

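// Illustrative sketch, based on the suite above: bfj.stringify waits for promises inside the
// data to resolve and escapes control and quote characters in strings.
//
//   bfj.stringify(Promise.resolve('foo\t"\nbar'))
//     .then(json => { /* '"foo\\t\\"\\nbar"' */ })
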
suite('write object:', () => {
|
||||
let failed, file, result
|
||||
|
||||
setup(() => {
|
||||
failed = false
|
||||
file = path.join(__dirname, 'data.json')
|
||||
return bfj.write(
|
||||
file,
|
||||
{ foo: [ 'b', 'a', 'r' ], baz: null, qux: 3.14159265359e42 }
|
||||
)
|
||||
.then(() => {
|
||||
result = fs.readFileSync(file, { encoding: 'utf8' })
|
||||
})
|
||||
.catch(error => {
|
||||
failed = true
|
||||
result = error
|
||||
})
|
||||
})
|
||||
|
||||
teardown(() => {
|
||||
fs.unlinkSync(file)
|
||||
})
|
||||
|
||||
test('did not fail', () => {
|
||||
assert.isFalse(failed)
|
||||
})
|
||||
|
||||
test('result was correct', () => {
|
||||
assert.strictEqual(result, '{"foo":["b","a","r"],"baz":null,"qux":3.14159265359e+42}')
|
||||
})
|
||||
})
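// Illustrative sketch, based on the suite above: bfj.write serialises the value to the target
// path and returns a promise that settles once the file has been written.
//
//   bfj.write(file, { foo: [ 'b', 'a', 'r' ], baz: null })
//     .then(() => fs.readFileSync(file, { encoding: 'utf8' }))  // '{"foo":["b","a","r"],"baz":null}'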
|
||||
})
|
||||
})
|
||||
|
38
web/node_modules/bfj/test/performance.js
generated
vendored
Normal file

@@ -0,0 +1,38 @@
#!/usr/bin/env node

'use strict'

const fs = require('fs')
const path = require('path')
const check = require('check-types')
const bfj = require('../src')

console.log('reading json')

let time = process.hrtime()

bfj.read(getDataPath('.json'))
.then(data => {
reportTime()
console.log('writing json')
return bfj.write(getDataPath('-result.json'), data)
})
.then(() => done('succeeded'))
.catch(error => done(error.stack, 1))

function getDataPath (suffix) {
return path.resolve(__dirname, process.argv[2] + suffix)
}

function reportTime () {
let interimTime = process.hrtime(time)
console.log('%d seconds and %d nanoseconds', interimTime[0], interimTime[1])
time = process.hrtime()
}

function done (message, code) {
reportTime()
console.log(message)
process.exit(code)
}

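// Usage sketch for the script above: the single argument is a path prefix resolved against
// this directory, so `node test/performance.js big` reads big.json and writes big-result.json
// while logging the elapsed time for each phase.
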
95
web/node_modules/bfj/test/unit/datastream.js
generated
vendored
Normal file

@@ -0,0 +1,95 @@
'use strict'
|
||||
|
||||
const assert = require('chai').assert
|
||||
const spooks = require('spooks')
|
||||
|
||||
const modulePath = '../../src/datastream'
|
||||
|
||||
suite('datastream:', () => {
|
||||
let log
|
||||
|
||||
setup(() => {
|
||||
log = {}
|
||||
})
|
||||
|
||||
test('require does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
require(modulePath)
|
||||
})
|
||||
})
|
||||
|
||||
test('require returns function', () => {
|
||||
assert.isFunction(require(modulePath))
|
||||
})
|
||||
|
||||
suite('require:', () => {
|
||||
let Stream
|
||||
|
||||
setup(() => {
|
||||
Stream = require(modulePath)
|
||||
})
|
||||
|
||||
test('Stream expects two arguments', () => {
|
||||
assert.lengthOf(Stream, 2)
|
||||
})
|
||||
|
||||
test('calling Stream with function argument doesNotThrow', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
Stream(() => {})
|
||||
})
|
||||
})
|
||||
|
||||
test('calling Stream with object argument throws', () => {
|
||||
assert.throws(() => {
|
||||
Stream({ read: () => {} })
|
||||
})
|
||||
})
|
||||
|
||||
test('calling Stream with new returns Stream instance', () => {
|
||||
assert.instanceOf(new Stream(() => {}), Stream)
|
||||
})
|
||||
|
||||
test('calling Stream with new returns Readable instance', () => {
|
||||
assert.instanceOf(new Stream(() => {}), require('stream').Readable)
|
||||
})
|
||||
|
||||
test('calling Stream without new returns Stream instance', () => {
|
||||
assert.instanceOf(Stream(() => {}), Stream)
|
||||
})
|
||||
|
||||
suite('instantiate:', () => {
|
||||
let datastream
|
||||
|
||||
setup(() => {
|
||||
datastream = new Stream(spooks.fn({ name: 'read', log: log }))
|
||||
})
|
||||
|
||||
test('datastream has _read method', () => {
|
||||
assert.isFunction(datastream._read)
|
||||
})
|
||||
|
||||
test('_read expects no arguments', () => {
|
||||
assert.lengthOf(datastream._read, 0)
|
||||
})
|
||||
|
||||
test('read was not called', () => {
|
||||
assert.strictEqual(log.counts.read, 0)
|
||||
})
|
||||
|
||||
suite('datastream._read:', () => {
|
||||
setup(() => {
|
||||
datastream._read()
|
||||
})
|
||||
|
||||
test('read was called once', () => {
|
||||
assert.strictEqual(log.counts.read, 1)
|
||||
assert.isUndefined(log.these.read[0])
|
||||
})
|
||||
|
||||
test('read was called correctly', () => {
|
||||
assert.lengthOf(log.args.read[0], 0)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
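// Summary sketch for the suite above: the datastream module exports a Readable subclass whose
// _read simply defers to the read callback supplied at construction.
//
//   const DataStream = require('../../src/datastream')
//   const datastream = new DataStream(() => { /* push data when the consumer asks for more */ })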
|
80
web/node_modules/bfj/test/unit/error.js
generated
vendored
Normal file

@@ -0,0 +1,80 @@
'use strict'
|
||||
|
||||
const assert = require('chai').assert
|
||||
const modulePath = '../../src/error'
|
||||
|
||||
suite('error:', () => {
|
||||
let log
|
||||
|
||||
setup(() => {
|
||||
log = {}
|
||||
})
|
||||
|
||||
test('require does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
require(modulePath)
|
||||
})
|
||||
})
|
||||
|
||||
test('require returns object', () => {
|
||||
assert.isObject(require(modulePath))
|
||||
})
|
||||
|
||||
suite('require:', () => {
|
||||
let error
|
||||
|
||||
setup(() => {
|
||||
error = require(modulePath)
|
||||
})
|
||||
|
||||
test('error has create method', () => {
|
||||
assert.isFunction(error.create)
|
||||
})
|
||||
|
||||
test('error has no other methods', () => {
|
||||
assert.lengthOf(Object.keys(error), 1)
|
||||
})
|
||||
|
||||
test('create expects four arguments', () => {
|
||||
assert.lengthOf(error.create, 4)
|
||||
})
|
||||
|
||||
test('create does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
error.create()
|
||||
})
|
||||
})
|
||||
|
||||
test('create returns Error', () => {
|
||||
assert.instanceOf(error.create(), Error)
|
||||
})
|
||||
|
||||
suite('create:', () => {
|
||||
let created
|
||||
|
||||
setup(() => {
|
||||
created = error.create('foo', 'bar', 'baz', 'qux')
|
||||
})
|
||||
|
||||
test('created has correct actual property', () => {
|
||||
assert.strictEqual(created.actual, 'foo')
|
||||
})
|
||||
|
||||
test('created has correct expected property', () => {
|
||||
assert.strictEqual(created.expected, 'bar')
|
||||
})
|
||||
|
||||
test('created has correct lineNumber property', () => {
|
||||
assert.strictEqual(created.lineNumber, 'baz')
|
||||
})
|
||||
|
||||
test('created has correct columnNumber property', () => {
|
||||
assert.strictEqual(created.columnNumber, 'qux')
|
||||
})
|
||||
|
||||
test('created has correct message property', () => {
|
||||
assert.strictEqual(created.message, 'JSON error: encountered `foo` at line baz, column qux where `bar` was expected.')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
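// Summary sketch for the suite above: error.create(actual, expected, lineNumber, columnNumber)
// returns an Error carrying those four properties plus a formatted message.
//
//   error.create('foo', 'bar', 'baz', 'qux').message
//   // 'JSON error: encountered `foo` at line baz, column qux where `bar` was expected.'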
|
2357
web/node_modules/bfj/test/unit/eventify.js
generated
vendored
Normal file
File diff suppressed because it is too large

95
web/node_modules/bfj/test/unit/jsonstream.js
generated
vendored
Normal file

@@ -0,0 +1,95 @@
'use strict'
|
||||
|
||||
const assert = require('chai').assert
|
||||
const spooks = require('spooks')
|
||||
|
||||
const modulePath = '../../src/jsonstream'
|
||||
|
||||
suite('jsonstream:', () => {
|
||||
let log
|
||||
|
||||
setup(() => {
|
||||
log = {}
|
||||
})
|
||||
|
||||
test('require does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
require(modulePath)
|
||||
})
|
||||
})
|
||||
|
||||
test('require returns function', () => {
|
||||
assert.isFunction(require(modulePath))
|
||||
})
|
||||
|
||||
suite('require:', () => {
|
||||
let Stream
|
||||
|
||||
setup(() => {
|
||||
Stream = require(modulePath)
|
||||
})
|
||||
|
||||
test('Stream expects two arguments', () => {
|
||||
assert.lengthOf(Stream, 2)
|
||||
})
|
||||
|
||||
test('calling Stream with function argument doesNotThrow', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
Stream(() => {})
|
||||
})
|
||||
})
|
||||
|
||||
test('calling Stream with object argument throws', () => {
|
||||
assert.throws(() => {
|
||||
Stream({ read: () => {} })
|
||||
})
|
||||
})
|
||||
|
||||
test('calling Stream with new returns Stream instance', () => {
|
||||
assert.instanceOf(new Stream(() => {}), Stream)
|
||||
})
|
||||
|
||||
test('calling Stream with new returns Readable instance', () => {
|
||||
assert.instanceOf(new Stream(() => {}), require('stream').Readable)
|
||||
})
|
||||
|
||||
test('calling Stream without new returns Stream instance', () => {
|
||||
assert.instanceOf(Stream(() => {}), Stream)
|
||||
})
|
||||
|
||||
suite('instantiate:', () => {
|
||||
let jsonstream
|
||||
|
||||
setup(() => {
|
||||
jsonstream = new Stream(spooks.fn({ name: 'read', log: log }))
|
||||
})
|
||||
|
||||
test('jsonstream has _read method', () => {
|
||||
assert.isFunction(jsonstream._read)
|
||||
})
|
||||
|
||||
test('_read expects no arguments', () => {
|
||||
assert.lengthOf(jsonstream._read, 0)
|
||||
})
|
||||
|
||||
test('read was not called', () => {
|
||||
assert.strictEqual(log.counts.read, 0)
|
||||
})
|
||||
|
||||
suite('jsonstream._read:', () => {
|
||||
setup(() => {
|
||||
jsonstream._read()
|
||||
})
|
||||
|
||||
test('read was called once', () => {
|
||||
assert.strictEqual(log.counts.read, 1)
|
||||
assert.isUndefined(log.these.read[0])
|
||||
})
|
||||
|
||||
test('read was called correctly', () => {
|
||||
assert.lengthOf(log.args.read[0], 0)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
1198
web/node_modules/bfj/test/unit/match.js
generated
vendored
Normal file
File diff suppressed because it is too large

1020
web/node_modules/bfj/test/unit/parse.js
generated
vendored
Normal file
File diff suppressed because it is too large

102
web/node_modules/bfj/test/unit/read.js
generated
vendored
Normal file

@@ -0,0 +1,102 @@
'use strict'
|
||||
|
||||
const assert = require('chai').assert
|
||||
const proxyquire = require('proxyquire')
|
||||
const spooks = require('spooks')
|
||||
|
||||
const modulePath = '../../src/read'
|
||||
|
||||
suite('read:', () => {
|
||||
test('require does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
require(modulePath)
|
||||
})
|
||||
})
|
||||
|
||||
test('require returns function', () => {
|
||||
assert.isFunction(require(modulePath))
|
||||
})
|
||||
|
||||
suite('require:', () => {
|
||||
let log, results, read
|
||||
|
||||
setup(() => {
|
||||
log = {}
|
||||
results = {
|
||||
parse: [ {} ],
|
||||
createReadStream: [ {} ]
|
||||
}
|
||||
read = proxyquire(modulePath, {
|
||||
fs: {
|
||||
createReadStream: spooks.fn({
|
||||
name: 'createReadStream',
|
||||
log: log,
|
||||
results: results.createReadStream
|
||||
})
|
||||
},
|
||||
'./parse': spooks.fn({
|
||||
name: 'parse',
|
||||
log: log,
|
||||
results: results.parse
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('read expects two arguments', () => {
|
||||
assert.lengthOf(read, 2)
|
||||
})
|
||||
|
||||
test('read does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
read()
|
||||
})
|
||||
})
|
||||
|
||||
test('parse was not called', () => {
|
||||
assert.strictEqual(log.counts.parse, 0)
|
||||
})
|
||||
|
||||
test('fs.createReadStream was not called', () => {
|
||||
assert.strictEqual(log.counts.createReadStream, 0)
|
||||
})
|
||||
|
||||
suite('read:', () => {
|
||||
let path, options, result
|
||||
|
||||
setup(() => {
|
||||
path = {}
|
||||
options = { foo: 'bar', ndjson: true }
|
||||
result = read(path, options)
|
||||
})
|
||||
|
||||
test('fs.createReadStream was called once', () => {
|
||||
assert.strictEqual(log.counts.createReadStream, 1)
|
||||
})
|
||||
|
||||
test('fs.createReadStream was called correctly', () => {
|
||||
assert.lengthOf(log.args.createReadStream[0], 2)
|
||||
assert.strictEqual(log.args.createReadStream[0][0], path)
|
||||
assert.lengthOf(Object.keys(log.args.createReadStream[0][0]), 0)
|
||||
assert.strictEqual(log.args.createReadStream[0][1], options)
|
||||
assert.lengthOf(Object.keys(log.args.createReadStream[0][1]), 2)
|
||||
})
|
||||
|
||||
test('parse was called once', () => {
|
||||
assert.strictEqual(log.counts.parse, 1)
|
||||
})
|
||||
|
||||
test('parse was called correctly', () => {
|
||||
assert.isUndefined(log.these.parse[0])
|
||||
assert.lengthOf(log.args.parse[0], 2)
|
||||
assert.strictEqual(log.args.parse[0][0], results.createReadStream[0])
|
||||
assert.lengthOf(Object.keys(log.args.parse[0][0]), 0)
|
||||
assert.notStrictEqual(log.args.parse[0][1], options)
|
||||
assert.deepEqual(log.args.parse[0][1], { foo: 'bar', ndjson: false })
|
||||
})
|
||||
|
||||
test('parse result was returned', () => {
|
||||
assert.strictEqual(result, results.parse[0])
|
||||
})
|
||||
})
|
||||
})
|
||||
})
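// Summary sketch for the suites above: read(path, options) opens the file with
// fs.createReadStream(path, options), forwards the stream to parse with ndjson forced to
// false, and returns whatever parse returns.
//
//   const read = require('../../src/read')
//   read('data.json', { ndjson: true })  // parse still receives { ndjson: false }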
|
1005
web/node_modules/bfj/test/unit/streamify.js
generated
vendored
Normal file
File diff suppressed because it is too large

195
web/node_modules/bfj/test/unit/stringify.js
generated
vendored
Normal file

@@ -0,0 +1,195 @@
'use strict'
|
||||
|
||||
const assert = require('chai').assert
|
||||
const proxyquire = require('proxyquire')
|
||||
const spooks = require('spooks')
|
||||
const Promise = require('bluebird')
|
||||
|
||||
const modulePath = '../../src/stringify'
|
||||
|
||||
suite('stringify:', () => {
|
||||
test('require does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
require(modulePath)
|
||||
})
|
||||
})
|
||||
|
||||
test('require returns function', () => {
|
||||
assert.isFunction(require(modulePath))
|
||||
})
|
||||
|
||||
suite('require:', () => {
|
||||
let log, stringify
|
||||
|
||||
setup(() => {
|
||||
log = {}
|
||||
|
||||
stringify = proxyquire(modulePath, {
|
||||
'./streamify': spooks.fn({
|
||||
name: 'streamify',
|
||||
log: log,
|
||||
results: [
|
||||
{ on: spooks.fn({ name: 'on', log: log }) }
|
||||
]
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('stringify expects two arguments', () => {
|
||||
assert.lengthOf(stringify, 2)
|
||||
})
|
||||
|
||||
test('stringify does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
stringify()
|
||||
})
|
||||
})
|
||||
|
||||
test('stringify returns promise', () => {
|
||||
assert.instanceOf(stringify(), Promise)
|
||||
})
|
||||
|
||||
test('streamify was not called', () => {
|
||||
assert.strictEqual(log.counts.streamify, 0)
|
||||
})
|
||||
|
||||
suite('stringify:', () => {
|
||||
let data, options, resolved, rejected, result, done
|
||||
|
||||
setup(() => {
|
||||
data = {}
|
||||
options = {}
|
||||
stringify(data, options)
|
||||
.then(res => {
|
||||
resolved = res
|
||||
done()
|
||||
})
|
||||
.catch(rej => {
|
||||
rejected = rej
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
teardown(() => {
|
||||
resolved = rejected = undefined
|
||||
})
|
||||
|
||||
test('streamify was called once', () => {
|
||||
assert.strictEqual(log.counts.streamify, 1)
|
||||
assert.isUndefined(log.these.streamify[0])
|
||||
})
|
||||
|
||||
test('streamify was called correctly', () => {
|
||||
assert.lengthOf(log.args.streamify[0], 2)
|
||||
assert.strictEqual(log.args.streamify[0][0], data)
|
||||
assert.lengthOf(Object.keys(log.args.streamify[0][0]), 0)
|
||||
assert.strictEqual(log.args.streamify[0][1], options)
|
||||
assert.lengthOf(Object.keys(log.args.streamify[0][1]), 0)
|
||||
})
|
||||
|
||||
test('stream.on was called four times', () => {
|
||||
assert.strictEqual(log.counts.on, 4)
|
||||
})
|
||||
|
||||
test('stream.on was called correctly first time', () => {
|
||||
assert.lengthOf(log.args.on[0], 2)
|
||||
assert.strictEqual(log.args.on[0][0], 'data')
|
||||
assert.isFunction(log.args.on[0][1])
|
||||
})
|
||||
|
||||
test('stream.on was called correctly second time', () => {
|
||||
assert.strictEqual(log.args.on[1][0], 'end')
|
||||
assert.isFunction(log.args.on[1][1])
|
||||
assert.notStrictEqual(log.args.on[1][1], log.args.on[0][1])
|
||||
})
|
||||
|
||||
test('stream.on was called correctly third time', () => {
|
||||
assert.strictEqual(log.args.on[2][0], 'error')
|
||||
assert.isFunction(log.args.on[2][1])
|
||||
assert.notStrictEqual(log.args.on[2][1], log.args.on[0][1])
|
||||
assert.notStrictEqual(log.args.on[2][1], log.args.on[1][1])
|
||||
})
|
||||
|
||||
test('stream.on was called correctly fourth time', () => {
|
||||
assert.strictEqual(log.args.on[3][0], 'dataError')
|
||||
assert.isFunction(log.args.on[3][1])
|
||||
assert.strictEqual(log.args.on[3][1], log.args.on[2][1])
|
||||
})
|
||||
|
||||
test('promise is unfulfilled', () => {
|
||||
assert.isUndefined(resolved)
|
||||
assert.isUndefined(rejected)
|
||||
})
|
||||
|
||||
suite('data event:', () => {
|
||||
setup(() => {
|
||||
log.args.on[0][1]('foo')
|
||||
})
|
||||
|
||||
test('promise is unfulfilled', () => {
|
||||
assert.isUndefined(resolved)
|
||||
assert.isUndefined(rejected)
|
||||
})
|
||||
|
||||
suite('end event:', () => {
|
||||
setup(d => {
|
||||
done = d
|
||||
log.args.on[1][1]()
|
||||
})
|
||||
|
||||
test('promise is resolved', () => {
|
||||
assert.strictEqual(resolved, 'foo')
|
||||
})
|
||||
|
||||
test('promise is not rejected', () => {
|
||||
assert.isUndefined(rejected)
|
||||
})
|
||||
})
|
||||
|
||||
suite('data event:', () => {
|
||||
setup(() => {
|
||||
log.args.on[0][1]('bar')
|
||||
})
|
||||
|
||||
test('promise is unfulfilled', () => {
|
||||
assert.isUndefined(resolved)
|
||||
assert.isUndefined(rejected)
|
||||
})
|
||||
|
||||
suite('end event:', () => {
|
||||
setup(d => {
|
||||
done = d
|
||||
log.args.on[1][1]()
|
||||
})
|
||||
|
||||
test('promise is resolved', () => {
|
||||
assert.strictEqual(resolved, 'foobar')
|
||||
})
|
||||
})
|
||||
|
||||
suite('error event:', () => {
|
||||
setup(d => {
|
||||
done = d
|
||||
log.args.on[2][1]('wibble')
|
||||
})
|
||||
|
||||
test('promise is rejected', () => {
|
||||
assert.strictEqual(rejected, 'wibble')
|
||||
})
|
||||
})
|
||||
|
||||
suite('dataError event:', () => {
|
||||
setup(d => {
|
||||
done = d
|
||||
log.args.on[3][1]('wibble')
|
||||
})
|
||||
|
||||
test('promise is rejected', () => {
|
||||
assert.strictEqual(rejected, 'wibble')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
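// Summary sketch for the suites above: stringify(data, options) drives streamify, concatenates
// every 'data' chunk, resolves the returned promise on 'end' and rejects it on 'error' or
// 'dataError'.
//
//   stringify({ foo: 'bar' }, {}).then(json => { /* accumulated JSON string */ })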
|
125
web/node_modules/bfj/test/unit/unpipe.js
generated
vendored
Normal file

@@ -0,0 +1,125 @@
'use strict'
|
||||
|
||||
const assert = require('chai').assert
|
||||
const proxyquire = require('proxyquire')
|
||||
const spooks = require('spooks')
|
||||
const Promise = require('bluebird')
|
||||
|
||||
const modulePath = '../../src/unpipe'
|
||||
|
||||
suite('unpipe:', () => {
|
||||
test('require does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
require(modulePath)
|
||||
})
|
||||
})
|
||||
|
||||
test('require returns function', () => {
|
||||
assert.isFunction(require(modulePath))
|
||||
})
|
||||
|
||||
suite('require:', () => {
|
||||
let log, results, unpipe
|
||||
|
||||
setup(() => {
|
||||
log = {}
|
||||
results = {
|
||||
parse: [ Promise.resolve() ]
|
||||
}
|
||||
unpipe = proxyquire(modulePath, {
|
||||
'./parse': spooks.fn({
|
||||
name: 'parse',
|
||||
log: log,
|
||||
results: results.parse
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('unpipe expects two arguments', () => {
|
||||
assert.lengthOf(unpipe, 2)
|
||||
})
|
||||
|
||||
test('unpipe does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
unpipe(() => {})
|
||||
})
|
||||
})
|
||||
|
||||
test('unpipe throws if callback is not provided', () => {
|
||||
assert.throws(() => {
|
||||
unpipe()
|
||||
})
|
||||
})
|
||||
|
||||
test('parse was not called', () => {
|
||||
assert.strictEqual(log.counts.parse, 0)
|
||||
})
|
||||
|
||||
suite('unpipe success:', () => {
|
||||
let result, error, options
|
||||
|
||||
setup(done => {
|
||||
results.parse[0] = Promise.resolve('foo')
|
||||
options = { foo: 'bar', ndjson: true }
|
||||
unpipe((err, res) => {
|
||||
error = err
|
||||
result = res
|
||||
done()
|
||||
}, options)
|
||||
})
|
||||
|
||||
test('parse was called once', () => {
|
||||
assert.strictEqual(log.counts.parse, 1)
|
||||
})
|
||||
|
||||
test('parse was called correctly', () => {
|
||||
assert.isUndefined(log.these.parse[0])
|
||||
assert.lengthOf(log.args.parse[0], 2)
|
||||
assert.isObject(log.args.parse[0][0])
|
||||
assert.isTrue(log.args.parse[0][0].readable)
|
||||
assert.isTrue(log.args.parse[0][0].writable)
|
||||
assert.isFunction(log.args.parse[0][0].pipe)
|
||||
assert.isFunction(log.args.parse[0][0].read)
|
||||
assert.isFunction(log.args.parse[0][0]._read)
|
||||
assert.isFunction(log.args.parse[0][0].write)
|
||||
assert.isFunction(log.args.parse[0][0]._write)
|
||||
assert.notStrictEqual(log.args.parse[0][1], options)
|
||||
assert.deepEqual(log.args.parse[0][1], { foo: 'bar', ndjson: false })
|
||||
})
|
||||
|
||||
test('parse result was returned', () => {
|
||||
assert.strictEqual(result, 'foo')
|
||||
})
|
||||
|
||||
test('did not fail', () => {
|
||||
assert.isNull(error)
|
||||
})
|
||||
})
|
||||
|
||||
suite('unpipe error:', () => {
|
||||
let result, error, options
|
||||
|
||||
setup(done => {
|
||||
results.parse[0] = Promise.reject('bar')
|
||||
options = {}
|
||||
unpipe((err, res) => {
|
||||
error = err
|
||||
result = res
|
||||
done()
|
||||
}, options)
|
||||
})
|
||||
|
||||
test('parse was called once', () => {
|
||||
assert.strictEqual(log.counts.parse, 1)
|
||||
})
|
||||
|
||||
test('parse result was not returned', () => {
|
||||
assert.isUndefined(result)
|
||||
})
|
||||
|
||||
test('failed', () => {
|
||||
assert.strictEqual(error, 'bar')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
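// Summary sketch for the suites above: unpipe(callback, options) returns a duplex stream that
// feeds whatever is piped into it through parse (with ndjson forced to false) and invokes the
// node-style callback with (error, result).
//
//   request(url).pipe(bfj.unpipe((error, result) => { /* parsed body or failure */ }))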
|
3255
web/node_modules/bfj/test/unit/walk.js
generated
vendored
Normal file
File diff suppressed because it is too large

260
web/node_modules/bfj/test/unit/write.js
generated
vendored
Normal file

@@ -0,0 +1,260 @@
'use strict'
|
||||
|
||||
const assert = require('chai').assert
|
||||
const proxyquire = require('proxyquire')
|
||||
const spooks = require('spooks')
|
||||
const Promise = require('bluebird')
|
||||
|
||||
const modulePath = '../../src/write'
|
||||
|
||||
suite('write:', () => {
|
||||
test('require does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
require(modulePath)
|
||||
})
|
||||
})
|
||||
|
||||
test('require returns function', () => {
|
||||
assert.isFunction(require(modulePath))
|
||||
})
|
||||
|
||||
suite('require:', () => {
|
||||
let log, results, write
|
||||
|
||||
setup(() => {
|
||||
log = {}
|
||||
results = {
|
||||
createWriteStream: [ {} ]
|
||||
}
|
||||
|
||||
write = proxyquire(modulePath, {
|
||||
'fs': {
|
||||
createWriteStream: spooks.fn({
|
||||
name: 'createWriteStream',
|
||||
log: log,
|
||||
results: results.createWriteStream
|
||||
})
|
||||
},
|
||||
'./streamify': spooks.fn({
|
||||
name: 'streamify',
|
||||
log: log,
|
||||
results: [
|
||||
{
|
||||
pipe: spooks.fn({ name: 'pipe', log: log, chain: true }),
|
||||
on: spooks.fn({ name: 'on', log: log, chain: true })
|
||||
}
|
||||
]
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('write expects three arguments', () => {
|
||||
assert.lengthOf(write, 3)
|
||||
})
|
||||
|
||||
test('write does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
write()
|
||||
})
|
||||
})
|
||||
|
||||
test('streamify was not called', () => {
|
||||
assert.strictEqual(log.counts.streamify, 0)
|
||||
})
|
||||
|
||||
test('fs.createWriteStream was not called', () => {
|
||||
assert.strictEqual(log.counts.createWriteStream, 0)
|
||||
})
|
||||
|
||||
test('stream.pipe was not called', () => {
|
||||
assert.strictEqual(log.counts.pipe, 0)
|
||||
})
|
||||
|
||||
test('stream.on was not called', () => {
|
||||
assert.strictEqual(log.counts.on, 0)
|
||||
})
|
||||
|
||||
suite('write:', () => {
|
||||
let path, data, options, result
|
||||
|
||||
setup(() => {
|
||||
path = {}
|
||||
data = {}
|
||||
options = {}
|
||||
result = write(path, data, options)
|
||||
})
|
||||
|
||||
test('streamify was called once', () => {
|
||||
assert.strictEqual(log.counts.streamify, 1)
|
||||
assert.isUndefined(log.these.streamify[0])
|
||||
})
|
||||
|
||||
test('streamify was called correctly', () => {
|
||||
assert.lengthOf(log.args.streamify[0], 2)
|
||||
assert.strictEqual(log.args.streamify[0][0], data)
|
||||
assert.lengthOf(Object.keys(log.args.streamify[0][0]), 0)
|
||||
assert.strictEqual(log.args.streamify[0][1], options)
|
||||
assert.lengthOf(Object.keys(log.args.streamify[0][1]), 0)
|
||||
})
|
||||
|
||||
test('fs.createWriteStream was called once', () => {
|
||||
assert.strictEqual(log.counts.createWriteStream, 1)
|
||||
})
|
||||
|
||||
test('fs.createWriteStream was called correctly', () => {
|
||||
assert.lengthOf(log.args.createWriteStream[0], 2)
|
||||
assert.strictEqual(log.args.createWriteStream[0][0], path)
|
||||
assert.lengthOf(Object.keys(log.args.createWriteStream[0][0]), 0)
|
||||
assert.strictEqual(log.args.createWriteStream[0][1], options)
|
||||
assert.lengthOf(Object.keys(log.args.createWriteStream[0][1]), 0)
|
||||
})
|
||||
|
||||
test('stream.pipe was called once', () => {
|
||||
assert.strictEqual(log.counts.pipe, 1)
|
||||
})
|
||||
|
||||
test('stream.pipe was called correctly', () => {
|
||||
assert.lengthOf(log.args.pipe[0], 1)
|
||||
assert.strictEqual(log.args.pipe[0][0], results.createWriteStream[0])
|
||||
assert.lengthOf(Object.keys(log.args.pipe[0][0]), 0)
|
||||
})
|
||||
|
||||
test('stream.on was called three times', () => {
|
||||
assert.strictEqual(log.counts.on, 3)
|
||||
})
|
||||
|
||||
test('stream.on was called correctly first time', () => {
|
||||
assert.lengthOf(log.args.on[0], 2)
|
||||
assert.strictEqual(log.args.on[0][0], 'finish')
|
||||
assert.isFunction(log.args.on[0][1])
|
||||
})
|
||||
|
||||
test('stream.on was called correctly second time', () => {
|
||||
assert.lengthOf(log.args.on[1], 2)
|
||||
assert.strictEqual(log.args.on[1][0], 'error')
|
||||
assert.isFunction(log.args.on[1][1])
|
||||
assert.notStrictEqual(log.args.on[1][1], log.args.on[0][1])
|
||||
})
|
||||
|
||||
test('stream.on was called correctly third time', () => {
|
||||
assert.lengthOf(log.args.on[2], 2)
|
||||
assert.strictEqual(log.args.on[2][0], 'dataError')
|
||||
assert.isFunction(log.args.on[2][1])
|
||||
assert.notStrictEqual(log.args.on[2][1], log.args.on[0][1])
|
||||
assert.strictEqual(log.args.on[2][1], log.args.on[1][1])
|
||||
})
|
||||
|
||||
test('promise was returned', () => {
|
||||
assert.instanceOf(result, Promise)
|
||||
})
|
||||
|
||||
suite('dispatch finish event:', () => {
|
||||
let resolved, error, passed, failed
|
||||
|
||||
setup(done => {
|
||||
passed = failed = false
|
||||
|
||||
result.then(res => {
|
||||
resolved = res
|
||||
passed = true
|
||||
done()
|
||||
}).catch(err => {
|
||||
error = err
|
||||
failed = true
|
||||
done()
|
||||
})
|
||||
log.args.on[0][1]('foo')
|
||||
})
|
||||
|
||||
test('promise was resolved', () => {
|
||||
assert.isTrue(passed)
|
||||
assert.isFalse(failed)
|
||||
assert.isUndefined(resolved)
|
||||
})
|
||||
})
|
||||
|
||||
suite('dispatch error event:', () => {
|
||||
let resolved, error, passed, failed
|
||||
|
||||
setup(done => {
|
||||
passed = failed = false
|
||||
|
||||
result.then(r => {
|
||||
resolved = r
|
||||
passed = true
|
||||
done()
|
||||
}).catch(e => {
|
||||
error = e
|
||||
failed = true
|
||||
done()
|
||||
})
|
||||
log.args.on[1][1]('foo')
|
||||
})
|
||||
|
||||
test('promise was rejected', () => {
|
||||
assert.isTrue(failed)
|
||||
assert.isFalse(passed)
|
||||
assert.strictEqual(error, 'foo')
|
||||
})
|
||||
})
|
||||
|
||||
suite('dispatch dataError event:', () => {
|
||||
let resolved, error, passed, failed
|
||||
|
||||
setup(done => {
|
||||
passed = failed = false
|
||||
|
||||
result.then(r => {
|
||||
resolved = r
|
||||
passed = true
|
||||
done()
|
||||
}).catch(e => {
|
||||
error = e
|
||||
failed = true
|
||||
done()
|
||||
})
|
||||
log.args.on[2][1]('wibble')
|
||||
})
|
||||
|
||||
test('promise was rejected', () => {
|
||||
assert.isTrue(failed)
|
||||
assert.isFalse(passed)
|
||||
assert.strictEqual(error, 'wibble')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
suite('write with error thrown by fs.createWriteStream:', () => {
|
||||
let write
|
||||
|
||||
setup(() => {
|
||||
write = proxyquire(modulePath, {
|
||||
fs: {
|
||||
createWriteStream () {
|
||||
throw new Error('foo')
|
||||
}
|
||||
},
|
||||
'./streamify': () => ({
|
||||
pipe: spooks.fn({ name: 'pipe', log: {}, chain: true }),
|
||||
on: spooks.fn({ name: 'on', log: {}, chain: true })
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test('write does not throw', () => {
|
||||
assert.doesNotThrow(() => {
|
||||
write().catch(() => {})
|
||||
})
|
||||
})
|
||||
|
||||
test('write rejects', () => {
|
||||
write()
|
||||
.then(() => assert.fail('write should reject'))
|
||||
.catch(error => {
|
||||
assert.instanceOf(error, Error)
|
||||
assert.equal(error.message, 'foo')
|
||||
})
|
||||
})
|
||||
})
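// Summary sketch for the suites above: write(path, data, options) pipes the streamify output
// into fs.createWriteStream(path, options) and returns a promise that resolves on 'finish'
// and rejects on 'error' or 'dataError', including errors thrown synchronously by
// fs.createWriteStream.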
|